From ecdd47f113eaad49974e3f3ea8b5ba18e50c6dfc Mon Sep 17 00:00:00 2001 From: Swenschaeferjohann Date: Tue, 29 Jul 2025 12:08:35 -0400 Subject: [PATCH] cherrypicked hasher update: support sha in hasher and lighthasher macro lint remove unused _output_account_info update lightdiscriminator macro chore: add sha flat macro test, perf: sha flat hash in LightAccount avoid double borsh serialization chore: cleanup tests, add new discriminator test, add anchor discriminator compatibility test: LightAccount close --- .github/workflows/rust.yml | 1 + .github/workflows/sdk-tests.yml | 6 +- Cargo.lock | 1 + program-libs/hasher/src/keccak.rs | 2 + program-libs/hasher/src/lib.rs | 1 + program-libs/hasher/src/poseidon.rs | 2 + program-libs/hasher/src/sha256.rs | 12 + sdk-libs/macros/Cargo.toml | 5 + sdk-libs/macros/src/discriminator.rs | 16 +- sdk-libs/macros/src/hasher/data_hasher.rs | 53 +- sdk-libs/macros/src/hasher/input_validator.rs | 30 + sdk-libs/macros/src/hasher/light_hasher.rs | 413 +++++++++- sdk-libs/macros/src/hasher/mod.rs | 2 +- sdk-libs/macros/src/hasher/to_byte_array.rs | 63 +- sdk-libs/macros/src/lib.rs | 57 +- sdk-libs/macros/tests/discriminator.rs | 16 + sdk-libs/macros/tests/hasher.rs | 158 +++- sdk-libs/sdk/Cargo.toml | 1 + sdk-libs/sdk/src/account.rs | 741 +++++++++++++----- sdk-libs/sdk/src/error.rs | 3 + sdk-libs/sdk/src/lib.rs | 17 +- .../programs/sdk-anchor-test/src/lib.rs | 108 ++- .../programs/sdk-anchor-test/tests/test.rs | 220 +++++- sdk-tests/sdk-native-test/src/create_pda.rs | 8 +- sdk-tests/sdk-native-test/src/update_pda.rs | 2 +- 25 files changed, 1664 insertions(+), 274 deletions(-) create mode 100644 sdk-libs/macros/tests/discriminator.rs diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 1099ac1ce4..212ae4364b 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -47,6 +47,7 @@ jobs: aligned-sized light-hasher light-compressed-account light-account-checks \ light-verifier 
light-merkle-tree-metadata light-zero-copy light-hash-set test_cmd: | + cargo test -p light-macros cargo test -p aligned-sized cargo test -p light-hasher --all-features cargo test -p light-compressed-account --all-features diff --git a/.github/workflows/sdk-tests.yml b/.github/workflows/sdk-tests.yml index 3785ea6811..74807d8cac 100644 --- a/.github/workflows/sdk-tests.yml +++ b/.github/workflows/sdk-tests.yml @@ -56,10 +56,12 @@ jobs: - program: sdk-anchor-test-program sub-tests: '["cargo-test-sbf -p sdk-anchor-test", "cargo-test-sbf -p sdk-pinocchio-test"]' - program: sdk-libs - packages: light-macros light-sdk light-program-test light-client light-batched-merkle-tree + packages: light-sdk-macros light-sdk light-program-test light-client light-batched-merkle-tree test_cmd: | - cargo test -p light-macros + cargo test -p light-sdk-macros + cargo test -p light-sdk-macros --all-features cargo test -p light-sdk + cargo test -p light-sdk --all-features cargo test -p light-program-test cargo test -p light-client cargo test -p client-test diff --git a/Cargo.lock b/Cargo.lock index 63a1c141f0..1a2b749b2c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3694,6 +3694,7 @@ name = "light-sdk-macros" version = "0.13.0" dependencies = [ "borsh 0.10.4", + "light-account-checks", "light-compressed-account", "light-hasher", "light-macros", diff --git a/program-libs/hasher/src/keccak.rs b/program-libs/hasher/src/keccak.rs index 81d81d810c..ab1c666ee8 100644 --- a/program-libs/hasher/src/keccak.rs +++ b/program-libs/hasher/src/keccak.rs @@ -9,6 +9,8 @@ use crate::{ pub struct Keccak; impl Hasher for Keccak { + const ID: u8 = 2; + fn hash(val: &[u8]) -> Result { Self::hashv(&[val]) } diff --git a/program-libs/hasher/src/lib.rs b/program-libs/hasher/src/lib.rs index 9f4e4758c0..83a0875ae9 100644 --- a/program-libs/hasher/src/lib.rs +++ b/program-libs/hasher/src/lib.rs @@ -24,6 +24,7 @@ pub const HASH_BYTES: usize = 32; pub type Hash = [u8; HASH_BYTES]; pub trait Hasher { + const ID: u8; 
fn hash(val: &[u8]) -> Result; fn hashv(vals: &[&[u8]]) -> Result; fn zero_bytes() -> ZeroBytes; diff --git a/program-libs/hasher/src/poseidon.rs b/program-libs/hasher/src/poseidon.rs index 0cd6c670da..b13d4a6a83 100644 --- a/program-libs/hasher/src/poseidon.rs +++ b/program-libs/hasher/src/poseidon.rs @@ -78,6 +78,8 @@ impl From for u64 { pub struct Poseidon; impl Hasher for Poseidon { + const ID: u8 = 0; + fn hash(val: &[u8]) -> Result { Self::hashv(&[val]) } diff --git a/program-libs/hasher/src/sha256.rs b/program-libs/hasher/src/sha256.rs index 8a4b985a52..8c36befa21 100644 --- a/program-libs/hasher/src/sha256.rs +++ b/program-libs/hasher/src/sha256.rs @@ -5,10 +5,22 @@ use crate::{ Hash, Hasher, }; +/// Compile-time assertion trait that ensures a generic Hasher type is SHA256. +/// Used by LightHasherSha macro to enforce SHA256-only implementation at compile time. +pub trait RequireSha256: Hasher { + const ASSERT: () = assert!( + Self::ID == 1, + "DataHasher for LightHasherSha only works with SHA256 (ID=1). Example: your_struct.hash::()?" + ); +} + +impl RequireSha256 for T {} + #[derive(Clone, Copy)] // To allow using with zero copy Solana accounts. 
pub struct Sha256; impl Hasher for Sha256 { + const ID: u8 = 1; fn hash(val: &[u8]) -> Result { Self::hashv(&[val]) } diff --git a/sdk-libs/macros/Cargo.toml b/sdk-libs/macros/Cargo.toml index 791a4e9787..1b13ed2d42 100644 --- a/sdk-libs/macros/Cargo.toml +++ b/sdk-libs/macros/Cargo.toml @@ -6,6 +6,10 @@ repository = "https://github.com/Lightprotocol/light-protocol" license = "Apache-2.0" edition = "2021" + +[features] +anchor-discriminator = [] + [dependencies] proc-macro2 = { workspace = true } quote = { workspace = true } @@ -22,6 +26,7 @@ prettyplease = "0.2.29" solana-pubkey = { workspace = true, features = ["borsh"] } borsh = { workspace = true } light-macros = { workspace = true } +light-account-checks = { workspace = true } [lib] proc-macro = true diff --git a/sdk-libs/macros/src/discriminator.rs b/sdk-libs/macros/src/discriminator.rs index 1d289db888..d9796bfc6f 100644 --- a/sdk-libs/macros/src/discriminator.rs +++ b/sdk-libs/macros/src/discriminator.rs @@ -5,11 +5,17 @@ use syn::{ItemStruct, Result}; pub(crate) fn discriminator(input: ItemStruct) -> Result { let account_name = &input.ident; + // When anchor-discriminator-compat feature is enabled, use "account:" prefix like Anchor does + #[cfg(feature = "anchor-discriminator")] + let hash_input = format!("account:{}", account_name); + + #[cfg(not(feature = "anchor-discriminator"))] + let hash_input = account_name.to_string(); let (impl_gen, type_gen, where_clause) = input.generics.split_for_impl(); let mut discriminator = [0u8; 8]; - discriminator.copy_from_slice(&Sha256::hash(account_name.to_string().as_bytes()).unwrap()[..8]); + discriminator.copy_from_slice(&Sha256::hash(hash_input.as_bytes()).unwrap()[..8]); let discriminator: proc_macro2::TokenStream = format!("{discriminator:?}").parse().unwrap(); Ok(quote! 
{ @@ -26,12 +32,14 @@ pub(crate) fn discriminator(input: ItemStruct) -> Result { #[cfg(test)] mod tests { - use syn::parse_quote; - - use super::*; + #[cfg(not(feature = "anchor-discriminator"))] #[test] fn test_discriminator() { + use syn::parse_quote; + + use super::*; + let input: ItemStruct = parse_quote! { struct MyAccount { a: u32, diff --git a/sdk-libs/macros/src/hasher/data_hasher.rs b/sdk-libs/macros/src/hasher/data_hasher.rs index 2486fdd4b7..421438a11f 100644 --- a/sdk-libs/macros/src/hasher/data_hasher.rs +++ b/sdk-libs/macros/src/hasher/data_hasher.rs @@ -37,7 +37,14 @@ pub(crate) fn generate_data_hasher_impl( slices[num_flattned_fields] = element.as_slice(); } - H::hashv(slices.as_slice()) + let mut result = H::hashv(slices.as_slice())?; + + // Apply field size truncation for non-Poseidon hashers + if H::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + + Ok(result) } } } @@ -59,9 +66,16 @@ pub(crate) fn generate_data_hasher_impl( println!("DataHasher::hash inputs {:?}", debug_prints); } } - H::hashv(&[ + let mut result = H::hashv(&[ #(#data_hasher_assignments.as_slice(),)* - ]) + ])?; + + // Apply field size truncation for non-Poseidon hashers + if H::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + + Ok(result) } } } @@ -69,3 +83,36 @@ pub(crate) fn generate_data_hasher_impl( Ok(hasher_impl) } + +/// SHA256-specific DataHasher implementation that serializes the whole struct +pub(crate) fn generate_data_hasher_impl_sha( + struct_name: &syn::Ident, + generics: &syn::Generics, +) -> Result { + let (impl_gen, type_gen, where_clause) = generics.split_for_impl(); + + let hasher_impl = quote! 
{ + impl #impl_gen ::light_hasher::DataHasher for #struct_name #type_gen #where_clause { + fn hash(&self) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> + where + H: ::light_hasher::Hasher + { + use ::light_hasher::Hasher; + use borsh::BorshSerialize; + + // Compile-time assertion that H must be SHA256 (ID = 1) + use ::light_hasher::sha256::RequireSha256; + let _ = ::ASSERT; + + // For SHA256, we serialize the whole struct and hash it in one go + let serialized = self.try_to_vec().map_err(|_| ::light_hasher::HasherError::BorshError)?; + let mut result = H::hash(&serialized)?; + // Truncate sha256 to 31 be bytes less than 254 bits bn254 field size. + result[0] = 0; + Ok(result) + } + } + }; + + Ok(hasher_impl) +} diff --git a/sdk-libs/macros/src/hasher/input_validator.rs b/sdk-libs/macros/src/hasher/input_validator.rs index af57976b8d..0b2800e15a 100644 --- a/sdk-libs/macros/src/hasher/input_validator.rs +++ b/sdk-libs/macros/src/hasher/input_validator.rs @@ -60,6 +60,36 @@ pub(crate) fn validate_input(input: &ItemStruct) -> Result<()> { Ok(()) } +/// SHA256-specific validation - much more relaxed constraints +pub(crate) fn validate_input_sha(input: &ItemStruct) -> Result<()> { + // Check that we have a struct with named fields + match &input.fields { + Fields::Named(_) => (), + _ => { + return Err(Error::new_spanned( + input, + "Only structs with named fields are supported", + )) + } + }; + + // For SHA256, we don't limit field count or require specific attributes + // Just ensure flatten is not used (not implemented for SHA256 path) + let flatten_field_exists = input + .fields + .iter() + .any(|field| get_field_attribute(field) == FieldAttribute::Flatten); + + if flatten_field_exists { + return Err(Error::new_spanned( + input, + "Flatten attribute is not supported in SHA256 hasher.", + )); + } + + Ok(()) +} + /// Gets the primary attribute for a field (only one attribute can be active) pub(crate) fn get_field_attribute(field: &Field) -> 
FieldAttribute { if field.attrs.iter().any(|attr| attr.path().is_ident("hash")) { diff --git a/sdk-libs/macros/src/hasher/light_hasher.rs b/sdk-libs/macros/src/hasher/light_hasher.rs index 911cc35f73..20f07b28c9 100644 --- a/sdk-libs/macros/src/hasher/light_hasher.rs +++ b/sdk-libs/macros/src/hasher/light_hasher.rs @@ -3,10 +3,10 @@ use quote::quote; use syn::{Fields, ItemStruct, Result}; use crate::hasher::{ - data_hasher::generate_data_hasher_impl, + data_hasher::{generate_data_hasher_impl, generate_data_hasher_impl_sha}, field_processor::{process_field, FieldProcessingContext}, - input_validator::{get_field_attribute, validate_input, FieldAttribute}, - to_byte_array::generate_to_byte_array_impl, + input_validator::{get_field_attribute, validate_input, validate_input_sha, FieldAttribute}, + to_byte_array::{generate_to_byte_array_impl_sha, generate_to_byte_array_impl_with_hasher}, }; /// - ToByteArray: @@ -49,6 +49,33 @@ use crate::hasher::{ /// - Enums, References, SmartPointers: /// - Not supported pub(crate) fn derive_light_hasher(input: ItemStruct) -> Result { + derive_light_hasher_with_hasher(input, "e!(::light_hasher::Poseidon)) +} + +pub(crate) fn derive_light_hasher_sha(input: ItemStruct) -> Result { + // Use SHA256-specific validation (no field count limits) + validate_input_sha(&input)?; + + let generics = input.generics.clone(); + + let fields = match &input.fields { + Fields::Named(fields) => fields.clone(), + _ => unreachable!("Validation should have caught this"), + }; + + let field_count = fields.named.len(); + + let to_byte_array_impl = generate_to_byte_array_impl_sha(&input.ident, &generics, field_count)?; + let data_hasher_impl = generate_data_hasher_impl_sha(&input.ident, &generics)?; + + Ok(quote! 
{ + #to_byte_array_impl + + #data_hasher_impl + }) +} + +fn derive_light_hasher_with_hasher(input: ItemStruct, hasher: &TokenStream) -> Result { // Validate the input structure validate_input(&input)?; @@ -74,8 +101,13 @@ pub(crate) fn derive_light_hasher(input: ItemStruct) -> Result { process_field(field, i, &mut context); }); - let to_byte_array_impl = - generate_to_byte_array_impl(&input.ident, &generics, field_count, &context)?; + let to_byte_array_impl = generate_to_byte_array_impl_with_hasher( + &input.ident, + &generics, + field_count, + &context, + hasher, + )?; let data_hasher_impl = generate_data_hasher_impl(&input.ident, &generics, &context)?; @@ -113,7 +145,21 @@ mod tests { fn to_byte_array( &self, ) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { - ::light_hasher::DataHasher::hash::<::light_hasher::Poseidon>(self) + use ::light_hasher::to_byte_array::ToByteArray; + use ::light_hasher::hash_to_field_size::HashToFieldSize; + use ::light_hasher::Hasher; + let mut result = ::light_hasher::Poseidon::hashv( + &[ + self.a.to_byte_array()?.as_slice(), + self.b.to_byte_array()?.as_slice(), + self.c.to_byte_array()?.as_slice(), + self.d.to_byte_array()?.as_slice(), + ], + )?; + if ::light_hasher::Poseidon::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } } impl ::light_hasher::DataHasher for MyAccount { @@ -134,14 +180,18 @@ impl ::light_hasher::DataHasher for MyAccount { println!("DataHasher::hash inputs {:?}", debug_prints); } } - H::hashv( + let mut result = H::hashv( &[ self.a.to_byte_array()?.as_slice(), self.b.to_byte_array()?.as_slice(), self.c.to_byte_array()?.as_slice(), self.d.to_byte_array()?.as_slice(), ], - ) + )?; + if H::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } }"#; @@ -167,7 +217,16 @@ impl ::light_hasher::DataHasher for MyAccount { fn to_byte_array( &self, ) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { - 
::light_hasher::DataHasher::hash::<::light_hasher::Poseidon>(self) + use ::light_hasher::to_byte_array::ToByteArray; + use ::light_hasher::hash_to_field_size::HashToFieldSize; + use ::light_hasher::Hasher; + let mut result = ::light_hasher::Poseidon::hashv( + &[self.a.to_byte_array()?.as_slice(), self.b.to_byte_array()?.as_slice()], + )?; + if ::light_hasher::Poseidon::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } } impl ::light_hasher::DataHasher for OptionStruct { @@ -188,9 +247,13 @@ impl ::light_hasher::DataHasher for OptionStruct { println!("DataHasher::hash inputs {:?}", debug_prints); } } - H::hashv( + let mut result = H::hashv( &[self.a.to_byte_array()?.as_slice(), self.b.to_byte_array()?.as_slice()], - ) + )?; + if H::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } }"#; @@ -212,10 +275,11 @@ impl ::light_hasher::DataHasher for OptionStruct { let formatted_output = unparse(&syn::parse2(output).unwrap()); const EXPECTED_OUTPUT: &str = r#"impl ::light_hasher::to_byte_array::ToByteArray for TruncateOptionStruct { - const NUM_FIELDS: usize = 1usize; + const NUM_FIELDS: usize = 1; fn to_byte_array( &self, ) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { + use ::light_hasher::to_byte_array::ToByteArray; use ::light_hasher::hash_to_field_size::HashToFieldSize; Ok( if let Some(a) = &self.a { @@ -244,7 +308,7 @@ impl ::light_hasher::DataHasher for TruncateOptionStruct { #[cfg(debug_assertions)] { if std::env::var("RUST_BACKTRACE").is_ok() { - let debug_prints: Vec<[u8; 32]> = vec![ + let debug_prints: Vec<[u8;32]> = vec![ if let Some(a) = & self.a { let result = a.hash_to_field_size() ?; if result == [0u8; 32] { return Err(::light_hasher::errors::HasherError::OptionHashToFieldSizeZero); } @@ -253,7 +317,7 @@ impl ::light_hasher::DataHasher for TruncateOptionStruct { println!("DataHasher::hash inputs {:?}", debug_prints); } } - H::hashv( + let mut result = H::hashv( &[ if let Some(a) = &self.a { 
let result = a.hash_to_field_size()?; @@ -269,7 +333,11 @@ impl ::light_hasher::DataHasher for TruncateOptionStruct { .as_slice(), ], - ) + )?; + if H::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } }"#; @@ -297,7 +365,20 @@ impl ::light_hasher::DataHasher for TruncateOptionStruct { fn to_byte_array( &self, ) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { - ::light_hasher::DataHasher::hash::<::light_hasher::Poseidon>(self) + use ::light_hasher::to_byte_array::ToByteArray; + use ::light_hasher::hash_to_field_size::HashToFieldSize; + use ::light_hasher::Hasher; + let mut result = ::light_hasher::Poseidon::hashv( + &[ + self.a.to_byte_array()?.as_slice(), + self.b.hash_to_field_size()?.as_slice(), + self.c.to_byte_array()?.as_slice(), + ], + )?; + if ::light_hasher::Poseidon::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } } impl ::light_hasher::DataHasher for MixedStruct { @@ -320,13 +401,17 @@ impl ::light_hasher::DataHasher for MixedStruct { println!("DataHasher::hash inputs {:?}", debug_prints); } } - H::hashv( + let mut result = H::hashv( &[ self.a.to_byte_array()?.as_slice(), self.b.hash_to_field_size()?.as_slice(), self.c.to_byte_array()?.as_slice(), ], - ) + )?; + if H::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } }"#; @@ -352,7 +437,16 @@ impl ::light_hasher::DataHasher for MixedStruct { fn to_byte_array( &self, ) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { - ::light_hasher::DataHasher::hash::<::light_hasher::Poseidon>(self) + use ::light_hasher::to_byte_array::ToByteArray; + use ::light_hasher::hash_to_field_size::HashToFieldSize; + use ::light_hasher::Hasher; + let mut result = ::light_hasher::Poseidon::hashv( + &[self.a.to_byte_array()?.as_slice(), self.b.to_byte_array()?.as_slice()], + )?; + if ::light_hasher::Poseidon::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } } impl ::light_hasher::DataHasher for 
OuterStruct { @@ -373,9 +467,13 @@ impl ::light_hasher::DataHasher for OuterStruct { println!("DataHasher::hash inputs {:?}", debug_prints); } } - H::hashv( + let mut result = H::hashv( &[self.a.to_byte_array()?.as_slice(), self.b.to_byte_array()?.as_slice()], - ) + )?; + if H::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) } }"#; // Format both the expected and actual output using prettyplease @@ -405,4 +503,277 @@ impl ::light_hasher::DataHasher for OuterStruct { }; assert!(derive_light_hasher(input).is_ok()); } + + #[test] + fn test_sha256_large_struct_with_pubkeys() { + // Test that SHA256 can handle large structs with Pubkeys that would fail with Poseidon + // This struct has 15 fields including Pubkeys without #[hash] attribute + let input: ItemStruct = parse_quote! { + struct LargeAccountSha { + pub field1: u64, + pub field2: u64, + pub field3: u64, + pub field4: u64, + pub field5: u64, + pub field6: u64, + pub field7: u64, + pub field8: u64, + pub field9: u64, + pub field10: u64, + pub field11: u64, + pub field12: u64, + pub field13: u64, + // Pubkeys without #[hash] attribute - this would fail with Poseidon + pub owner: solana_program::pubkey::Pubkey, + pub authority: solana_program::pubkey::Pubkey, + } + }; + + // SHA256 should handle this fine + let sha_result = derive_light_hasher_sha(input.clone()); + assert!( + sha_result.is_ok(), + "SHA256 should handle large structs with Pubkeys" + ); + + // Regular Poseidon hasher should fail due to field count (>12) and Pubkey without #[hash] + let poseidon_result = derive_light_hasher(input); + assert!( + poseidon_result.is_err(), + "Poseidon should fail with >12 fields and unhashed Pubkeys" + ); + } + + #[test] + fn test_sha256_vs_poseidon_hashing_behavior() { + // Test a struct that both can handle to show the difference in hashing approach + let input: ItemStruct = parse_quote! 
{ + struct TestAccount { + pub data: [u8; 31], + pub counter: u64, + } + }; + + // Both should succeed + let sha_result = derive_light_hasher_sha(input.clone()); + assert!(sha_result.is_ok()); + + let poseidon_result = derive_light_hasher(input); + assert!(poseidon_result.is_ok()); + + // Verify SHA256 implementation serializes whole struct + let sha_output = sha_result.unwrap(); + let sha_code = sha_output.to_string(); + + // SHA256 should use try_to_vec() for whole struct serialization (account for spaces) + assert!( + sha_code.contains("try_to_vec") && sha_code.contains("BorshSerialize"), + "SHA256 should serialize whole struct using try_to_vec. Actual code: {}", + sha_code + ); + assert!( + sha_code.contains("result [0] = 0") || sha_code.contains("result[0] = 0"), + "SHA256 should truncate first byte. Actual code: {}", + sha_code + ); + + // Poseidon should use field-by-field hashing + let poseidon_output = poseidon_result.unwrap(); + let poseidon_code = poseidon_output.to_string(); + + assert!( + poseidon_code.contains("to_byte_array") && poseidon_code.contains("as_slice"), + "Poseidon should use field-by-field hashing with to_byte_array. Actual code: {}", + poseidon_code + ); + } + + #[test] + fn test_sha256_no_field_limit() { + // Test that SHA256 doesn't enforce the 12-field limit + let input: ItemStruct = parse_quote! 
{ + struct ManyFieldsStruct { + pub f1: u32, pub f2: u32, pub f3: u32, pub f4: u32, + pub f5: u32, pub f6: u32, pub f7: u32, pub f8: u32, + pub f9: u32, pub f10: u32, pub f11: u32, pub f12: u32, + pub f13: u32, pub f14: u32, pub f15: u32, pub f16: u32, + pub f17: u32, pub f18: u32, pub f19: u32, pub f20: u32, + } + }; + + // SHA256 should handle 20 fields without issue + let result = derive_light_hasher_sha(input); + assert!(result.is_ok(), "SHA256 should handle any number of fields"); + } + + #[test] + fn test_sha256_flatten_not_supported() { + // Test that SHA256 rejects flatten attribute (not implemented) + let input: ItemStruct = parse_quote! { + struct FlattenStruct { + #[flatten] + pub inner: InnerStruct, + pub data: u64, + } + }; + + let result = derive_light_hasher_sha(input); + assert!(result.is_err(), "SHA256 should reject flatten attribute"); + + let error_msg = result.unwrap_err().to_string(); + assert!( + error_msg.contains("not supported in SHA256"), + "Should mention SHA256 limitation" + ); + } + + #[test] + fn test_sha256_with_discriminator_integration() { + // Test that shows LightHasherSha works with LightDiscriminatorSha for large structs + // This would be impossible with regular Poseidon-based macros + let input: ItemStruct = parse_quote! 
{ + struct LargeIntegratedAccount { + pub field1: u64, pub field2: u64, pub field3: u64, pub field4: u64, + pub field5: u64, pub field6: u64, pub field7: u64, pub field8: u64, + pub field9: u64, pub field10: u64, pub field11: u64, pub field12: u64, + pub field13: u64, pub field14: u64, pub field15: u64, pub field16: u64, + pub field17: u64, pub field18: u64, pub field19: u64, pub field20: u64, + // Pubkeys without #[hash] attribute + pub owner: solana_program::pubkey::Pubkey, + pub authority: solana_program::pubkey::Pubkey, + pub delegate: solana_program::pubkey::Pubkey, + } + }; + + // Both SHA256 hasher and discriminator should work + let sha_hasher_result = derive_light_hasher_sha(input.clone()); + assert!( + sha_hasher_result.is_ok(), + "SHA256 hasher should work with large structs" + ); + + let sha_discriminator_result = crate::discriminator::discriminator(input.clone()); + assert!( + sha_discriminator_result.is_ok(), + "SHA256 discriminator should work with large structs" + ); + + // Regular Poseidon variants should fail + let poseidon_hasher_result = derive_light_hasher(input); + assert!( + poseidon_hasher_result.is_err(), + "Poseidon hasher should fail with large structs" + ); + + // Verify the generated code contains expected patterns + let sha_hasher_code = sha_hasher_result.unwrap().to_string(); + assert!( + sha_hasher_code.contains("try_to_vec"), + "Should use serialization approach" + ); + assert!( + sha_hasher_code.contains("BorshSerialize"), + "Should use Borsh serialization" + ); + + let sha_discriminator_code = sha_discriminator_result.unwrap().to_string(); + assert!( + sha_discriminator_code.contains("LightDiscriminator"), + "Should implement LightDiscriminator" + ); + assert!( + sha_discriminator_code.contains("LIGHT_DISCRIMINATOR"), + "Should provide discriminator constant" + ); + } + + #[test] + fn test_complete_sha256_ecosystem_practical_example() { + // Demonstrates a real-world scenario where SHA256 variants are essential + // This struct 
would be impossible with Poseidon due to: + // 1. >12 fields (23+ fields) + // 2. Multiple Pubkeys without #[hash] attribute + // 3. Large data structures + let input: ItemStruct = parse_quote! { + pub struct ComplexGameState { + // Game metadata (13 fields) + pub game_id: u64, + pub round: u32, + pub turn: u8, + pub phase: u8, + pub start_time: i64, + pub end_time: i64, + pub max_players: u8, + pub current_players: u8, + pub entry_fee: u64, + pub prize_pool: u64, + pub game_mode: u32, + pub difficulty: u8, + pub status: u8, + + // Player information (6 Pubkey fields - would require #[hash] with Poseidon) + pub creator: solana_program::pubkey::Pubkey, + pub winner: solana_program::pubkey::Pubkey, + pub current_player: solana_program::pubkey::Pubkey, + pub authority: solana_program::pubkey::Pubkey, + pub treasury: solana_program::pubkey::Pubkey, + pub program_id: solana_program::pubkey::Pubkey, + + // Game state data (4+ more fields) + pub board_state: [u8; 64], // Large array + pub player_scores: [u32; 8], // Array of scores + pub moves_history: [u16; 32], // Move history + pub special_flags: u32, + + // This gives us 23+ fields total - way beyond Poseidon's 12-field limit + } + }; + + // SHA256 variants should handle this complex struct effortlessly + let sha_hasher_result = derive_light_hasher_sha(input.clone()); + assert!( + sha_hasher_result.is_ok(), + "SHA256 hasher must handle complex real-world structs" + ); + + let sha_discriminator_result = crate::discriminator::discriminator(input.clone()); + assert!( + sha_discriminator_result.is_ok(), + "SHA256 discriminator must handle complex real-world structs" + ); + + // Poseidon would fail with this struct + let poseidon_result = derive_light_hasher(input); + assert!( + poseidon_result.is_err(), + "Poseidon cannot handle structs with >12 fields and unhashed Pubkeys" + ); + + // Verify SHA256 generates efficient serialization-based code + let hasher_code = sha_hasher_result.unwrap().to_string(); + assert!( + 
hasher_code.contains("try_to_vec"), + "Should serialize entire struct efficiently" + ); + assert!( + hasher_code.contains("BorshSerialize"), + "Should use Borsh for serialization" + ); + assert!( + hasher_code.contains("result [0] = 0") || hasher_code.contains("result[0] = 0"), + "Should apply field size truncation. Actual code: {}", + hasher_code + ); + + // Verify discriminator works correctly + let discriminator_code = sha_discriminator_result.unwrap().to_string(); + assert!( + discriminator_code.contains("ComplexGameState"), + "Should target correct struct" + ); + assert!( + discriminator_code.contains("LIGHT_DISCRIMINATOR"), + "Should provide discriminator constant" + ); + } } diff --git a/sdk-libs/macros/src/hasher/mod.rs b/sdk-libs/macros/src/hasher/mod.rs index 5c81807edf..c2ebd8034e 100644 --- a/sdk-libs/macros/src/hasher/mod.rs +++ b/sdk-libs/macros/src/hasher/mod.rs @@ -4,4 +4,4 @@ mod input_validator; mod light_hasher; mod to_byte_array; -pub(crate) use light_hasher::derive_light_hasher; +pub(crate) use light_hasher::{derive_light_hasher, derive_light_hasher_sha}; diff --git a/sdk-libs/macros/src/hasher/to_byte_array.rs b/sdk-libs/macros/src/hasher/to_byte_array.rs index 27d49ae232..600583d8ff 100644 --- a/sdk-libs/macros/src/hasher/to_byte_array.rs +++ b/sdk-libs/macros/src/hasher/to_byte_array.rs @@ -4,11 +4,12 @@ use syn::Result; use crate::hasher::field_processor::FieldProcessingContext; -pub(crate) fn generate_to_byte_array_impl( +pub(crate) fn generate_to_byte_array_impl_with_hasher( struct_name: &syn::Ident, generics: &syn::Generics, field_count: usize, context: &FieldProcessingContext, + hasher: &TokenStream, ) -> Result { let (impl_gen, type_gen, where_clause) = generics.split_for_impl(); @@ -20,34 +21,70 @@ pub(crate) fn generate_to_byte_array_impl( Some(s) => s, None => &alt_res, }; - let field_assignment: TokenStream = syn::parse_str(str)?; - - // Create a token stream with the field_assignment and the import code - let mut hash_imports = 
proc_macro2::TokenStream::new(); - for code in &context.hash_to_field_size_code { - hash_imports.extend(code.clone()); - } + let content: TokenStream = str.parse().expect("Invalid generated code"); Ok(quote! { impl #impl_gen ::light_hasher::to_byte_array::ToByteArray for #struct_name #type_gen #where_clause { - const NUM_FIELDS: usize = #field_count; + const NUM_FIELDS: usize = 1; fn to_byte_array(&self) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { - #hash_imports - #field_assignment + use ::light_hasher::to_byte_array::ToByteArray; + use ::light_hasher::hash_to_field_size::HashToFieldSize; + #content } } }) } else { + let data_hasher_assignments = &context.data_hasher_assignments; Ok(quote! { impl #impl_gen ::light_hasher::to_byte_array::ToByteArray for #struct_name #type_gen #where_clause { const NUM_FIELDS: usize = #field_count; fn to_byte_array(&self) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { - ::light_hasher::DataHasher::hash::<::light_hasher::Poseidon>(self) - } + use ::light_hasher::to_byte_array::ToByteArray; + use ::light_hasher::hash_to_field_size::HashToFieldSize; + use ::light_hasher::Hasher; + let mut result = #hasher::hashv(&[ + #(#data_hasher_assignments.as_slice(),)* + ])?; + + // Truncate field size for non-Poseidon hashers + if #hasher::ID != ::light_hasher::Poseidon::ID { + result[0] = 0; + } + Ok(result) + } } }) } } + +/// SHA256-specific ToByteArray implementation that serializes the whole struct +pub(crate) fn generate_to_byte_array_impl_sha( + struct_name: &syn::Ident, + generics: &syn::Generics, + field_count: usize, +) -> Result { + let (impl_gen, type_gen, where_clause) = generics.split_for_impl(); + + Ok(quote! 
{ + impl #impl_gen ::light_hasher::to_byte_array::ToByteArray for #struct_name #type_gen #where_clause { + const NUM_FIELDS: usize = #field_count; + + fn to_byte_array(&self) -> ::std::result::Result<[u8; 32], ::light_hasher::HasherError> { + use borsh::BorshSerialize; + use ::light_hasher::Hasher; + + // For SHA256, we can serialize the whole struct and hash it in one go + let serialized = self.try_to_vec().map_err(|_| ::light_hasher::HasherError::BorshError)?; + let mut result = ::light_hasher::Sha256::hash(&serialized)?; + + // Truncate field size for SHA256 + result[0] = 0; + + Ok(result) + } + } + }) +} diff --git a/sdk-libs/macros/src/lib.rs b/sdk-libs/macros/src/lib.rs index 324660c861..61fe3c1fac 100644 --- a/sdk-libs/macros/src/lib.rs +++ b/sdk-libs/macros/src/lib.rs @@ -1,6 +1,6 @@ extern crate proc_macro; use accounts::{process_light_accounts, process_light_system_accounts}; -use hasher::derive_light_hasher; +use hasher::{derive_light_hasher, derive_light_hasher_sha}; use proc_macro::TokenStream; use syn::{parse_macro_input, DeriveInput, ItemMod, ItemStruct}; use traits::process_light_traits; @@ -142,10 +142,10 @@ pub fn light_discriminator(input: TokenStream) -> TokenStream { /// Makes the annotated struct hashable by implementing the following traits: /// -/// - [`AsByteVec`](light_hasher::bytes::AsByteVec), which makes the struct +/// - [`ToByteArray`](light_hasher::to_byte_array::ToByteArray), which makes the struct /// convertable to a 2D byte vector. /// - [`DataHasher`](light_hasher::DataHasher), which makes the struct hashable -/// with the `hash()` method, based on the byte inputs from `AsByteVec` +/// with the `hash()` method, based on the byte inputs from `ToByteArray` /// implementation. /// /// This macro assumes that all the fields of the struct implement the @@ -156,7 +156,7 @@ pub fn light_discriminator(input: TokenStream) -> TokenStream { /// /// 1. 
The most recommended one - annotating that type with the `light_hasher` /// macro as well. -/// 2. Manually implementing the `AsByteVec` trait. +/// 2. Manually implementing the `ToByteArray` trait. /// /// # Attributes /// @@ -222,36 +222,37 @@ pub fn light_discriminator(input: TokenStream) -> TokenStream { /// } /// ``` /// -/// Compressed account with a type with a custom `AsByteVec` implementation: +#[proc_macro_derive(LightHasher, attributes(skip, hash))] +pub fn light_hasher(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as ItemStruct); + derive_light_hasher(input) + .unwrap_or_else(|err| err.to_compile_error()) + .into() +} + +/// SHA256 variant of the LightHasher derive macro. /// -/// ```ignore -/// #[derive(LightHasher)] -/// pub struct MyCompressedAccount { -/// a: i64 -/// b: Option, -/// c: RData, -/// } +/// This derive macro automatically implements the `DataHasher` and `ToByteArray` traits +/// for structs, using SHA256 as the hashing algorithm instead of Poseidon. 
/// -/// pub enum RData { -/// A(Ipv4Addr), -/// AAAA(Ipv6Addr), -/// CName(String), -/// } +/// ## Example /// -/// impl AsByteVec for RData { -/// fn as_byte_vec(&self) -> Vec> { -/// match self { -/// Self::A(ipv4_addr) => vec![ipv4_addr.octets().to_vec()], -/// Self::AAAA(ipv6_addr) => vec![ipv6_addr.octets().to_vec()], -/// Self::CName(cname) => cname.as_byte_vec(), -/// } -/// } +/// ```rust +/// use light_sdk_macros::LightHasherSha; +/// use borsh::{BorshSerialize, BorshDeserialize}; +/// use solana_pubkey::Pubkey; +/// +/// #[derive(LightHasherSha, BorshSerialize, BorshDeserialize)] +/// pub struct GameState { +/// pub player: Pubkey, // Will be hashed to 31 bytes +/// pub level: u32, /// } /// ``` -#[proc_macro_derive(LightHasher, attributes(skip, hash))] -pub fn light_hasher(input: TokenStream) -> TokenStream { +#[proc_macro_derive(LightHasherSha, attributes(hash, skip))] +pub fn light_hasher_sha(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as ItemStruct); - derive_light_hasher(input) + + derive_light_hasher_sha(input) .unwrap_or_else(|err| err.to_compile_error()) .into() } diff --git a/sdk-libs/macros/tests/discriminator.rs b/sdk-libs/macros/tests/discriminator.rs new file mode 100644 index 0000000000..a7c70fdfec --- /dev/null +++ b/sdk-libs/macros/tests/discriminator.rs @@ -0,0 +1,16 @@ +use light_account_checks::discriminator::Discriminator as LightDiscriminator; +use light_sdk_macros::LightDiscriminator; + +#[test] +fn test_anchor_discriminator() { + #[cfg(feature = "anchor-discriminator")] + let protocol_config_discriminator = &[96, 176, 239, 146, 1, 254, 99, 146]; + #[cfg(not(feature = "anchor-discriminator"))] + let protocol_config_discriminator = &[254, 235, 147, 47, 205, 77, 97, 201]; + #[derive(LightDiscriminator)] + pub struct ProtocolConfigPda {} + assert_eq!( + protocol_config_discriminator, + &ProtocolConfigPda::LIGHT_DISCRIMINATOR + ); +} diff --git a/sdk-libs/macros/tests/hasher.rs 
b/sdk-libs/macros/tests/hasher.rs index 4b58c57ab7..1154569be6 100644 --- a/sdk-libs/macros/tests/hasher.rs +++ b/sdk-libs/macros/tests/hasher.rs @@ -1199,11 +1199,15 @@ fn test_solana_program_pubkey() { assert_eq!(manual_hash, hash); // Sha256 - let manual_hash = Sha256::hash( + let mut manual_hash = Sha256::hash( light_compressed_account::hash_to_bn254_field_size_be(manual_bytes.as_slice()) .as_slice(), ) .unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } @@ -1215,7 +1219,11 @@ fn test_solana_program_pubkey() { assert_eq!(manual_hash, hash); // Sha256 - let manual_hash = Sha256::hash([0u8; 32].as_slice()).unwrap(); + let mut manual_hash = Sha256::hash([0u8; 32].as_slice()).unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } @@ -1240,11 +1248,15 @@ fn test_solana_program_pubkey() { assert_eq!(manual_hash, hash); // Sha256 - let manual_hash = Sha256::hash( + let mut manual_hash = Sha256::hash( light_compressed_account::hash_to_bn254_field_size_be(manual_bytes.as_slice()) .as_slice(), ) .unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } @@ -1272,11 +1284,15 @@ fn test_solana_program_pubkey() { assert_eq!(manual_hash, hash); // Sha256 - let manual_hash = Sha256::hash( + let mut manual_hash = Sha256::hash( light_compressed_account::hash_to_bn254_field_size_be(manual_bytes.as_slice()) .as_slice(), ) .unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } @@ -1294,17 +1310,131 @@ fn test_solana_program_pubkey() { assert_eq!(manual_hash, hash); // Sha256 - 
let manual_hash = Sha256::hash( + let mut manual_hash = Sha256::hash( light_compressed_account::hash_to_bn254_field_size_be(manual_bytes.as_slice()) .as_slice(), ) .unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } } } +#[test] +fn test_light_hasher_sha_macro() { + use light_sdk_macros::LightHasherSha; + + // Test struct with many fields that would exceed Poseidon's limit + #[derive(LightHasherSha, BorshSerialize, BorshDeserialize, Clone)] + struct LargeShaStruct { + pub field1: u64, + pub field2: u64, + pub field3: u64, + pub field4: u64, + pub field5: u64, + pub field6: u64, + pub field7: u64, + pub field8: u64, + pub field9: u64, + pub field10: u64, + pub field11: u64, + pub field12: u64, + pub field13: u64, + pub field14: u64, + pub field15: u64, + pub owner: Pubkey, + pub authority: Pubkey, + } + + let test_struct = LargeShaStruct { + field1: 1, + field2: 2, + field3: 3, + field4: 4, + field5: 5, + field6: 6, + field7: 7, + field8: 8, + field9: 9, + field10: 10, + field11: 11, + field12: 12, + field13: 13, + field14: 14, + field15: 15, + owner: Pubkey::new_unique(), + authority: Pubkey::new_unique(), + }; + + // Verify the hash matches manual SHA256 hashing + let bytes = test_struct.try_to_vec().unwrap(); + let mut ref_hash = Sha256::hash(bytes.as_slice()).unwrap(); + + // Apply truncation for non-Poseidon hashers (ID != 0) + if Sha256::ID != 0 { + ref_hash[0] = 0; + } + + // Test with SHA256 hasher + let hash_result = test_struct.hash::().unwrap(); + assert_eq!( + hash_result, ref_hash, + "SHA256 hash should match manual hash" + ); + + // Test ToByteArray implementation + let byte_array_result = test_struct.to_byte_array().unwrap(); + assert_eq!( + byte_array_result, ref_hash, + "ToByteArray should match SHA256 hash" + ); + + // Test another struct with different values + let test_struct2 = LargeShaStruct { + field1: 100, + 
field2: 200, + field3: 300, + field4: 400, + field5: 500, + field6: 600, + field7: 700, + field8: 800, + field9: 900, + field10: 1000, + field11: 1100, + field12: 1200, + field13: 1300, + field14: 1400, + field15: 1500, + owner: Pubkey::new_unique(), + authority: Pubkey::new_unique(), + }; + + let bytes2 = test_struct2.try_to_vec().unwrap(); + let mut ref_hash2 = Sha256::hash(bytes2.as_slice()).unwrap(); + + if Sha256::ID != 0 { + ref_hash2[0] = 0; + } + + let hash_result2 = test_struct2.hash::().unwrap(); + assert_eq!( + hash_result2, ref_hash2, + "Second SHA256 hash should match manual hash" + ); + + // Ensure different structs produce different hashes + assert_ne!( + hash_result, hash_result2, + "Different structs should produce different hashes" + ); +} + // Option #[test] fn test_borsh() { @@ -1340,11 +1470,15 @@ fn test_borsh() { assert_eq!(manual_hash, hash); // Sha256 - let manual_hash = Sha256::hash( + let mut manual_hash = Sha256::hash( light_compressed_account::hash_to_bn254_field_size_be(manual_bytes.as_slice()) .as_slice(), ) .unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } @@ -1356,7 +1490,11 @@ fn test_borsh() { assert_eq!(manual_hash, hash); // Sha256 - let manual_hash = Sha256::hash([0u8; 32].as_slice()).unwrap(); + let mut manual_hash = Sha256::hash([0u8; 32].as_slice()).unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } @@ -1383,11 +1521,15 @@ fn test_borsh() { assert_eq!(manual_hash, hash); // Sha256 - let manual_hash = Sha256::hash( + let mut manual_hash = Sha256::hash( light_compressed_account::hash_to_bn254_field_size_be(manual_bytes.as_slice()) .as_slice(), ) .unwrap(); + // Apply truncation for non-Poseidon hashers + if Sha256::ID != 0 { + manual_hash[0] = 0; + } let hash = 
pubkey_struct.hash::().unwrap(); assert_eq!(manual_hash, hash); } diff --git a/sdk-libs/sdk/Cargo.toml b/sdk-libs/sdk/Cargo.toml index 9afeb4af92..3854dd58c2 100644 --- a/sdk-libs/sdk/Cargo.toml +++ b/sdk-libs/sdk/Cargo.toml @@ -47,3 +47,4 @@ light-zero-copy = { workspace = true } [dev-dependencies] num-bigint = { workspace = true } light-compressed-account = { workspace = true, features = ["new-unique"] } +anchor-lang = { workspace = true } diff --git a/sdk-libs/sdk/src/account.rs b/sdk-libs/sdk/src/account.rs index 8206696040..c0bc56f450 100644 --- a/sdk-libs/sdk/src/account.rs +++ b/sdk-libs/sdk/src/account.rs @@ -5,29 +5,29 @@ //! and wraps the compressed account data so that it is easy to use. //! //! Data structs used with LightAccount must implement the traits: -//! - DataHasher //! - LightDiscriminator //! - BorshSerialize, BorshDeserialize //! - Debug, Default, Clone //! //! ### Account Data Hashing -//! The LightHasher derives a hashing scheme from the compressed account layout. +//! +//! Sha256 data hashing is recommended for most use cases. +//! Account data is serialized into a vector with borsh and hashed with Sha256. +//! +//! Poseidon data hashing is recommended for zk use cases. +//! The data struct needs to implement the DataHasher trait. +//! The LightHasher derive macro implements the DataHasher trait, deriving a hashing scheme from the compressed account layout. //! Alternatively, DataHasher can be implemented manually. +//! Poseidon hashing is CU intensive and has limitations with regard to hash inputs; see the Poseidon module for details. //! -//! Constraints: -//! - Poseidon hashes can only take up to 12 inputs -//! -> use nested structs for structs with more than 12 fields. -//! - Poseidon hashes inputs must be less than bn254 field size (254 bits). -//! hash_to_field_size methods in light hasher can be used to hash data longer than 253 bits. -//! -> use the `#[hash]` attribute for fields with data types greater than 31 bytes eg Pubkeys. //! -//! 
### Compressed account with LightHasher and LightDiscriminator +//! ### Compressed account with LightDiscriminator //! ``` -//! use light_sdk::{LightHasher, LightDiscriminator}; +//! use light_sdk::LightDiscriminator; //! use solana_pubkey::Pubkey; -//! #[derive(Clone, Debug, Default, LightHasher, LightDiscriminator)] +//! use borsh::{BorshSerialize, BorshDeserialize}; +//! #[derive(Clone, Debug, Default, LightDiscriminator, BorshSerialize, BorshDeserialize)] //! pub struct CounterAccount { -//! #[hash] //! pub owner: Pubkey, //! pub counter: u64, //! } @@ -35,37 +35,85 @@ //! //! //! ### Create compressed account -//! ```ignore +//! ```rust +//! use light_sdk::{LightAccount, LightDiscriminator}; +//! use borsh::{BorshSerialize, BorshDeserialize}; +//! use solana_pubkey::Pubkey; +//! +//! #[derive(Clone, Debug, Default, LightDiscriminator, BorshSerialize, BorshDeserialize)] +//! pub struct CounterAccount { +//! pub owner: Pubkey, +//! pub counter: u64, +//! }; +//! +//! let program_id = Pubkey::new_unique(); +//! let address = [0u8; 32]; +//! let output_tree_index = 0u8; +//! let owner = Pubkey::new_unique(); +//! //! let mut my_compressed_account = LightAccount::<'_, CounterAccount>::new_init( -//! &crate::ID, +//! &program_id, //! // Address //! Some(address), //! output_tree_index, //! ); //! // Set data: -//! my_compressed_account.owner = ctx.accounts.signer.key(); +//! my_compressed_account.owner = owner; //! ``` //! ### Update compressed account -//! ```ignore +//! ```rust +//! use light_sdk::{LightAccount, LightDiscriminator}; +//! use light_sdk::instruction::account_meta::CompressedAccountMeta; +//! use borsh::{BorshSerialize, BorshDeserialize}; +//! use solana_pubkey::Pubkey; +//! +//! #[derive(Clone, Debug, Default, LightDiscriminator, BorshSerialize, BorshDeserialize)] +//! pub struct CounterAccount { +//! pub owner: Pubkey, +//! pub counter: u64, +//! }; +//! +//! let program_id = Pubkey::new_unique(); +//! 
let account_meta = CompressedAccountMeta::default(); +//! let compressed_account_data = CounterAccount::default(); +//! //! let mut my_compressed_account = LightAccount::<'_, CounterAccount>::new_mut( -//! &crate::ID, +//! &program_id, //! &account_meta, -//! my_compressed_account, -//! ); +//! compressed_account_data, +//! ).unwrap(); //! // Increment counter. //! my_compressed_account.counter += 1; //! ``` //! ### Close compressed account -//! ```ignore -//! let mut my_compressed_account = LightAccount::<'_, CounterAccount>::new_close( -//! &crate::ID, +//! ```rust +//! use light_sdk::{LightAccount, LightDiscriminator}; +//! use light_sdk::instruction::account_meta::CompressedAccountMetaClose; +//! use borsh::{BorshSerialize, BorshDeserialize}; +//! use solana_pubkey::Pubkey; +//! +//! #[derive(Clone, Debug, Default, LightDiscriminator, BorshSerialize, BorshDeserialize)] +//! pub struct CounterAccount { +//! pub owner: Pubkey, +//! pub counter: u64, +//! }; +//! +//! let program_id = Pubkey::new_unique(); +//! let account_meta_close = CompressedAccountMetaClose::default(); +//! let compressed_account_data = CounterAccount::default(); +//! +//! let _my_compressed_account = LightAccount::<'_, CounterAccount>::new_close( +//! &program_id, //! &account_meta_close, -//! my_compressed_account, -//! ); +//! compressed_account_data, +//! ).unwrap(); //! 
``` // TODO: add example for manual hashing -use std::ops::{Deref, DerefMut}; +use std::{ + marker::PhantomData, + ops::{Deref, DerefMut}, +}; use light_compressed_account::{ compressed_account::PackedMerkleContext, @@ -76,191 +124,524 @@ use solana_pubkey::Pubkey; use crate::{ error::LightSdkError, - light_hasher::{DataHasher, Poseidon}, + light_hasher::{DataHasher, Hasher, Poseidon, Sha256}, AnchorDeserialize, AnchorSerialize, LightDiscriminator, }; -#[derive(Debug, PartialEq)] -pub struct LightAccount< - 'a, - A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + DataHasher + Default, -> { - owner: &'a Pubkey, - pub account: A, - account_info: CompressedAccountInfo, +const DEFAULT_DATA_HASH: [u8; 32] = [0u8; 32]; + +pub trait Size { + fn size(&self) -> usize; +} + +/// SHA256 borsh flat hashed Light Account. +/// This is the recommended account type for most use cases. +pub mod sha { + use super::*; + /// Light Account variant that uses SHA256 hashing with flat borsh serialization. + /// This is the recommended account type for most use cases. + pub type LightAccount<'a, A> = super::LightAccountInner<'a, Sha256, A, true>; +} + +/// Poseidon hashed Light Account. +/// Poseidon hashing is zk friendly and enables you to do zk proofs over your compressed account data. +pub mod poseidon { + use super::*; + /// Light Account type using Poseidon hashing. + /// Poseidon hashing is zk friendly and enables you to do zk proofs over your compressed account. + /// ### Compressed account with LightHasher and LightDiscriminator + /// ```rust + /// use light_sdk::{LightHasher, LightDiscriminator}; + /// use solana_pubkey::Pubkey; + /// #[derive(Clone, Debug, Default, LightHasher, LightDiscriminator)] + /// pub struct CounterAccount { + /// #[hash] + /// pub owner: Pubkey, + /// pub counter: u64, + /// } + /// ``` + /// Constraints: + /// - Poseidon hashes can only take up to 12 inputs + /// -> use nested structs for structs with more than 12 fields. 
+ /// - Poseidon hashes inputs must be less than bn254 field size (254 bits). + /// hash_to_field_size methods in light hasher can be used to hash data longer than 253 bits. + /// -> use the `#[hash]` attribute for fields with data types greater than 31 bytes eg Pubkeys. + pub type LightAccount<'a, A> = super::LightAccountInner<'a, Poseidon, A, false>; } -impl<'a, A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + DataHasher + Default> - LightAccount<'a, A> -{ - pub fn new_init( +#[doc(hidden)] +pub use __internal::LightAccountInner; + +/// INTERNAL IMPLEMENTATION - DO NOT USE DIRECTLY +/// **Use the type aliases instead:** +/// - `LightAccount` for Poseidon hashing +/// - `sha::LightAccount` for SHA256 hashing +#[doc(hidden)] +pub mod __internal { + use light_sdk_types::instruction::account_meta::CompressedAccountMetaClose; + + use super::*; + + #[doc(hidden)] + #[derive(Debug, PartialEq)] + pub struct LightAccountInner< + 'a, + H: Hasher, + A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + Default, + const HASH_FLAT: bool, + > { owner: &'a Pubkey, - address: Option<[u8; 32]>, - output_state_tree_index: u8, - ) -> Self { - let output_account_info = OutAccountInfo { - output_merkle_tree_index: output_state_tree_index, - discriminator: A::LIGHT_DISCRIMINATOR, - ..Default::default() - }; - Self { - owner, - account: A::default(), - account_info: CompressedAccountInfo { - address, - input: None, - output: Some(output_account_info), - }, - } + pub account: A, + account_info: CompressedAccountInfo, + should_remove_data: bool, + _hasher: PhantomData, } - pub fn new_mut( - owner: &'a Pubkey, - input_account_meta: &impl CompressedAccountMetaTrait, - input_account: A, - ) -> Result { - let input_account_info = { - let input_data_hash = input_account.hash::()?; - let tree_info = input_account_meta.get_tree_info(); - InAccountInfo { - data_hash: input_data_hash, - lamports: input_account_meta.get_lamports().unwrap_or_default(), - merkle_context: 
PackedMerkleContext { - merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, - queue_pubkey_index: tree_info.queue_pubkey_index, - leaf_index: tree_info.leaf_index, - prove_by_index: tree_info.prove_by_index, - }, - root_index: input_account_meta.get_root_index().unwrap_or_default(), - discriminator: A::LIGHT_DISCRIMINATOR, - } - }; - let output_account_info = { - let output_merkle_tree_index = input_account_meta - .get_output_state_tree_index() - .ok_or(LightSdkError::OutputStateTreeIndexIsNone)?; - OutAccountInfo { - lamports: input_account_meta.get_lamports().unwrap_or_default(), - output_merkle_tree_index, + impl< + 'a, + H: Hasher, + A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + Default, + const HASH_FLAT: bool, + > LightAccountInner<'a, H, A, HASH_FLAT> + { + pub fn new_init( + owner: &'a Pubkey, + address: Option<[u8; 32]>, + output_state_tree_index: u8, + ) -> Self { + let output_account_info = OutAccountInfo { + output_merkle_tree_index: output_state_tree_index, discriminator: A::LIGHT_DISCRIMINATOR, ..Default::default() + }; + Self { + owner, + account: A::default(), + account_info: CompressedAccountInfo { + address, + input: None, + output: Some(output_account_info), + }, + should_remove_data: false, + _hasher: PhantomData, } - }; - - Ok(Self { - owner, - account: input_account, - account_info: CompressedAccountInfo { - address: input_account_meta.get_address(), - input: Some(input_account_info), - output: Some(output_account_info), - }, - }) - } + } - pub fn new_close( - owner: &'a Pubkey, - input_account_meta: &impl CompressedAccountMetaTrait, - input_account: A, - ) -> Result { - let input_account_info = { - let input_data_hash = input_account.hash::()?; - let tree_info = input_account_meta.get_tree_info(); - InAccountInfo { - data_hash: input_data_hash, - lamports: input_account_meta.get_lamports().unwrap_or_default(), - merkle_context: PackedMerkleContext { - merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, - 
queue_pubkey_index: tree_info.queue_pubkey_index, - leaf_index: tree_info.leaf_index, - prove_by_index: tree_info.prove_by_index, - }, - root_index: input_account_meta.get_root_index().unwrap_or_default(), - discriminator: A::LIGHT_DISCRIMINATOR, + pub fn discriminator(&self) -> &[u8; 8] { + &A::LIGHT_DISCRIMINATOR + } + + pub fn lamports(&self) -> u64 { + if let Some(output) = self.account_info.output.as_ref() { + output.lamports + } else if let Some(input) = self.account_info.input.as_ref() { + input.lamports + } else { + 0 } - }; - Ok(Self { - owner, - account: input_account, - account_info: CompressedAccountInfo { - address: input_account_meta.get_address(), - input: Some(input_account_info), - output: None, - }, - }) - } + } - pub fn discriminator(&self) -> &[u8; 8] { - &A::LIGHT_DISCRIMINATOR - } + pub fn lamports_mut(&mut self) -> &mut u64 { + if let Some(output) = self.account_info.output.as_mut() { + &mut output.lamports + } else if let Some(input) = self.account_info.input.as_mut() { + &mut input.lamports + } else { + panic!("No lamports field available in account_info") + } + } - pub fn lamports(&self) -> u64 { - if let Some(output) = self.account_info.output.as_ref() { - output.lamports - } else if let Some(input) = self.account_info.input.as_ref() { - input.lamports - } else { - 0 + pub fn address(&self) -> &Option<[u8; 32]> { + &self.account_info.address } - } - pub fn lamports_mut(&mut self) -> &mut u64 { - if let Some(output) = self.account_info.output.as_mut() { - &mut output.lamports - } else if let Some(input) = self.account_info.input.as_mut() { - &mut input.lamports - } else { - panic!("No lamports field available in account_info") + pub fn owner(&self) -> &Pubkey { + self.owner } - } - pub fn address(&self) -> &Option<[u8; 32]> { - &self.account_info.address - } + pub fn in_account_info(&self) -> &Option { + &self.account_info.input + } - pub fn owner(&self) -> &Pubkey { - self.owner + pub fn out_account_info(&mut self) -> &Option { + 
&self.account_info.output + } } - pub fn in_account_info(&self) -> &Option { - &self.account_info.input - } + // Specialized implementation for HASH_FLAT = false (structured hashing with DataHasher) + impl< + 'a, + H: Hasher, + A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + DataHasher + Default, + > LightAccountInner<'a, H, A, false> + { + pub fn new_mut( + owner: &'a Pubkey, + input_account_meta: &impl CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let input_account_info = { + // For HASH_FLAT = false, always use DataHasher + let input_data_hash = input_account.hash::()?; + let tree_info = input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: A::LIGHT_DISCRIMINATOR, + } + }; + let output_account_info = { + let output_merkle_tree_index = input_account_meta + .get_output_state_tree_index() + .ok_or(LightSdkError::OutputStateTreeIndexIsNone)?; + OutAccountInfo { + lamports: input_account_meta.get_lamports().unwrap_or_default(), + output_merkle_tree_index, + discriminator: A::LIGHT_DISCRIMINATOR, + ..Default::default() + } + }; + + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: Some(output_account_info), + }, + should_remove_data: false, + _hasher: PhantomData, + }) + } + + pub fn new_empty( + owner: &'a Pubkey, + input_account_meta: &impl CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let input_account_info = { + let input_data_hash = DEFAULT_DATA_HASH; + let tree_info = 
input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: [0u8; 8], + } + }; + let output_account_info = { + let output_merkle_tree_index = input_account_meta + .get_output_state_tree_index() + .ok_or(LightSdkError::OutputStateTreeIndexIsNone)?; + OutAccountInfo { + lamports: input_account_meta.get_lamports().unwrap_or_default(), + output_merkle_tree_index, + discriminator: A::LIGHT_DISCRIMINATOR, + ..Default::default() + } + }; + + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: Some(output_account_info), + }, + should_remove_data: false, + _hasher: PhantomData, + }) + } + + pub fn new_close( + owner: &'a Pubkey, + input_account_meta: &impl CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let mut account = Self::new_mut(owner, input_account_meta, input_account)?; + account.should_remove_data = true; + + Ok(account) + } + + /// Closes the compressed account. + /// Define whether to close the account permanently or not. + /// The address of an account that is closed permanently cannot be created again. + /// For accounts that are not closed permanently the accounts address + /// continues to exist in an account with discriminator and without data. 
+ pub fn new_close_permanent( + owner: &'a Pubkey, + input_account_meta: &CompressedAccountMetaClose, + input_account: A, + ) -> Result { + let input_account_info = { + // For HASH_FLAT = false, always use DataHasher + let input_data_hash = input_account.hash::()?; + let tree_info = input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: A::LIGHT_DISCRIMINATOR, + } + }; + + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: None, + }, + should_remove_data: false, + _hasher: PhantomData, + }) + } - pub fn out_account_info(&mut self) -> &Option { - &self.account_info.output + pub fn to_account_info(mut self) -> Result { + if let Some(output) = self.account_info.output.as_mut() { + if self.should_remove_data { + // Data should be empty to close account. + if !output.data.is_empty() { + return Err(LightSdkError::ExpectedNoData); + } + output.data_hash = DEFAULT_DATA_HASH; + output.discriminator = [0u8; 8]; + } else { + output.data = self + .account + .try_to_vec() + .map_err(|_| LightSdkError::Borsh)?; + // For HASH_FLAT = false, always use DataHasher + output.data_hash = self.account.hash::()?; + } + } + Ok(self.account_info) + } } - /// 1. Serializes the account data and sets the output data hash. - /// 2. Returns CompressedAccountInfo. - /// - /// Note this is an expensive operation - /// that should only be called once per instruction. 
- pub fn to_account_info(mut self) -> Result { - if let Some(output) = self.account_info.output.as_mut() { - output.data_hash = self.account.hash::()?; - output.data = self - .account - .try_to_vec() - .map_err(|_| LightSdkError::Borsh)?; + // Specialized implementation for HASH_FLAT = true (flat serialization without DataHasher) + impl<'a, H: Hasher, A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + Default> + LightAccountInner<'a, H, A, true> + { + pub fn new_mut( + owner: &'a Pubkey, + input_account_meta: &impl CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let input_account_info = { + // For HASH_FLAT = true, use direct serialization + let data = input_account + .try_to_vec() + .map_err(|_| LightSdkError::Borsh)?; + let mut input_data_hash = H::hash(data.as_slice())?; + input_data_hash[0] = 0; + let tree_info = input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: A::LIGHT_DISCRIMINATOR, + } + }; + let output_account_info = { + let output_merkle_tree_index = input_account_meta + .get_output_state_tree_index() + .ok_or(LightSdkError::OutputStateTreeIndexIsNone)?; + OutAccountInfo { + lamports: input_account_meta.get_lamports().unwrap_or_default(), + output_merkle_tree_index, + discriminator: A::LIGHT_DISCRIMINATOR, + ..Default::default() + } + }; + + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: Some(output_account_info), + }, + should_remove_data: false, + _hasher: PhantomData, + }) + } 
+ + pub fn new_empty( + owner: &'a Pubkey, + input_account_meta: &impl CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let input_account_info = { + let input_data_hash = DEFAULT_DATA_HASH; + let tree_info = input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: [0u8; 8], + } + }; + let output_account_info = { + let output_merkle_tree_index = input_account_meta + .get_output_state_tree_index() + .ok_or(LightSdkError::OutputStateTreeIndexIsNone)?; + OutAccountInfo { + lamports: input_account_meta.get_lamports().unwrap_or_default(), + output_merkle_tree_index, + discriminator: A::LIGHT_DISCRIMINATOR, + ..Default::default() + } + }; + + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: Some(output_account_info), + }, + should_remove_data: false, + _hasher: PhantomData, + }) + } + + pub fn new_close( + owner: &'a Pubkey, + input_account_meta: &impl CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let mut account = Self::new_mut(owner, input_account_meta, input_account)?; + account.should_remove_data = true; + Ok(account) + } + + /// Closes the compressed account. + /// Define whether to close the account permanently or not. + /// The address of an account that is closed permanently cannot be created again. + /// For accounts that are not closed permanently the accounts address + /// continues to exist in an account without discriminator and data. 
+ pub fn new_close_permanent( + owner: &'a Pubkey, + input_account_meta: &CompressedAccountMetaClose, + input_account: A, + ) -> Result { + let input_account_info = { + // For HASH_FLAT = true, use direct serialization + let data = input_account + .try_to_vec() + .map_err(|_| LightSdkError::Borsh)?; + let mut input_data_hash = H::hash(data.as_slice())?; + input_data_hash[0] = 0; + let tree_info = input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: A::LIGHT_DISCRIMINATOR, + } + }; + + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: None, + }, + should_remove_data: false, + _hasher: PhantomData, + }) + } + + pub fn to_account_info(mut self) -> Result { + if let Some(output) = self.account_info.output.as_mut() { + if self.should_remove_data { + // Data should be empty to close account. 
+ if !output.data.is_empty() { + return Err(LightSdkError::ExpectedNoData); + } + output.data_hash = DEFAULT_DATA_HASH; + output.discriminator = [0u8; 8]; + } else { + output.data = self + .account + .try_to_vec() + .map_err(|_| LightSdkError::Borsh)?; + // For HASH_FLAT = true, use direct serialization + output.data_hash = H::hash(output.data.as_slice())?; + output.data_hash[0] = 0; + } + } + Ok(self.account_info) } - Ok(self.account_info) } -} -impl Deref - for LightAccount<'_, A> -{ - type Target = A; + impl< + H: Hasher, + A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + Default, + const HASH_FLAT: bool, + > Deref for LightAccountInner<'_, H, A, HASH_FLAT> + { + type Target = A; - fn deref(&self) -> &Self::Target { - &self.account + fn deref(&self) -> &Self::Target { + &self.account + } } -} -impl DerefMut - for LightAccount<'_, A> -{ - fn deref_mut(&mut self) -> &mut ::Target { - &mut self.account + impl< + H: Hasher, + A: AnchorSerialize + AnchorDeserialize + LightDiscriminator + Default, + const HASH_FLAT: bool, + > DerefMut for LightAccountInner<'_, H, A, HASH_FLAT> + { + fn deref_mut(&mut self) -> &mut ::Target { + &mut self.account + } } } diff --git a/sdk-libs/sdk/src/error.rs b/sdk-libs/sdk/src/error.rs index 3f797a71a6..9e614b44cb 100644 --- a/sdk-libs/sdk/src/error.rs +++ b/sdk-libs/sdk/src/error.rs @@ -76,6 +76,8 @@ pub enum LightSdkError { InvalidSolPoolPdaAccount, #[error("CpigAccounts accounts slice starts with an invalid account. 
It should start with LightSystemProgram SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7.")] InvalidCpiAccountsOffset, + #[error("Expected LightAccount to have no data for closure.")] + ExpectedNoData, #[error(transparent)] Hasher(#[from] HasherError), #[error(transparent)] @@ -159,6 +161,7 @@ impl From for u32 { LightSdkError::InvalidCpiContextAccount => 16032, LightSdkError::InvalidSolPoolPdaAccount => 16033, LightSdkError::InvalidCpiAccountsOffset => 16034, + LightSdkError::ExpectedNoData => 16035, LightSdkError::AccountError(e) => e.into(), LightSdkError::Hasher(e) => e.into(), LightSdkError::ZeroCopy(e) => e.into(), diff --git a/sdk-libs/sdk/src/lib.rs b/sdk-libs/sdk/src/lib.rs index b8eef1be97..c4a11ebe4e 100644 --- a/sdk-libs/sdk/src/lib.rs +++ b/sdk-libs/sdk/src/lib.rs @@ -21,7 +21,7 @@ //! Deploy on devnet and mainnet only without v2 features enabled. //! //! ### Example Solana program code to create a compressed account -//! ```ignore +//! ```rust, compile_fail //! use anchor_lang::{prelude::*, Discriminator}; //! use light_sdk::{ //! account::LightAccount, @@ -93,9 +93,8 @@ //! pub fee_payer: Signer<'info>, //! } //! -//! #[derive(Clone, Debug, Default, LightHasher, LightDiscriminator)] +//! #[derive(Clone, Debug, Default, LightDiscriminator)] //!pub struct CounterAccount { -//! #[hash] //! pub owner: Pubkey, //! pub counter: u64 //!} @@ -103,6 +102,15 @@ /// Compressed account abstraction similar to anchor Account. pub mod account; +pub use account::sha::LightAccount; + +/// SHA256-based variants +pub mod sha { + pub use light_sdk_macros::LightHasherSha as LightHasher; + + pub use crate::account::sha::LightAccount; +} + /// Functions to derive compressed account addresses. pub mod address; /// Utilities to invoke the light-system-program via cpi. 
@@ -123,7 +131,8 @@ use borsh::{BorshDeserialize as AnchorDeserialize, BorshSerialize as AnchorSeria pub use light_account_checks::{self, discriminator::Discriminator as LightDiscriminator}; pub use light_hasher; pub use light_sdk_macros::{ - derive_light_cpi_signer, light_system_accounts, LightDiscriminator, LightHasher, LightTraits, + derive_light_cpi_signer, light_system_accounts, LightDiscriminator, LightHasher, + LightHasherSha, LightTraits, }; pub use light_sdk_types::constants; use solana_account_info::AccountInfo; diff --git a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs index ba0242cc30..b50e033f2b 100644 --- a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs +++ b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs @@ -3,12 +3,17 @@ use anchor_lang::{prelude::*, Discriminator}; use light_sdk::{ - account::LightAccount, + // anchor test test poseidon LightAccount, native tests sha256 LightAccount + account::poseidon::LightAccount, address::v1::derive_address, cpi::{CpiAccounts, CpiInputs, CpiSigner}, derive_light_cpi_signer, - instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, - LightDiscriminator, LightHasher, + instruction::{ + account_meta::{CompressedAccountMeta, CompressedAccountMetaClose}, + PackedAddressTreeInfo, ValidityProof, + }, + LightDiscriminator, + LightHasher, }; declare_id!("2tzfijPBGbrR5PboyFUFKzfEoLTwdDSHUjANCw929wyt"); @@ -103,6 +108,103 @@ pub mod sdk_anchor_test { Ok(()) } + pub fn close_compressed_account<'info>( + ctx: Context<'_, '_, '_, 'info, UpdateNestedData<'info>>, + proof: ValidityProof, + my_compressed_account: MyCompressedAccount, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + let my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_close( + &crate::ID, + &account_meta, + my_compressed_account, + ) + .map_err(ProgramError::from)?; + + let 
light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let cpi_inputs = CpiInputs::new( + proof, + vec![my_compressed_account + .to_account_info() + .map_err(ProgramError::from)?], + ); + + cpi_inputs + .invoke_light_system_program(light_cpi_accounts) + .map_err(ProgramError::from)?; + + Ok(()) + } + + pub fn reinit_closed_account<'info>( + ctx: Context<'_, '_, '_, 'info, UpdateNestedData<'info>>, + proof: ValidityProof, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + let my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_empty( + &crate::ID, + &account_meta, + MyCompressedAccount::default(), + ) + .map_err(ProgramError::from)?; + + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let cpi_inputs = CpiInputs::new( + proof, + vec![my_compressed_account + .to_account_info() + .map_err(ProgramError::from)?], + ); + + cpi_inputs + .invoke_light_system_program(light_cpi_accounts) + .map_err(ProgramError::from)?; + + Ok(()) + } + + pub fn close_compressed_account_permanent<'info>( + ctx: Context<'_, '_, '_, 'info, UpdateNestedData<'info>>, + proof: ValidityProof, + account_meta: CompressedAccountMetaClose, + ) -> Result<()> { + let my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_close_permanent( + &crate::ID, + &account_meta, + MyCompressedAccount::default(), + ) + .map_err(ProgramError::from)?; + + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let cpi_inputs = CpiInputs::new( + proof, + vec![my_compressed_account + .to_account_info() + .map_err(ProgramError::from)?], + ); + + cpi_inputs + .invoke_light_system_program(light_cpi_accounts) + .map_err(ProgramError::from)?; + + Ok(()) + } + pub fn without_compressed_account<'info>( ctx: Context<'_, '_, '_, 'info, 
WithoutCompressedAccount<'info>>, name: String, diff --git a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/tests/test.rs b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/tests/test.rs index 92e268446e..5d5a71cf0b 100644 --- a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/tests/test.rs +++ b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/tests/test.rs @@ -1,7 +1,8 @@ -#![cfg(feature = "test-sbf")] +// #![cfg(feature = "test-sbf")] use anchor_lang::AnchorDeserialize; use light_client::indexer::CompressedAccount; +use light_compressed_account::compressed_account::CompressedAccountData; use light_program_test::{ indexer::TestIndexerExtensions, program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, @@ -82,6 +83,63 @@ async fn test_anchor_sdk_test() { .data; let record = MyCompressedAccount::deserialize(&mut &record[..]).unwrap(); assert_eq!(record.nested.one, 2); + + // Test close_compressed_account (non-permanent close - data should be None) + // Get the account fresh from RPC for the correct type + let account_to_close = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value; + + close_compressed_account(&mut rpc, &payer, account_to_close) + .await + .unwrap(); + + // Check that account still exists but data is None + let closed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value; + + // Account should still exist at the address + assert_eq!(closed_account.address.unwrap(), address); + assert_eq!(closed_account.owner, sdk_anchor_test::ID_CONST); + + // Data should be None after close + assert_eq!( + closed_account.data, + Some(CompressedAccountData::default()), + "Data should be zero after close" + ); + + // Now reinit the closed account to test permanent close + reinit_closed_account(&mut rpc, &payer, address) + .await + .unwrap(); + + // Get the reinited account + let reinited_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value; + + // Test 
close_compressed_account_permanent (account should not exist after) + close_compressed_account_permanent(&mut rpc, &payer, reinited_account) + .await + .unwrap(); + + // Check that account no longer exists at address + // After permanent close, the account should not exist + let result = rpc.get_compressed_account(address, None).await; + + // The query should succeed but return None/null for the account + assert!( + result.is_err(), // || result.unwrap().value.address.is_none(), + "Account should not exist after permanent close" + ); } async fn create_compressed_account( @@ -194,3 +252,163 @@ async fn update_compressed_account( rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) .await } + +async fn close_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + mut compressed_account: CompressedAccount, +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(sdk_anchor_test::ID); + remaining_accounts.add_system_accounts(config); + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let my_compressed_account = MyCompressedAccount::deserialize( + &mut compressed_account.data.as_mut().unwrap().data.as_slice(), + ) + .unwrap(); + + let instruction = Instruction { + program_id: sdk_anchor_test::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + sdk_anchor_test::instruction::CloseCompressedAccount { + proof: rpc_result.proof, + my_compressed_account, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn reinit_closed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + address: [u8; 32], +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(sdk_anchor_test::ID); + remaining_accounts.add_system_accounts(config); + + // Get closed account + let closed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value; + + let hash = closed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: sdk_anchor_test::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + sdk_anchor_test::instruction::ReinitClosedAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: closed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn close_compressed_account_permanent( + rpc: &mut LightProgramTest, + payer: &Keypair, + compressed_account: CompressedAccount, +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(sdk_anchor_test::ID); + remaining_accounts.add_system_accounts(config); + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + // Import CompressedAccountMetaClose + use light_sdk::instruction::account_meta::CompressedAccountMetaClose; + + let instruction = Instruction { + program_id: sdk_anchor_test::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + sdk_anchor_test::instruction::CloseCompressedAccountPermanent { + proof: rpc_result.proof, + account_meta: CompressedAccountMetaClose { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + }, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} diff --git a/sdk-tests/sdk-native-test/src/create_pda.rs b/sdk-tests/sdk-native-test/src/create_pda.rs index 27f3ba12a5..5a02f0e69c 100644 --- a/sdk-tests/sdk-native-test/src/create_pda.rs +++ b/sdk-tests/sdk-native-test/src/create_pda.rs @@ -1,17 +1,16 @@ use borsh::{BorshDeserialize, BorshSerialize}; use light_sdk::{ - account::LightAccount, cpi::{CpiAccounts, CpiAccountsConfig, CpiInputs}, error::LightSdkError, instruction::{PackedAddressTreeInfo, ValidityProof}, light_hasher::hash_to_field_size::hashv_to_bn254_field_size_be_const_array, - LightDiscriminator, LightHasher, + LightAccount, LightDiscriminator, }; use solana_program::{account_info::AccountInfo, msg}; use crate::ARRAY_LEN; -/// TODO: write test program with A8JgviaEAByMVLBhcebpDQ7NMuZpqBTBigC1b83imEsd (inconvenient program id) +/// TODO: write test program with A8JgviaEAByMVLBhcebpDQ7NMuZpqBTBigC1b83imEsd (inconvenient program id) use v2 instruction for this purpose /// CU usage: /// - sdk pre system program cpi 10,942 CU /// - total with V2 tree: 45,758 CU @@ -73,9 +72,8 @@ pub fn create_pda( Ok(()) } -#[derive(Clone, 
Debug, LightHasher, LightDiscriminator, BorshDeserialize, BorshSerialize)] +#[derive(Clone, Debug, LightDiscriminator, BorshDeserialize, BorshSerialize)] pub struct MyCompressedAccount { - #[hash] pub data: [u8; ARRAY_LEN], } diff --git a/sdk-tests/sdk-native-test/src/update_pda.rs b/sdk-tests/sdk-native-test/src/update_pda.rs index 95d12e4fb7..cda8eb75c6 100644 --- a/sdk-tests/sdk-native-test/src/update_pda.rs +++ b/sdk-tests/sdk-native-test/src/update_pda.rs @@ -1,9 +1,9 @@ use borsh::{BorshDeserialize, BorshSerialize}; use light_sdk::{ - account::LightAccount, cpi::{CpiAccounts, CpiAccountsConfig, CpiInputs}, error::LightSdkError, instruction::{account_meta::CompressedAccountMeta, ValidityProof}, + LightAccount, }; use solana_program::{account_info::AccountInfo, log::sol_log_compute_units};