diff --git a/.github/workflows/light-examples-tests.yml b/.github/workflows/light-examples-tests.yml index 03de67e190..2c0d1d355a 100644 --- a/.github/workflows/light-examples-tests.yml +++ b/.github/workflows/light-examples-tests.yml @@ -56,7 +56,7 @@ jobs: - program: sdk-test-program sub-tests: '["cargo-test-sbf -p sdk-test"]' - program: sdk-anchor-test-program - sub-tests: '["cargo-test-sbf -p sdk-anchor-test"]' + sub-tests: '["cargo-test-sbf -p sdk-anchor-test", "cargo-test-sbf -p sdk-pinocchio-test"]' steps: - name: Checkout sources diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 046a636d7d..c42966e419 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -46,9 +46,9 @@ jobs: test_cmd: | cargo test -p aligned-sized cargo test -p light-bloom-filter - cargo test -p light-hasher - cargo test -p light-compressed-account - cargo test -p light-account-checks + cargo test -p light-hasher --features solana + cargo test -p light-compressed-account --features new-unique + cargo test -p light-account-checks --features solana cargo test -p light-verifier cargo test -p light-merkle-tree-metadata cargo test -p light-zero-copy --features std diff --git a/Cargo.lock b/Cargo.lock index ec82eb694a..a36af9f4e8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -35,6 +35,7 @@ dependencies = [ "rand 0.8.5", "solana-sdk", "solana-security-txt", + "thiserror 2.0.12", "zerocopy 0.8.25", ] @@ -3357,9 +3358,7 @@ dependencies = [ "light-poseidon 0.3.0", "light-zero-copy", "num-bigint 0.4.6", - "pinocchio", "rand 0.8.5", - "solana-program-error", "solana-pubkey", "thiserror 2.0.12", "zerocopy 0.8.25", @@ -3495,7 +3494,6 @@ dependencies = [ "light-compressed-account", "pinocchio", "solana-msg", - "solana-program-error", "solana-sysvar", "thiserror 2.0.12", "zerocopy 0.8.25", @@ -3629,7 +3627,6 @@ dependencies = [ "light-hasher", "light-macros", "light-sdk-macros", - "light-verifier", "num-bigint 0.4.6", "solana-account-info", "solana-cpi", @@ 
-3644,7 +3641,6 @@ dependencies = [ name = "light-sdk-macros" version = "0.6.0" dependencies = [ - "ark-bn254 0.5.0", "borsh 0.10.4", "light-compressed-account", "light-hasher", @@ -3656,6 +3652,20 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "light-sdk-pinocchio" +version = "0.12.0" +dependencies = [ + "borsh 0.10.4", + "light-account-checks", + "light-compressed-account", + "light-hasher", + "light-macros", + "light-sdk-macros", + "pinocchio", + "thiserror 2.0.12", +] + [[package]] name = "light-sparse-merkle-tree" version = "0.1.0" @@ -3701,6 +3711,7 @@ dependencies = [ "pinocchio", "pinocchio-system", "rand 0.8.5", + "solana-msg", "solana-pubkey", "solana-security-txt", "thiserror 2.0.12", @@ -5284,6 +5295,22 @@ dependencies = [ "tokio", ] +[[package]] +name = "sdk-pinocchio-test" +version = "1.0.0" +dependencies = [ + "borsh 0.10.4", + "light-compressed-account", + "light-hasher", + "light-macros", + "light-program-test", + "light-sdk", + "light-sdk-pinocchio", + "pinocchio", + "solana-sdk", + "tokio", +] + [[package]] name = "sdk-test" version = "1.0.0" diff --git a/Cargo.toml b/Cargo.toml index f5e170aae1..659208b88a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,6 +21,7 @@ members = [ "sdk-libs/client", "sdk-libs/macros", "sdk-libs/sdk", + "sdk-libs/sdk-pinocchio", "sdk-libs/photon-api", "sdk-libs/program-test", "xtask", @@ -38,6 +39,7 @@ members = [ # Issue is that anchor discriminator now returns a slice instead of an array "program-tests/sdk-anchor-test/programs/sdk-anchor-test", "program-tests/sdk-test", + "program-tests/sdk-pinocchio-test", "program-tests/create-address-test-program", "program-tests/utils", "program-tests/merkle-tree", diff --git a/examples/anchor/counter/src/lib.rs b/examples/anchor/counter/src/lib.rs index fc4b4c28ed..f0b26e4670 100644 --- a/examples/anchor/counter/src/lib.rs +++ b/examples/anchor/counter/src/lib.rs @@ -25,7 +25,6 @@ pub mod counter { address_tree_info: PackedAddressTreeInfo, output_state_tree_index: 
u8, ) -> Result<()> { - let program_id = crate::ID.into(); // LightAccount::new_init will create an account with empty output state (no input state). // Modifying the account will modify the output state that when converted to_account_info() // is hashed with poseidon hashes, serialized with borsh @@ -49,7 +48,7 @@ pub mod counter { let new_address_params = address_tree_info.into_new_address_params_packed(address_seed); let mut counter = LightAccount::<'_, CounterAccount>::new_init( - &program_id, + &crate::ID, Some(address), output_state_tree_index, ); @@ -74,7 +73,6 @@ pub mod counter { counter_value: u64, account_meta: CompressedAccountMeta, ) -> Result<()> { - let program_id = crate::ID.into(); // LightAccount::new_mut will create an account with input state and output state. // The input state is hashed immediately when calling new_mut(). // Modifying the account will modify the output state that when converted to_account_info() @@ -82,7 +80,7 @@ pub mod counter { // and created with invoke_light_system_program by invoking the light-system-program. // The hashing scheme is the account structure derived with LightHasher. 
let mut counter = LightAccount::<'_, CounterAccount>::new_mut( - &program_id, + &crate::ID, &account_meta, CounterAccount { owner: ctx.accounts.signer.key(), @@ -116,9 +114,8 @@ pub mod counter { counter_value: u64, account_meta: CompressedAccountMeta, ) -> Result<()> { - let program_id = crate::ID.into(); let mut counter = LightAccount::<'_, CounterAccount>::new_mut( - &program_id, + &crate::ID, &account_meta, CounterAccount { owner: ctx.accounts.signer.key(), @@ -158,9 +155,8 @@ pub mod counter { counter_value: u64, account_meta: CompressedAccountMeta, ) -> Result<()> { - let program_id = crate::ID.into(); let mut counter = LightAccount::<'_, CounterAccount>::new_mut( - &program_id, + &crate::ID, &account_meta, CounterAccount { owner: ctx.accounts.signer.key(), @@ -195,12 +191,11 @@ pub mod counter { counter_value: u64, account_meta: CompressedAccountMetaClose, ) -> Result<()> { - let program_id = crate::ID.into(); // LightAccount::new_close() will create an account with only input state and no output state. // By providing no output state the account is closed after the instruction. // The address of a closed account cannot be reused. 
let counter = LightAccount::<'_, CounterAccount>::new_close( - &program_id, + &crate::ID, &account_meta, CounterAccount { owner: ctx.accounts.signer.key(), diff --git a/examples/anchor/counter/tests/test.rs b/examples/anchor/counter/tests/test.rs index f9843fb04e..ea5da12d73 100644 --- a/examples/anchor/counter/tests/test.rs +++ b/examples/anchor/counter/tests/test.rs @@ -1,4 +1,4 @@ -// #![cfg(feature = "test-sbf")] +#![cfg(feature = "test-sbf")] use anchor_lang::{AnchorDeserialize, InstructionData, ToAccountMetas}; use counter::CounterAccount; diff --git a/examples/anchor/token-escrow/Cargo.toml b/examples/anchor/token-escrow/Cargo.toml index 000cb05147..24b8311f74 100644 --- a/examples/anchor/token-escrow/Cargo.toml +++ b/examples/anchor/token-escrow/Cargo.toml @@ -30,6 +30,7 @@ light-compressed-account = { workspace = true, features = ["anchor"] } [target.'cfg(not(target_os = "solana"))'.dependencies] solana-sdk = { workspace = true } +light-test-utils = { workspace = true, features = ["devenv"] } [dev-dependencies] light-verifier = { workspace = true } diff --git a/examples/anchor/token-escrow/src/escrow_with_compressed_pda/escrow.rs b/examples/anchor/token-escrow/src/escrow_with_compressed_pda/escrow.rs index 8368276f10..88f3c33619 100644 --- a/examples/anchor/token-escrow/src/escrow_with_compressed_pda/escrow.rs +++ b/examples/anchor/token-escrow/src/escrow_with_compressed_pda/escrow.rs @@ -161,19 +161,18 @@ fn create_compressed_pda_data( let compressed_account_data = CompressedAccountData { discriminator: 1u64.to_le_bytes(), data: timelock_compressed_pda.try_to_vec().unwrap(), - data_hash: timelock_compressed_pda - .hash::() - .map_err(ProgramError::from)?, + data_hash: timelock_compressed_pda.hash::().unwrap(), }; let derive_address = derive_address_legacy( &ctx.remaining_accounts[new_address_params.address_merkle_tree_account_index as usize] - .key(), + .key() + .into(), &new_address_params.seed, ) .map_err(|_| ProgramError::InvalidArgument)?; 
Ok(OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports: 0, address: Some(derive_address), data: Some(compressed_account_data), diff --git a/examples/anchor/token-escrow/src/escrow_with_compressed_pda/sdk.rs b/examples/anchor/token-escrow/src/escrow_with_compressed_pda/sdk.rs index 2c47133b44..de69bf115d 100644 --- a/examples/anchor/token-escrow/src/escrow_with_compressed_pda/sdk.rs +++ b/examples/anchor/token-escrow/src/escrow_with_compressed_pda/sdk.rs @@ -2,8 +2,7 @@ use anchor_lang::{InstructionData, ToAccountMetas}; use light_compressed_account::{ - address::{add_and_get_remaining_account_indices, pack_new_address_params}, - compressed_account::{pack_merkle_context, CompressedAccount, MerkleContext}, + compressed_account::{CompressedAccount, MerkleContext}, instruction_data::{ compressed_proof::CompressedProof, cpi_context::CompressedCpiContext, data::NewAddressParams, @@ -14,6 +13,9 @@ use light_compressed_token::process_transfer::{ transfer_sdk::{create_inputs_and_remaining_accounts_checked, to_account_metas}, TokenTransferOutputData, }; +use light_test_utils::pack::{ + add_and_get_remaining_account_indices, pack_merkle_context, pack_new_address_params, +}; use solana_sdk::{instruction::Instruction, pubkey::Pubkey}; use crate::escrow_with_compressed_pda::escrow::PackedInputCompressedPda; diff --git a/examples/anchor/token-escrow/src/escrow_with_compressed_pda/withdrawal.rs b/examples/anchor/token-escrow/src/escrow_with_compressed_pda/withdrawal.rs index 1cd91746e2..8b92845cdc 100644 --- a/examples/anchor/token-escrow/src/escrow_with_compressed_pda/withdrawal.rs +++ b/examples/anchor/token-escrow/src/escrow_with_compressed_pda/withdrawal.rs @@ -83,13 +83,11 @@ fn create_compressed_pda_data_based_on_diff( let old_compressed_account_data = CompressedAccountData { discriminator: 1u64.to_le_bytes(), data: old_timelock_compressed_pda.try_to_vec().unwrap(), - data_hash: 
old_timelock_compressed_pda - .hash::() - .map_err(ProgramError::from)?, + data_hash: old_timelock_compressed_pda.hash::().unwrap(), }; let old_compressed_account = OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports: 0, address: Some(input_compressed_pda.address), data: Some(old_compressed_account_data), @@ -110,13 +108,11 @@ fn create_compressed_pda_data_based_on_diff( let new_compressed_account_data = CompressedAccountData { discriminator: 1u64.to_le_bytes(), data: new_timelock_compressed_pda.try_to_vec().unwrap(), - data_hash: new_timelock_compressed_pda - .hash::() - .map_err(ProgramError::from)?, + data_hash: new_timelock_compressed_pda.hash::().unwrap(), }; let new_state = OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports: 0, address: Some(input_compressed_pda.address), data: Some(new_compressed_account_data), @@ -171,7 +167,7 @@ fn cpi_compressed_pda_withdrawal<'info>( }, ) .unwrap(); - verify_borsh(&light_accounts, &inputs_struct).map_err(ProgramError::from)?; + verify_borsh(&light_accounts, &inputs_struct).unwrap(); Ok(()) } diff --git a/examples/anchor/token-escrow/src/escrow_with_pda/sdk.rs b/examples/anchor/token-escrow/src/escrow_with_pda/sdk.rs index 9b51add9b7..11998515c1 100644 --- a/examples/anchor/token-escrow/src/escrow_with_pda/sdk.rs +++ b/examples/anchor/token-escrow/src/escrow_with_pda/sdk.rs @@ -2,7 +2,6 @@ use anchor_lang::{InstructionData, ToAccountMetas}; use light_compressed_account::{ - address::add_and_get_remaining_account_indices, compressed_account::{CompressedAccount, MerkleContext}, instruction_data::compressed_proof::CompressedProof, }; @@ -14,6 +13,7 @@ use light_compressed_token::process_transfer::{ }, TokenTransferOutputData, }; +use light_test_utils::pack::add_and_get_remaining_account_indices; use solana_sdk::{instruction::Instruction, 
pubkey::Pubkey}; use crate::escrow_with_compressed_pda::sdk::get_token_owner_pda; @@ -133,7 +133,9 @@ pub fn create_withdrawal_escrow_instruction( ); let merkle_tree_indices = add_and_get_remaining_account_indices( - input_params.output_compressed_account_merkle_tree_pubkeys, + input_params.output_compressed_account_merkle_tree_pubkeys, // .iter() + // .map(|pubkey| anchor_lang::prelude::Pubkey::from(pubkey)) + // .collect::>() &mut remaining_accounts, ); diff --git a/examples/anchor/token-escrow/tests/test.rs b/examples/anchor/token-escrow/tests/test.rs index 8d04e86249..c11a5b6cb5 100644 --- a/examples/anchor/token-escrow/tests/test.rs +++ b/examples/anchor/token-escrow/tests/test.rs @@ -195,8 +195,8 @@ pub async fn perform_escrow( leaf_index: compressed_input_account_with_context .merkle_context .leaf_index, - merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree, - queue_pubkey: env.v1_state_trees[0].nullifier_queue, + merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree.into(), + queue_pubkey: env.v1_state_trees[0].nullifier_queue.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -331,8 +331,8 @@ pub async fn perform_withdrawal( leaf_index: compressed_input_account_with_context .merkle_context .leaf_index, - merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree, - queue_pubkey: env.v1_state_trees[0].nullifier_queue, + merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree.into(), + queue_pubkey: env.v1_state_trees[0].nullifier_queue.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], diff --git a/examples/anchor/token-escrow/tests/test_compressed_pda.rs b/examples/anchor/token-escrow/tests/test_compressed_pda.rs index ea76e14617..e077bb569a 100644 --- a/examples/anchor/token-escrow/tests/test_compressed_pda.rs +++ b/examples/anchor/token-escrow/tests/test_compressed_pda.rs @@ -191,7 +191,8 @@ async fn create_escrow_ix( .clone(); let input_compressed_account_hash = compressed_input_account_with_context.hash().unwrap(); - let 
address = derive_address_legacy(&env.v1_address_trees[0].merkle_tree, &seed).unwrap(); + let address = + derive_address_legacy(&env.v1_address_trees[0].merkle_tree.into(), &seed).unwrap(); let rpc_result = rpc .get_validity_proof( @@ -207,8 +208,8 @@ async fn create_escrow_ix( let new_address_params = NewAddressParams { seed, - address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree, - address_queue_pubkey: env.v1_address_trees[0].queue, + address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree.into(), + address_queue_pubkey: env.v1_address_trees[0].queue.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], }; let create_ix_inputs = CreateCompressedPdaEscrowInstructionInputs { @@ -221,8 +222,8 @@ async fn create_escrow_ix( leaf_index: compressed_input_account_with_context .merkle_context .leaf_index, - merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree, - queue_pubkey: env.v1_state_trees[0].nullifier_queue, + merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree.into(), + queue_pubkey: env.v1_state_trees[0].nullifier_queue.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -283,29 +284,17 @@ pub async fn assert_escrow( ); let compressed_escrow_pda = test_indexer - .indexer - .as_ref() - .unwrap() - .compressed_accounts - .iter() - .find(|x| x.compressed_account.owner == token_escrow::ID) + .get_compressed_accounts_by_owner(&token_escrow::ID, None, None) + .await .unwrap() + .value + .items[0] .clone(); - let address = derive_address_legacy(&env.v1_address_trees[0].merkle_tree, seed).unwrap(); + let address = derive_address_legacy(&env.v1_address_trees[0].merkle_tree.into(), seed).unwrap(); - assert_eq!( - compressed_escrow_pda.compressed_account.address.unwrap(), - address - ); - assert_eq!( - compressed_escrow_pda.compressed_account.owner, - token_escrow::ID - ); - let compressed_escrow_pda_deserialized = compressed_escrow_pda - .compressed_account - .data - .as_ref() - .unwrap(); + 
assert_eq!(compressed_escrow_pda.address.unwrap(), address); + assert_eq!(compressed_escrow_pda.owner, token_escrow::ID); + let compressed_escrow_pda_deserialized = compressed_escrow_pda.data.as_ref().unwrap(); let compressed_escrow_pda_data = EscrowTimeLock::deserialize_reader(&mut &compressed_escrow_pda_deserialized.data[..]) .unwrap(); @@ -383,7 +372,7 @@ pub async fn perform_withdrawal( .unwrap() .compressed_accounts .iter() - .find(|x| x.compressed_account.owner == token_escrow::ID) + .find(|x| x.compressed_account.owner.to_bytes() == token_escrow::ID.to_bytes()) .unwrap() .clone(); println!("compressed_escrow_pda {:?}", compressed_escrow_pda); @@ -420,16 +409,16 @@ pub async fn perform_withdrawal( signer: &payer_pubkey, input_token_escrow_merkle_context: MerkleContext { leaf_index: token_escrow_account.merkle_context.leaf_index, - merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree, - queue_pubkey: env.v1_state_trees[0].nullifier_queue, + merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree.into(), + queue_pubkey: env.v1_state_trees[0].nullifier_queue.into(), prove_by_index: false, tree_type: TreeType::StateV1, }, input_cpda_merkle_context: MerkleContext { leaf_index: compressed_escrow_pda.merkle_context.leaf_index, - merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree, - queue_pubkey: env.v1_state_trees[0].nullifier_queue, + merkle_tree_pubkey: env.v1_state_trees[0].merkle_tree.into(), + queue_pubkey: env.v1_state_trees[0].nullifier_queue.into(), prove_by_index: false, tree_type: TreeType::StateV1, }, @@ -490,30 +479,21 @@ pub async fn assert_withdrawal( assert!(withdrawal_account_exits); let compressed_escrow_pda = rpc - .indexer - .as_ref() - .unwrap() - .compressed_accounts - .iter() - .find(|x| x.compressed_account.owner == token_escrow::ID) + .get_compressed_accounts_by_owner(&token_escrow::ID, None, None) + .await .unwrap() + .value + .items[0] .clone(); - let address = - derive_address_legacy(&rpc.test_accounts.v1_address_trees[0].merkle_tree, 
seed).unwrap(); - assert_eq!( - compressed_escrow_pda.compressed_account.address.unwrap(), - address - ); - assert_eq!( - compressed_escrow_pda.compressed_account.owner, - token_escrow::ID - ); - let compressed_escrow_pda_deserialized = compressed_escrow_pda - .compressed_account - .data - .as_ref() - .unwrap(); + let address = derive_address_legacy( + &rpc.test_accounts.v1_address_trees[0].merkle_tree.into(), + seed, + ) + .unwrap(); + assert_eq!(compressed_escrow_pda.address.unwrap(), address); + assert_eq!(compressed_escrow_pda.owner, token_escrow::ID.into()); + let compressed_escrow_pda_deserialized = compressed_escrow_pda.data.as_ref().unwrap(); let compressed_escrow_pda_data = EscrowTimeLock::deserialize_reader(&mut &compressed_escrow_pda_deserialized.data[..]) .unwrap(); diff --git a/forester-utils/src/instructions/address_batch_update.rs b/forester-utils/src/instructions/address_batch_update.rs index 3c67febee6..830cc8d597 100644 --- a/forester-utils/src/instructions/address_batch_update.rs +++ b/forester-utils/src/instructions/address_batch_update.rs @@ -14,7 +14,7 @@ use light_prover_client::{ proof_client::ProofClient, proof_types::batch_address_append::get_batch_address_append_circuit_inputs, }; -use light_sdk::verifier::CompressedProof; +use light_sdk::light_compressed_account::instruction_data::compressed_proof::CompressedProof; use light_sparse_merkle_tree::{ changelog::ChangelogEntry, indexed_changelog::IndexedChangelogEntry, SparseMerkleTree, }; @@ -45,7 +45,7 @@ where let (leaves_hash_chains, start_index, current_root, batch_size) = { let merkle_tree = BatchedMerkleTreeAccount::address_from_bytes( merkle_tree_account.data.as_mut_slice(), - &merkle_tree_pubkey.into(), + &(*merkle_tree_pubkey).into(), ) .unwrap(); diff --git a/forester/tests/address_v2_test.rs b/forester/tests/address_v2_test.rs index 67cca10f97..a74837c10b 100644 --- a/forester/tests/address_v2_test.rs +++ b/forester/tests/address_v2_test.rs @@ -13,10 +13,8 @@ use light_client::{ 
rpc::{client::RpcUrl, merkle_tree::MerkleTreeExt, LightClient, Rpc, RpcConfig}, }; use light_compressed_account::{ - address::{derive_address, pack_new_address_params_assigned}, - compressed_account::{ - pack_output_compressed_accounts, PackedCompressedAccountWithMerkleContext, - }, + address::derive_address, + compressed_account::PackedCompressedAccountWithMerkleContext, instruction_data::{ data::{NewAddressParams, NewAddressParamsAssigned, OutputCompressedAccountWithContext}, with_readonly::{InAccount, InstructionDataInvokeCpiWithReadOnly}, @@ -24,8 +22,11 @@ use light_compressed_account::{ }; use light_compressed_token::process_transfer::transfer_sdk::to_account_metas; use light_program_test::{accounts::test_accounts::TestAccounts, Indexer}; -use light_test_utils::create_address_test_program_sdk::{ - create_pda_instruction, CreateCompressedPdaInstructionInputs, +use light_test_utils::{ + create_address_test_program_sdk::{ + create_pda_instruction, CreateCompressedPdaInstructionInputs, + }, + pack::{pack_new_address_params_assigned, pack_output_compressed_accounts}, }; use rand::{prelude::StdRng, Rng, SeedableRng}; use serial_test::serial; @@ -295,8 +296,8 @@ async fn create_v2_addresses( let data: [u8; 31] = [1; 31]; let new_address_params = NewAddressParams { seed: address_seeds[0], - address_merkle_tree_pubkey: *batch_address_merkle_tree, - address_queue_pubkey: *batch_address_merkle_tree, + address_merkle_tree_pubkey: (*batch_address_merkle_tree).into(), + address_queue_pubkey: (*batch_address_merkle_tree).into(), address_merkle_tree_root_index: proof_result.value.get_address_root_indices()[0], }; @@ -329,8 +330,8 @@ async fn create_v2_addresses( .enumerate() .map(|(i, seed)| NewAddressParamsAssigned { seed: *seed, - address_queue_pubkey: *batch_address_merkle_tree, - address_merkle_tree_pubkey: *batch_address_merkle_tree, + address_queue_pubkey: (*batch_address_merkle_tree).into(), + address_merkle_tree_pubkey: (*batch_address_merkle_tree).into(), 
address_merkle_tree_root_index: proof_result.value.get_address_root_indices()[i], assigned_account_index: None, }) @@ -349,7 +350,7 @@ async fn create_v2_addresses( .as_slice(), output_accounts .iter() - .map(|x| x.merkle_tree) + .map(|x| x.merkle_tree.into()) .collect::>() .as_slice(), &mut remaining_accounts, diff --git a/forester/tests/batched_state_async_indexer_test.rs b/forester/tests/batched_state_async_indexer_test.rs index a740fa372a..d29bc71e19 100644 --- a/forester/tests/batched_state_async_indexer_test.rs +++ b/forester/tests/batched_state_async_indexer_test.rs @@ -854,8 +854,8 @@ async fn transfer( .iter() .map( |x| light_compressed_account::compressed_account::MerkleContext { - merkle_tree_pubkey: x.tree_info.tree, - queue_pubkey: x.tree_info.queue, + merkle_tree_pubkey: x.tree_info.tree.into(), + queue_pubkey: x.tree_info.queue.into(), leaf_index: x.leaf_index, prove_by_index: false, tree_type: TreeType::StateV2, @@ -867,7 +867,7 @@ async fn transfer( let mut compressed_accounts = vec![ CompressedAccount { lamports: lamp, - owner: payer.pubkey(), + owner: payer.pubkey().into(), address: None, data: None, }; @@ -896,7 +896,7 @@ async fn transfer( .iter() .map(|x| CompressedAccount { lamports: x.lamports, - owner: x.owner, + owner: x.owner.into(), address: x.address, data: x.data.clone(), }) @@ -945,7 +945,7 @@ async fn compress( ) -> Signature { let compress_account = CompressedAccount { lamports, - owner: payer.pubkey(), + owner: payer.pubkey().into(), address: None, data: None, }; @@ -1000,7 +1000,11 @@ async fn create_v1_address( for _ in 0..num_addresses { let seed = rng.gen::<[u8; 32]>(); seeds.push(seed); - let address = derive_address_legacy(merkle_tree_pubkey, &seed).unwrap(); + let address = derive_address_legacy( + &light_compressed_account::Pubkey::from(*merkle_tree_pubkey), + &seed, + ) + .unwrap(); address_proof_inputs.push(AddressWithTree { address, tree: *merkle_tree_pubkey, @@ -1018,8 +1022,8 @@ async fn create_v1_address( { 
new_address_params.push(NewAddressParams { seed: *seed, - address_queue_pubkey: *queue, - address_merkle_tree_pubkey: *merkle_tree_pubkey, + address_queue_pubkey: (*queue).into(), + address_merkle_tree_pubkey: (*merkle_tree_pubkey).into(), address_merkle_tree_root_index: *root_index, }); } diff --git a/program-libs/account-checks/Cargo.toml b/program-libs/account-checks/Cargo.toml index 0eba3dd1ea..f8dab74f6a 100644 --- a/program-libs/account-checks/Cargo.toml +++ b/program-libs/account-checks/Cargo.toml @@ -7,7 +7,7 @@ license = "Apache-2.0" edition = "2021" [features] -default = ["solana"] +default = [] solana = [ "solana-program-error", "solana-sysvar", diff --git a/program-libs/account-checks/src/checks.rs b/program-libs/account-checks/src/checks.rs index 3c4dd97ef8..9edc280981 100644 --- a/program-libs/account-checks/src/checks.rs +++ b/program-libs/account-checks/src/checks.rs @@ -98,6 +98,7 @@ pub fn check_discriminator(bytes: &[u8]) -> Result<(), Account } /// Checks that the account balance is greater or eqal to rent exemption. +#[cfg(any(feature = "pinocchio", feature = "solana"))] pub fn check_account_balance_is_rent_exempt( account_info: &AccountInfo, expected_size: usize, @@ -109,10 +110,12 @@ pub fn check_account_balance_is_rent_exempt( let lamports = account_info.lamports(); #[cfg(target_os = "solana")] { - use crate::Sysvar; - let rent_exemption = (crate::Rent::get() - .map_err(|_| AccountError::FailedBorrowRentSysvar))? - .minimum_balance(expected_size); + #[cfg(all(feature = "pinocchio", not(feature = "solana")))] + use pinocchio::{sysvars::rent::Rent, sysvars::Sysvar}; + #[cfg(all(not(feature = "pinocchio"), feature = "solana"))] + use solana_sysvar::{rent::Rent, Sysvar}; + let rent_exemption = (Rent::get().map_err(|_| AccountError::FailedBorrowRentSysvar))? 
+ .minimum_balance(expected_size); if lamports < rent_exemption { return Err(AccountError::InvalidAccountBalance); } diff --git a/program-libs/account-checks/src/error.rs b/program-libs/account-checks/src/error.rs index cd1f0ce3d7..04fd51ad25 100644 --- a/program-libs/account-checks/src/error.rs +++ b/program-libs/account-checks/src/error.rs @@ -54,8 +54,16 @@ impl From for u32 { } } -impl From for crate::ProgramError { +#[cfg(feature = "pinocchio")] +impl From for pinocchio::program_error::ProgramError { fn from(e: AccountError) -> Self { - crate::ProgramError::Custom(e.into()) + pinocchio::program_error::ProgramError::Custom(e.into()) + } +} + +#[cfg(feature = "solana")] +impl From for solana_program_error::ProgramError { + fn from(e: AccountError) -> Self { + solana_program_error::ProgramError::Custom(e.into()) } } diff --git a/program-libs/account-checks/src/lib.rs b/program-libs/account-checks/src/lib.rs index 1c8d82a078..46358b7f8c 100644 --- a/program-libs/account-checks/src/lib.rs +++ b/program-libs/account-checks/src/lib.rs @@ -1,13 +1,10 @@ +#[cfg(any(feature = "pinocchio", feature = "solana"))] pub mod checks; pub mod discriminator; pub mod error; pub mod test_account_info; #[cfg(feature = "pinocchio")] -use pinocchio::{account_info::AccountInfo, program_error::ProgramError, pubkey::Pubkey}; -#[cfg(all(feature = "pinocchio", target_os = "solana"))] -use pinocchio::{sysvars::rent::Rent, sysvars::Sysvar}; -#[cfg(all(not(feature = "pinocchio"), target_os = "solana"))] -use solana_sysvar::{rent::Rent, Sysvar}; -#[cfg(not(feature = "pinocchio"))] -use {solana_account_info::AccountInfo, solana_program_error::ProgramError, solana_pubkey::Pubkey}; +use pinocchio::{account_info::AccountInfo, pubkey::Pubkey}; +#[cfg(feature = "solana")] +use {solana_account_info::AccountInfo, solana_pubkey::Pubkey}; diff --git a/program-libs/batched-merkle-tree/Cargo.toml b/program-libs/batched-merkle-tree/Cargo.toml index 5bab3a96f6..1fd87ae64c 100644 --- 
a/program-libs/batched-merkle-tree/Cargo.toml +++ b/program-libs/batched-merkle-tree/Cargo.toml @@ -7,7 +7,7 @@ license = "Apache-2.0" edition = "2021" [features] -default = ["solana"] +default = [] test-only = [] solana = [ "solana-program-error", diff --git a/program-libs/batched-merkle-tree/src/initialize_address_tree.rs b/program-libs/batched-merkle-tree/src/initialize_address_tree.rs index 1559788f7b..f0275f5c6b 100644 --- a/program-libs/batched-merkle-tree/src/initialize_address_tree.rs +++ b/program-libs/batched-merkle-tree/src/initialize_address_tree.rs @@ -1,5 +1,8 @@ use light_account_checks::{checks::check_account_balance_is_rent_exempt, error::AccountError}; -use light_compressed_account::{pubkey::Pubkey, TreeType}; +use light_compressed_account::{ + pubkey::{Pubkey, PubkeyTrait}, + TreeType, +}; use light_merkle_tree_metadata::{ access::AccessMetadata, fee::compute_rollover_fee, merkle_tree::MerkleTreeMetadata, rollover::RolloverMetadata, @@ -81,7 +84,7 @@ pub fn init_batched_address_merkle_tree_from_account_info( params, mt_data, merkle_tree_rent, - (*mt_account_info.key()).into(), + Pubkey::new_from_array(mt_account_info.key().trait_to_bytes()), )?; Ok(()) } diff --git a/program-libs/batched-merkle-tree/src/initialize_state_tree.rs b/program-libs/batched-merkle-tree/src/initialize_state_tree.rs index d87e640921..a80cfaefe3 100644 --- a/program-libs/batched-merkle-tree/src/initialize_state_tree.rs +++ b/program-libs/batched-merkle-tree/src/initialize_state_tree.rs @@ -1,6 +1,9 @@ use borsh::{BorshDeserialize, BorshSerialize}; use light_account_checks::checks::check_account_balance_is_rent_exempt; -use light_compressed_account::{pubkey::Pubkey, QueueType, TreeType}; +use light_compressed_account::{ + pubkey::{Pubkey, PubkeyTrait}, + QueueType, TreeType, +}; use light_merkle_tree_metadata::{ access::AccessMetadata, fee::compute_rollover_fee, merkle_tree::MerkleTreeMetadata, queue::QueueMetadata, rollover::RolloverMetadata, @@ -103,13 +106,13 @@ pub fn 
init_batched_state_merkle_tree_from_account_info( let mt_data = &mut merkle_tree_account_info.try_borrow_mut_data()?; init_batched_state_merkle_tree_accounts( - owner.into(), + Pubkey::new_from_array(owner.trait_to_bytes()), params, queue_data, - (*queue_account_info.key()).into(), + Pubkey::new_from_array(queue_account_info.key().trait_to_bytes()), queue_rent, mt_data, - (*merkle_tree_account_info.key()).into(), + Pubkey::new_from_array(merkle_tree_account_info.key().trait_to_bytes()), merkle_tree_rent, additional_bytes_rent, )?; diff --git a/program-libs/batched-merkle-tree/src/merkle_tree.rs b/program-libs/batched-merkle-tree/src/merkle_tree.rs index 7a853bb396..27eceffbee 100644 --- a/program-libs/batched-merkle-tree/src/merkle_tree.rs +++ b/program-libs/batched-merkle-tree/src/merkle_tree.rs @@ -5,9 +5,12 @@ use light_account_checks::{ discriminator::{Discriminator, DISCRIMINATOR_LEN}, }; use light_compressed_account::{ - hash_chain::create_hash_chain_from_array, hash_to_bn254_field_size_be, - instruction_data::compressed_proof::CompressedProof, nullifier::create_nullifier, - pubkey::Pubkey, QueueType, TreeType, ADDRESS_MERKLE_TREE_TYPE_V2, ADDRESS_QUEUE_TYPE_V2, + hash_chain::create_hash_chain_from_array, + hash_to_bn254_field_size_be, + instruction_data::compressed_proof::CompressedProof, + nullifier::create_nullifier, + pubkey::{Pubkey, PubkeyTrait}, + QueueType, TreeType, ADDRESS_MERKLE_TREE_TYPE_V2, ADDRESS_QUEUE_TYPE_V2, INPUT_STATE_QUEUE_TYPE_V2, OUTPUT_STATE_QUEUE_TYPE_V2, STATE_MERKLE_TREE_TYPE_V2, }; use light_hasher::Hasher; @@ -164,7 +167,10 @@ impl<'a> BatchedMerkleTreeAccount<'a> { // Necessary to convince the borrow checker. 
let data_slice: &'a mut [u8] = unsafe { std::slice::from_raw_parts_mut(data.as_mut_ptr(), data.len()) }; - Self::from_bytes::(data_slice, &(*account_info.key()).into()) + Self::from_bytes::( + data_slice, + &Pubkey::new_from_array(account_info.key().trait_to_bytes()), + ) } /// Deserialize a state BatchedMerkleTreeAccount from bytes. diff --git a/program-libs/batched-merkle-tree/src/queue.rs b/program-libs/batched-merkle-tree/src/queue.rs index c27acb08cb..43900bfd63 100644 --- a/program-libs/batched-merkle-tree/src/queue.rs +++ b/program-libs/batched-merkle-tree/src/queue.rs @@ -6,7 +6,9 @@ use light_account_checks::{ discriminator::{Discriminator, DISCRIMINATOR_LEN}, }; use light_compressed_account::{ - hash_to_bn254_field_size_be, pubkey::Pubkey, QueueType, OUTPUT_STATE_QUEUE_TYPE_V2, + hash_to_bn254_field_size_be, + pubkey::{Pubkey, PubkeyTrait}, + QueueType, OUTPUT_STATE_QUEUE_TYPE_V2, }; use light_merkle_tree_metadata::{errors::MerkleTreeMetadataError, queue::QueueMetadata}; use light_zero_copy::{errors::ZeroCopyError, vec::ZeroCopyVecU64}; @@ -167,7 +169,10 @@ impl<'a> BatchedQueueAccount<'a> { let account_data: &'a mut [u8] = unsafe { std::slice::from_raw_parts_mut(account_data.as_mut_ptr(), account_data.len()) }; - Self::from_bytes::(account_data, (*account_info.key()).into()) + Self::from_bytes::( + account_data, + Pubkey::new_from_array(account_info.key().trait_to_bytes()), + ) } /// Deserialize a BatchedQueueAccount from bytes. 
diff --git a/program-libs/batched-merkle-tree/src/rollover_address_tree.rs b/program-libs/batched-merkle-tree/src/rollover_address_tree.rs index ec5f041e3d..f6c41731c8 100644 --- a/program-libs/batched-merkle-tree/src/rollover_address_tree.rs +++ b/program-libs/batched-merkle-tree/src/rollover_address_tree.rs @@ -1,5 +1,5 @@ use light_account_checks::checks::check_account_balance_is_rent_exempt; -use light_compressed_account::pubkey::Pubkey; +use light_compressed_account::pubkey::{Pubkey, PubkeyTrait}; #[cfg(target_os = "solana")] use light_merkle_tree_metadata::errors::MerkleTreeMetadataError; use light_merkle_tree_metadata::utils::if_equals_none; @@ -32,7 +32,7 @@ pub fn rollover_batched_address_tree_from_account_info( &mut old_merkle_tree, &mut new_mt_data, new_mt_rent, - (*new_account.key()).into(), + Pubkey::new_from_array(new_account.key().trait_to_bytes()), network_fee, )?; Ok(new_mt_rent) diff --git a/program-libs/batched-merkle-tree/src/rollover_state_tree.rs b/program-libs/batched-merkle-tree/src/rollover_state_tree.rs index 8b8cbfcd75..3b15669d8b 100644 --- a/program-libs/batched-merkle-tree/src/rollover_state_tree.rs +++ b/program-libs/batched-merkle-tree/src/rollover_state_tree.rs @@ -1,5 +1,5 @@ use light_account_checks::checks::check_account_balance_is_rent_exempt; -use light_compressed_account::pubkey::Pubkey; +use light_compressed_account::pubkey::{Pubkey, PubkeyTrait}; use light_merkle_tree_metadata::{errors::MerkleTreeMetadataError, utils::if_equals_none}; // Import the appropriately feature-gated types from lib.rs @@ -87,15 +87,15 @@ pub fn rollover_batched_state_tree_from_account_info( let new_mt_data = &mut new_state_merkle_tree.try_borrow_mut_data()?; let params = RolloverBatchStateTreeParams { old_merkle_tree: old_merkle_tree_account, - old_mt_pubkey: (*old_state_merkle_tree.key()).into(), + old_mt_pubkey: Pubkey::new_from_array(old_state_merkle_tree.key().trait_to_bytes()), new_mt_data, new_mt_rent: merkle_tree_rent, - new_mt_pubkey: 
(*new_state_merkle_tree.key()).into(), + new_mt_pubkey: Pubkey::new_from_array(new_state_merkle_tree.key().trait_to_bytes()), old_output_queue: old_output_queue_account, - old_queue_pubkey: (*old_output_queue.key()).into(), + old_queue_pubkey: Pubkey::new_from_array(old_output_queue.key().trait_to_bytes()), new_output_queue_data: &mut new_output_queue.try_borrow_mut_data()?, new_output_queue_rent: queue_rent, - new_output_queue_pubkey: (*new_output_queue.key()).into(), + new_output_queue_pubkey: Pubkey::new_from_array(new_output_queue.key().trait_to_bytes()), additional_bytes_rent, additional_bytes, network_fee, diff --git a/program-libs/compressed-account/Cargo.toml b/program-libs/compressed-account/Cargo.toml index 383069ef41..45639dad9a 100644 --- a/program-libs/compressed-account/Cargo.toml +++ b/program-libs/compressed-account/Cargo.toml @@ -7,21 +7,10 @@ license = "Apache-2.0" edition = "2021" [features] -default = ["solana"] -solana = [ - "dep:solana-program-error", - "dep:solana-pubkey", - "light-hasher/solana", - "light-zero-copy/solana", - "light-macros/solana", -] -anchor = ["solana", "anchor-lang"] -pinocchio = [ - "dep:pinocchio", - "light-hasher/pinocchio", - "light-zero-copy/pinocchio", - "light-macros/pinocchio", -] +default = [] +solana = ["dep:solana-pubkey"] +anchor = ["anchor-lang"] +pinocchio = [] bytemuck-des = ["bytemuck"] new-unique = ["dep:solana-pubkey"] @@ -33,12 +22,10 @@ light-zero-copy = { workspace = true, features = ["std"] } light-macros = { workspace = true } # Feature-gated dependencies -solana-program-error = { workspace = true, optional = true } -solana-pubkey = { workspace = true, optional = true, features = ["borsh"] } anchor-lang = { workspace = true, optional = true } -pinocchio = { workspace = true, optional = true } -bytemuck = { workspace = true, optional = true } +bytemuck = { workspace = true, optional = true, features = ["derive"] } borsh = { workspace = true } +solana-pubkey = { workspace = true, optional = true } 
[dev-dependencies] rand = { workspace = true } diff --git a/program-libs/compressed-account/src/constants.rs b/program-libs/compressed-account/src/constants.rs index 871790b4b5..0a0b17b69f 100644 --- a/program-libs/compressed-account/src/constants.rs +++ b/program-libs/compressed-account/src/constants.rs @@ -1,13 +1,11 @@ -#[cfg(feature = "pinocchio")] -use light_macros::pubkey; -#[cfg(not(feature = "pinocchio"))] -use solana_pubkey::pubkey; +use light_macros::pubkey_array; -pub const ACCOUNT_COMPRESSION_PROGRAM_ID: crate::Pubkey = - pubkey!("compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq"); -pub const SYSTEM_PROGRAM_ID: crate::Pubkey = pubkey!("SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7"); -pub const REGISTERED_PROGRAM_PDA: crate::Pubkey = - pubkey!("35hkDgaAKwMCaxRz2ocSZ6NaUrtKkyNqU6c4RV3tYJRh"); +pub const ACCOUNT_COMPRESSION_PROGRAM_ID: [u8; 32] = + pubkey_array!("compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq"); +pub const SYSTEM_PROGRAM_ID: [u8; 32] = + pubkey_array!("SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7"); +pub const REGISTERED_PROGRAM_PDA: [u8; 32] = + pubkey_array!("35hkDgaAKwMCaxRz2ocSZ6NaUrtKkyNqU6c4RV3tYJRh"); pub const CREATE_CPI_CONTEXT_ACCOUNT: [u8; 8] = [233, 112, 71, 66, 121, 33, 178, 188]; pub const ADDRESS_MERKLE_TREE_ACCOUNT_DISCRIMINATOR: [u8; 8] = [11, 161, 175, 9, 212, 229, 73, 73]; diff --git a/program-libs/compressed-account/src/indexer_event/event.rs b/program-libs/compressed-account/src/indexer_event/event.rs index 898ecdb4d3..097cd829d2 100644 --- a/program-libs/compressed-account/src/indexer_event/event.rs +++ b/program-libs/compressed-account/src/indexer_event/event.rs @@ -71,8 +71,8 @@ impl MerkleTreeSequenceNumber { impl From<&InstructionDataSequenceNumber> for MerkleTreeSequenceNumber { fn from(seq: &InstructionDataSequenceNumber) -> Self { Self { - tree_pubkey: seq.tree_pubkey.into(), - queue_pubkey: seq.queue_pubkey.into(), + tree_pubkey: seq.tree_pubkey, + queue_pubkey: seq.queue_pubkey, tree_type: seq.tree_type.into(), seq: 
seq.seq.into(), } diff --git a/program-libs/compressed-account/src/indexer_event/parse.rs b/program-libs/compressed-account/src/indexer_event/parse.rs index ecb57fb8b5..606d3ad283 100644 --- a/program-libs/compressed-account/src/indexer_event/parse.rs +++ b/program-libs/compressed-account/src/indexer_event/parse.rs @@ -405,7 +405,7 @@ fn deserialize_instruction<'a>( OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { address: x.address, - owner: data.invoking_program_id.into(), + owner: data.invoking_program_id, lamports: account.lamports, data: Some(CompressedAccountData { discriminator: account.discriminator, @@ -426,7 +426,7 @@ fn deserialize_instruction<'a>( PackedCompressedAccountWithMerkleContext { compressed_account: CompressedAccount { address: x.address, - owner: data.invoking_program_id.into(), + owner: data.invoking_program_id, lamports: account.lamports, data: Some(CompressedAccountData { discriminator: account.discriminator, @@ -632,7 +632,7 @@ fn create_nullifier_queue_indices( // 3. 
increment the sequence number internal_input_sequence_numbers.iter_mut().for_each(|seq| { for (i, merkle_tree_pubkey) in input_merkle_tree_pubkeys.iter().enumerate() { - if crate::pubkey::Pubkey::from(*merkle_tree_pubkey) == seq.tree_pubkey { + if *merkle_tree_pubkey == seq.tree_pubkey { nullifier_queue_indices[i] = seq.seq.into(); seq.seq += 1; } @@ -660,7 +660,7 @@ fn create_address_queue_indices( .iter_mut() .for_each(|seq| { for (i, merkle_tree_pubkey) in address_merkle_tree_pubkeys.iter().enumerate() { - if crate::pubkey::Pubkey::from(*merkle_tree_pubkey) == seq.tree_pubkey { + if *merkle_tree_pubkey == seq.tree_pubkey { address_queue_indices[i] = seq.seq.into(); seq.seq += 1; } diff --git a/program-libs/compressed-account/src/instruction_data/compressed_proof.rs b/program-libs/compressed-account/src/instruction_data/compressed_proof.rs index 3f789363a8..9c79f9ca24 100644 --- a/program-libs/compressed-account/src/instruction_data/compressed_proof.rs +++ b/program-libs/compressed-account/src/instruction_data/compressed_proof.rs @@ -40,3 +40,42 @@ impl<'a> Deserialize<'a> for CompressedProof { Ok(Ref::<&[u8], CompressedProof>::from_prefix(bytes)?) 
} } + +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, AnchorDeserialize, AnchorSerialize)] +pub struct ValidityProof(pub Option<CompressedProof>); + +impl ValidityProof { + pub fn new(proof: Option<CompressedProof>) -> Self { + Self(proof) + } +} + +impl From<CompressedProof> for ValidityProof { + fn from(proof: CompressedProof) -> Self { + Self(Some(proof)) + } +} + +impl From<Option<CompressedProof>> for ValidityProof { + fn from(proof: Option<CompressedProof>) -> Self { + Self(proof) + } +} +impl From<&CompressedProof> for ValidityProof { + fn from(proof: &CompressedProof) -> Self { + Self(Some(*proof)) + } +} + +impl From<&Option<CompressedProof>> for ValidityProof { + fn from(proof: &Option<CompressedProof>) -> Self { + Self(*proof) + } +} + +#[allow(clippy::from_over_into)] +impl Into<Option<CompressedProof>> for ValidityProof { + fn into(self) -> Option<CompressedProof> { + self.0 + } +} diff --git a/program-libs/compressed-account/src/instruction_data/with_readonly.rs b/program-libs/compressed-account/src/instruction_data/with_readonly.rs index cb6da828d7..59b9c27bd7 100644 --- a/program-libs/compressed-account/src/instruction_data/with_readonly.rs +++ b/program-libs/compressed-account/src/instruction_data/with_readonly.rs @@ -130,7 +130,7 @@ impl InAccount { merkle_context: self.merkle_context, root_index: self.root_index, compressed_account: CompressedAccount { - owner: owner.into(), + owner, address: self.address, lamports: self.lamports, data: Some(CompressedAccountData { @@ -468,7 +468,7 @@ fn test_read_only_zero_copy() { }], output_compressed_accounts: vec![OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: Pubkey::default().into(), + owner: Pubkey::default(), lamports: 2000, address: Some([40; 32]), data: Some(CompressedAccountData { @@ -682,7 +682,7 @@ mod test { .map(|_| { OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: Pubkey::new_unique().into(), + owner: Pubkey::new_unique(), lamports: rng.gen(), address: if rng.gen() { Some(rng.gen()) } else { None }, data: if rng.gen() { diff --git
a/program-libs/compressed-account/src/instruction_data/zero_copy.rs b/program-libs/compressed-account/src/instruction_data/zero_copy.rs index 23188088ce..b474110872 100644 --- a/program-libs/compressed-account/src/instruction_data/zero_copy.rs +++ b/program-libs/compressed-account/src/instruction_data/zero_copy.rs @@ -273,7 +273,7 @@ impl From<&ZCompressedAccount<'_>> for CompressedAccount { data_hash: *data.data_hash, }); CompressedAccount { - owner: crate::Pubkey::from(compressed_account.owner), + owner: compressed_account.owner, lamports: compressed_account.lamports.into(), address: compressed_account.address.map(|x| *x), data, @@ -1299,8 +1299,7 @@ pub mod test { let expected_owner: Pubkey = invoke_ref .input_compressed_accounts_with_merkle_context[0] .compressed_account - .owner - .into(); + .owner; assert_eq!(z_copy.owner(), expected_owner); } else { assert_eq!(z_copy.owner(), Pubkey::default()); @@ -1401,7 +1400,7 @@ pub mod test { reference: &CompressedAccount, z_copy: &ZCompressedAccount, ) -> Result<(), CompressedAccountError> { - if reference.owner != z_copy.owner.into() { + if reference.owner.to_bytes() != z_copy.owner.as_bytes() { return Err(CompressedAccountError::InvalidArgument); } if reference.lamports != u64::from(z_copy.lamports) { diff --git a/program-libs/compressed-account/src/lib.rs b/program-libs/compressed-account/src/lib.rs index db25f8e5fc..bd537d76e3 100644 --- a/program-libs/compressed-account/src/lib.rs +++ b/program-libs/compressed-account/src/lib.rs @@ -16,6 +16,7 @@ pub mod nullifier; pub mod pubkey; pub mod tx_hash; +// Re-export Pubkey type #[cfg(feature = "anchor")] use anchor_lang::{AnchorDeserialize, AnchorSerialize}; #[cfg(not(feature = "anchor"))] @@ -24,16 +25,7 @@ pub use light_hasher::{ bigint::bigint_to_be_bytes_array, hash_to_field_size::{hash_to_bn254_field_size_be, hashv_to_bn254_field_size_be}, }; -// Pinocchio framework imports -#[cfg(feature = "pinocchio")] -pub(crate) use pinocchio::program_error::ProgramError; 
-#[cfg(feature = "pinocchio")] -pub(crate) use pinocchio::pubkey::Pubkey; -// Solana program imports (default framework) -#[cfg(not(feature = "pinocchio"))] -pub(crate) use solana_program_error::ProgramError; -#[cfg(not(feature = "pinocchio"))] -pub(crate) use solana_pubkey::Pubkey; +pub use pubkey::Pubkey; #[derive(Debug, Error, PartialEq)] pub enum CompressedAccountError { @@ -87,13 +79,6 @@ impl From for u32 { } } -// Convert compressed account errors to program errors for both frameworks -impl From for ProgramError { - fn from(e: CompressedAccountError) -> Self { - ProgramError::Custom(e.into()) - } -} - pub const NULLIFIER_QUEUE_TYPE_V1: u64 = 1; pub const ADDRESS_QUEUE_TYPE_V1: u64 = 2; pub const INPUT_STATE_QUEUE_TYPE_V2: u64 = 3; diff --git a/program-libs/compressed-account/src/pubkey.rs b/program-libs/compressed-account/src/pubkey.rs index a5a218ff45..a12281284e 100644 --- a/program-libs/compressed-account/src/pubkey.rs +++ b/program-libs/compressed-account/src/pubkey.rs @@ -11,6 +11,8 @@ use crate::{AnchorDeserialize, AnchorSerialize}; Debug, Copy, PartialEq, + Eq, + Hash, Clone, Immutable, FromBytes, @@ -62,6 +64,18 @@ impl AsRef for Pubkey { } } +impl AsRef<[u8]> for Pubkey { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +impl PartialEq<[u8; 32]> for Pubkey { + fn eq(&self, other: &[u8; 32]) -> bool { + self.0 == *other + } +} + impl<'a> Deserialize<'a> for Pubkey { type Output = Ref<&'a [u8], Pubkey>; @@ -94,16 +108,16 @@ impl From<&[u8; 32]> for Pubkey { } } -#[cfg(not(feature = "anchor"))] -impl From for solana_pubkey::Pubkey { - fn from(pubkey: Pubkey) -> Self { +#[cfg(feature = "anchor")] +impl From<&anchor_lang::prelude::Pubkey> for Pubkey { + fn from(pubkey: &anchor_lang::prelude::Pubkey) -> Self { Self::new_from_array(pubkey.to_bytes()) } } -#[cfg(not(feature = "anchor"))] -impl From<&Pubkey> for solana_pubkey::Pubkey { - fn from(pubkey: &Pubkey) -> Self { +#[cfg(feature = "anchor")] +impl From for Pubkey { + fn from(pubkey: 
anchor_lang::prelude::Pubkey) -> Self { Self::new_from_array(pubkey.to_bytes()) } } @@ -122,27 +136,8 @@ impl From<&Pubkey> for anchor_lang::prelude::Pubkey { } } -#[cfg(not(feature = "pinocchio"))] -impl From for Pubkey { - fn from(pubkey: crate::Pubkey) -> Self { - Self(pubkey.to_bytes()) - } -} - -#[cfg(not(feature = "pinocchio"))] -impl From<&crate::Pubkey> for Pubkey { - fn from(pubkey: &crate::Pubkey) -> Self { - Self(pubkey.to_bytes()) - } -} - impl Pubkey { - #[cfg(not(feature = "pinocchio"))] - pub fn new_unique() -> Self { - Self(solana_pubkey::Pubkey::new_unique().to_bytes()) - } - - #[cfg(all(feature = "pinocchio", feature = "new-unique"))] + #[cfg(feature = "new-unique")] pub fn new_unique() -> Self { Self(solana_pubkey::Pubkey::new_unique().to_bytes()) } @@ -180,9 +175,19 @@ impl PubkeyTrait for anchor_lang::prelude::Pubkey { } } -#[cfg(not(feature = "anchor"))] +#[cfg(all(feature = "solana", not(feature = "anchor")))] impl PubkeyTrait for solana_pubkey::Pubkey { fn trait_to_bytes(&self) -> [u8; 32] { self.to_bytes() } } + +impl PubkeyTrait for [u8; 32] { + fn trait_to_bytes(&self) -> [u8; 32] { + *self + } + #[cfg(feature = "anchor")] + fn to_anchor_pubkey(&self) -> anchor_lang::prelude::Pubkey { + (*self).into() + } +} diff --git a/program-libs/hasher/Cargo.toml b/program-libs/hasher/Cargo.toml index 8e54319788..5f1c4cbb4b 100644 --- a/program-libs/hasher/Cargo.toml +++ b/program-libs/hasher/Cargo.toml @@ -7,7 +7,7 @@ license = "Apache-2.0" edition = "2021" [features] -default = ["solana"] +default = [] solana = ["solana-program-error", "solana-pubkey"] pinocchio = ["dep:pinocchio"] diff --git a/program-libs/hasher/src/errors.rs b/program-libs/hasher/src/errors.rs index 9849db11d3..9d1dafd486 100644 --- a/program-libs/hasher/src/errors.rs +++ b/program-libs/hasher/src/errors.rs @@ -44,10 +44,3 @@ impl From for u32 { } } } - -#[cfg(any(feature = "solana", feature = "pinocchio"))] -impl From for crate::ProgramError { - fn from(e: HasherError) -> Self 
{ - crate::ProgramError::Custom(e.into()) - } -} diff --git a/program-libs/hasher/src/hash_to_field_size.rs b/program-libs/hasher/src/hash_to_field_size.rs index e01da7862f..2e7b67c4d3 100644 --- a/program-libs/hasher/src/hash_to_field_size.rs +++ b/program-libs/hasher/src/hash_to_field_size.rs @@ -139,7 +139,7 @@ mod tests { assert_eq!(hash, manual_hash); } - #[cfg(not(feature = "pinocchio"))] + #[cfg(feature = "solana")] #[test] fn test_hash_to_bn254_field_size_be() { use solana_pubkey::Pubkey; @@ -160,7 +160,7 @@ mod tests { ); } - #[cfg(not(feature = "pinocchio"))] + #[cfg(feature = "solana")] #[test] fn test_hashv_to_bn254_field_size_be() { use solana_pubkey::Pubkey; diff --git a/program-libs/hasher/src/lib.rs b/program-libs/hasher/src/lib.rs index 54985ee29d..9f4e4758c0 100644 --- a/program-libs/hasher/src/lib.rs +++ b/program-libs/hasher/src/lib.rs @@ -29,8 +29,3 @@ pub trait Hasher { fn zero_bytes() -> ZeroBytes; fn zero_indexed_leaf() -> [u8; 32]; } - -#[cfg(feature = "pinocchio")] -use pinocchio::program_error::ProgramError; -#[cfg(not(feature = "pinocchio"))] -use solana_program_error::ProgramError; diff --git a/program-libs/macros/src/lib.rs b/program-libs/macros/src/lib.rs index f0eb6010da..588850f086 100644 --- a/program-libs/macros/src/lib.rs +++ b/program-libs/macros/src/lib.rs @@ -15,6 +15,15 @@ pub fn pubkey(input: TokenStream) -> TokenStream { .into() } +/// Converts a base58 encoded public key into a raw byte array [u8; 32]. 
+#[proc_macro] +pub fn pubkey_array(input: TokenStream) -> TokenStream { + let args = parse_macro_input!(input as pubkey::PubkeyArgs); + pubkey::pubkey_array(args) + .unwrap_or_else(|err| err.to_compile_error()) + .into() +} + #[proc_macro_attribute] pub fn heap_neutral(_: TokenStream, input: TokenStream) -> TokenStream { let mut function = parse_macro_input!(input as ItemFn); diff --git a/program-libs/macros/src/pubkey.rs b/program-libs/macros/src/pubkey.rs index 18b60bf2ca..8ea4c8e3e7 100644 --- a/program-libs/macros/src/pubkey.rs +++ b/program-libs/macros/src/pubkey.rs @@ -49,6 +49,27 @@ pub(crate) fn pubkey(args: PubkeyArgs) -> Result { } } +pub(crate) fn pubkey_array(args: PubkeyArgs) -> Result { + let v = decode(args.pubkey.value()) + .into_vec() + .map_err(|_| Error::new(args.pubkey.span(), "Invalid base58 string"))?; + let v_len = v.len(); + + let arr: [u8; PUBKEY_LEN] = v.try_into().map_err(|_| { + Error::new( + args.pubkey.span(), + format!( + "Invalid size of decoded public key, expected 32, got {}", + v_len, + ), + ) + })?; + + Ok(quote! 
{ + [ #(#arr),* ] + }) +} + #[cfg(test)] mod tests { use syn::parse_quote; diff --git a/program-libs/merkle-tree-metadata/Cargo.toml b/program-libs/merkle-tree-metadata/Cargo.toml index 46ee548a1c..06646e827a 100644 --- a/program-libs/merkle-tree-metadata/Cargo.toml +++ b/program-libs/merkle-tree-metadata/Cargo.toml @@ -7,9 +7,8 @@ license = "Apache-2.0" edition = "2021" [features] -default = ["solana"] +default = [] solana = [ - "dep:solana-program-error", "dep:solana-msg", "dep:solana-sysvar", "solana-sysvar/bincode", @@ -17,18 +16,13 @@ solana = [ "light-compressed-account/bytemuck-des", ] anchor = ["solana", "anchor-lang"] -pinocchio = [ - "dep:pinocchio", - "light-compressed-account/pinocchio", - "light-compressed-account/bytemuck-des", -] +pinocchio = ["dep:pinocchio", "light-compressed-account/bytemuck-des"] [dependencies] thiserror = { workspace = true } bytemuck = { workspace = true, features = ["derive"] } zerocopy = { workspace = true, features = ["derive"] } light-compressed-account = { workspace = true, features = ["bytemuck-des"] } -solana-program-error = { workspace = true, optional = true } solana-msg = { workspace = true, optional = true } solana-sysvar = { workspace = true, optional = true } anchor-lang = { workspace = true, optional = true } @@ -38,3 +32,5 @@ borsh = { workspace = true } [lints.rust.unexpected_cfgs] level = "allow" check-cfg = ['cfg(target_os, values("solana"))'] +[dev-dependencies] +light-compressed-account = { workspace = true, features = ["new-unique"] } diff --git a/program-libs/merkle-tree-metadata/src/errors.rs b/program-libs/merkle-tree-metadata/src/errors.rs index c05c22db4d..4024b78630 100644 --- a/program-libs/merkle-tree-metadata/src/errors.rs +++ b/program-libs/merkle-tree-metadata/src/errors.rs @@ -37,9 +37,3 @@ impl From for u32 { } } } - -impl From for crate::ProgramError { - fn from(e: MerkleTreeMetadataError) -> Self { - crate::ProgramError::Custom(e.into()) - } -} diff --git 
a/program-libs/merkle-tree-metadata/src/lib.rs b/program-libs/merkle-tree-metadata/src/lib.rs index 8cff288627..d936594f4b 100644 --- a/program-libs/merkle-tree-metadata/src/lib.rs +++ b/program-libs/merkle-tree-metadata/src/lib.rs @@ -16,17 +16,17 @@ pub use light_compressed_account::{ NULLIFIER_QUEUE_TYPE_V1, OUTPUT_STATE_QUEUE_TYPE_V2, STATE_MERKLE_TREE_TYPE_V1, STATE_MERKLE_TREE_TYPE_V2, }; -// Pinocchio imports -#[allow(unused_imports)] -#[cfg(feature = "pinocchio")] -pub(crate) use pinocchio::{ - msg, program_error::ProgramError, sysvars::clock::Clock, sysvars::Sysvar, -}; -// Solana imports (default) -#[allow(unused_imports)] -#[cfg(not(feature = "pinocchio"))] -pub(crate) use { - solana_msg::msg, - solana_program_error::ProgramError, - solana_sysvar::{clock::Clock, Sysvar}, -}; +// // Pinocchio imports +// #[allow(unused_imports)] +// #[cfg(feature = "pinocchio")] +// pub(crate) use pinocchio::{ +// msg, program_error::ProgramError, sysvars::clock::Clock, sysvars::Sysvar, +// }; +// // Solana imports (default) +// #[allow(unused_imports)] +// #[cfg(not(feature = "pinocchio"))] +// pub(crate) use { +// solana_msg::msg, +// solana_program_error::ProgramError, +// solana_sysvar::{clock::Clock, Sysvar}, +// }; diff --git a/program-libs/merkle-tree-metadata/src/rollover.rs b/program-libs/merkle-tree-metadata/src/rollover.rs index e64129fc0e..686a6736fa 100644 --- a/program-libs/merkle-tree-metadata/src/rollover.rs +++ b/program-libs/merkle-tree-metadata/src/rollover.rs @@ -70,7 +70,10 @@ impl RolloverMetadata { #[cfg(target_os = "solana")] { - use crate::{Clock, Sysvar}; + #[cfg(feature = "pinocchio")] + use pinocchio::{sysvars::clock::Clock, sysvars::Sysvar}; + #[cfg(feature = "solana")] + use solana_sysvar::{clock::Clock, Sysvar}; self.rolledover_slot = Clock::get().unwrap().slot; } #[cfg(not(target_os = "solana"))] @@ -95,9 +98,9 @@ pub fn check_rollover_fee_sufficient( if (rollover_fee * rollover_threshold * (2u64.pow(height))) / 100 < queue_rent + 
merkle_tree_rent { - #[cfg(not(feature = "pinocchio"))] + #[cfg(feature = "solana")] { - use crate::msg; + use solana_msg::msg; msg!("rollover_fee: {}", rollover_fee); msg!("rollover_threshold: {}", rollover_threshold); msg!("height: {}", height); diff --git a/program-libs/verifier/Cargo.toml b/program-libs/verifier/Cargo.toml index 04be7e2740..c5d6aa0586 100644 --- a/program-libs/verifier/Cargo.toml +++ b/program-libs/verifier/Cargo.toml @@ -7,7 +7,7 @@ license = "Apache-2.0" edition = "2021" [features] -default = ["solana"] +default = [] solana = [ "solana-program-error", "light-compressed-account/solana", diff --git a/program-libs/verifier/src/lib.rs b/program-libs/verifier/src/lib.rs index 4fdf2f2a5e..b7804175ee 100644 --- a/program-libs/verifier/src/lib.rs +++ b/program-libs/verifier/src/lib.rs @@ -39,7 +39,7 @@ impl From for u32 { } } -#[cfg(not(feature = "pinocchio"))] +#[cfg(feature = "solana")] impl From for solana_program_error::ProgramError { fn from(e: VerifierError) -> Self { solana_program_error::ProgramError::Custom(e.into()) @@ -218,7 +218,7 @@ pub fn select_verifying_key<'a>( num_leaves: usize, num_addresses: usize, ) -> Result<&'a Groth16Verifyingkey<'static>, VerifierError> { - #[cfg(all(not(feature = "pinocchio"), target_os = "solana"))] + #[cfg(all(feature = "solana", target_os = "solana"))] solana_msg::msg!( "select_verifying_key num_leaves: {}, num_addresses: {}", num_leaves, @@ -273,7 +273,7 @@ pub fn verify( let proof_c = decompress_g1(&proof.c).map_err(|_| crate::DecompressG1Failed)?; let mut verifier = Groth16Verifier::new(&proof_a, &proof_b, &proof_c, public_inputs, vk) .map_err(|_| { - #[cfg(all(target_os = "solana", not(feature = "pinocchio")))] + #[cfg(all(target_os = "solana", feature = "solana"))] { use solana_msg::msg; msg!("Proof verification failed"); @@ -285,7 +285,7 @@ pub fn verify( CreateGroth16VerifierFailed })?; verifier.verify().map_err(|_| { - #[cfg(all(target_os = "solana", not(feature = "pinocchio")))] + 
#[cfg(all(target_os = "solana", feature = "solana"))] { use solana_msg::msg; msg!("Proof verification failed"); diff --git a/program-tests/account-compression-test/tests/address_merkle_tree_tests.rs b/program-tests/account-compression-test/tests/address_merkle_tree_tests.rs index 4f9394ffdd..c969aea830 100644 --- a/program-tests/account-compression-test/tests/address_merkle_tree_tests.rs +++ b/program-tests/account-compression-test/tests/address_merkle_tree_tests.rs @@ -18,7 +18,6 @@ use light_concurrent_merkle_tree::errors::ConcurrentMerkleTreeError; use light_hash_set::{HashSet, HashSetError}; use light_hasher::{bigint::bigint_to_be_bytes_array, Poseidon}; use light_indexed_merkle_tree::errors::IndexedMerkleTreeError; -use light_merkle_tree_metadata::errors::MerkleTreeMetadataError; use light_program_test::{ accounts::address_tree::create_initialize_address_merkle_tree_and_queue_instruction, indexer::address_tree::AddressMerkleTreeBundle, program_test::LightProgramTest, @@ -1285,7 +1284,8 @@ async fn address_merkle_tree_and_queue_rollover( assert_rpc_error( result, 2, - MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), + AccountCompressionErrorCode::MerkleTreeMetadataError.into(), + // MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), ) .unwrap(); @@ -1304,7 +1304,8 @@ async fn address_merkle_tree_and_queue_rollover( assert_rpc_error( result, 2, - MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), + AccountCompressionErrorCode::MerkleTreeMetadataError.into(), + // MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), ) .unwrap(); @@ -1356,7 +1357,8 @@ async fn address_merkle_tree_and_queue_rollover( assert_rpc_error( result, 2, - MerkleTreeMetadataError::MerkleTreeAlreadyRolledOver.into(), + AccountCompressionErrorCode::MerkleTreeMetadataError.into(), + // MerkleTreeMetadataError::MerkleTreeAlreadyRolledOver.into(), ) .unwrap(); } diff --git a/program-tests/account-compression-test/tests/merkle_tree_tests.rs 
b/program-tests/account-compression-test/tests/merkle_tree_tests.rs index f393193520..5684412b87 100644 --- a/program-tests/account-compression-test/tests/merkle_tree_tests.rs +++ b/program-tests/account-compression-test/tests/merkle_tree_tests.rs @@ -12,9 +12,7 @@ use account_compression::{ }; use anchor_lang::{InstructionData, ToAccountMetas}; use light_account_checks::error::AccountError; -use light_compressed_account::instruction_data::{ - data::pack_pubkey, insert_into_queues::InsertIntoQueuesInstructionDataMut, -}; +use light_compressed_account::instruction_data::insert_into_queues::InsertIntoQueuesInstructionDataMut; use light_concurrent_merkle_tree::{ errors::ConcurrentMerkleTreeError, event::MerkleTreeEvent, zero_copy::ConcurrentMerkleTreeZeroCopyMut, @@ -23,7 +21,7 @@ use light_hash_set::HashSetError; use light_hasher::{ bigint::bigint_to_be_bytes_array, zero_bytes::poseidon::ZERO_BYTES, Hasher, Poseidon, }; -use light_merkle_tree_metadata::{errors::MerkleTreeMetadataError, QueueType}; +use light_merkle_tree_metadata::QueueType; use light_merkle_tree_reference::MerkleTree; use light_program_test::{ accounts::state_tree::{ @@ -39,6 +37,7 @@ use light_test_utils::{ assert_queue::assert_nullifier_queue_initialized, create_account_instruction, create_address_merkle_tree_and_queue_account_with_assert, get_concurrent_merkle_tree, get_hash_set, + pack::pack_pubkey, state_tree_rollover::{ assert_rolled_over_pair, perform_state_merkle_tree_roll_over, set_state_merkle_tree_next_index, StateMerkleTreeRolloverMode, @@ -732,7 +731,8 @@ async fn test_init_and_rollover_state_merkle_tree( assert_rpc_error( result, 2, - MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), + AccountCompressionErrorCode::MerkleTreeMetadataError.into(), + // MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), ) .unwrap(); @@ -751,7 +751,8 @@ async fn test_init_and_rollover_state_merkle_tree( assert_rpc_error( result, 2, - 
MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), + AccountCompressionErrorCode::MerkleTreeMetadataError.into(), + // MerkleTreeMetadataError::MerkleTreeAndQueueNotAssociated.into(), ) .unwrap(); @@ -807,7 +808,8 @@ async fn test_init_and_rollover_state_merkle_tree( assert_rpc_error( result, 2, - MerkleTreeMetadataError::MerkleTreeAlreadyRolledOver.into(), + AccountCompressionErrorCode::MerkleTreeMetadataError.into(), + // MerkleTreeMetadataError::MerkleTreeAlreadyRolledOver.into(), ) .unwrap(); } diff --git a/program-tests/client-test/tests/light_client.rs b/program-tests/client-test/tests/light_client.rs index eb813c85fe..ba86a6b791 100644 --- a/program-tests/client-test/tests/light_client.rs +++ b/program-tests/client-test/tests/light_client.rs @@ -651,7 +651,7 @@ async fn create_address( let output_account = light_compressed_account::compressed_account::CompressedAccount { lamports, - owner, + owner: owner.into(), data: None, address: Some(address), }; @@ -669,8 +669,8 @@ async fn create_address( let new_address_params = NewAddressParams { seed: address_seed, - address_queue_pubkey: address_merkle_tree.queue, - address_merkle_tree_pubkey: address_merkle_tree.tree, + address_queue_pubkey: address_merkle_tree.queue.into(), + address_merkle_tree_pubkey: address_merkle_tree.tree.into(), address_merkle_tree_root_index: rpc_proof_result.value.addresses[0].root_index, }; let compute_budget_ix = ComputeBudgetInstruction::set_compute_unit_limit(500_000); diff --git a/program-tests/client-test/tests/light_program_test.rs b/program-tests/client-test/tests/light_program_test.rs index 82ead2d3a2..374dd6a21c 100644 --- a/program-tests/client-test/tests/light_program_test.rs +++ b/program-tests/client-test/tests/light_program_test.rs @@ -638,7 +638,7 @@ async fn create_address( let output_account = light_compressed_account::compressed_account::CompressedAccount { lamports, - owner, + owner: owner.into(), data: None, address: Some(address), }; @@ -656,8 +656,8 
@@ async fn create_address( let new_address_params = NewAddressParams { seed: address_seed, - address_queue_pubkey: address_merkle_tree.queue, - address_merkle_tree_pubkey: address_merkle_tree.tree, + address_queue_pubkey: address_merkle_tree.queue.into(), + address_merkle_tree_pubkey: address_merkle_tree.tree.into(), address_merkle_tree_root_index: rpc_proof_result.value.addresses[0].root_index, }; let compute_budget_ix = ComputeBudgetInstruction::set_compute_unit_limit(500_000); diff --git a/program-tests/compressed-token-test/tests/test.rs b/program-tests/compressed-token-test/tests/test.rs index a7daa67727..d18b13c77a 100644 --- a/program-tests/compressed-token-test/tests/test.rs +++ b/program-tests/compressed-token-test/tests/test.rs @@ -1624,7 +1624,8 @@ async fn test_mint_to_and_burn_from_all_token_pools() { let change_account_merkle_tree = input_compressed_account .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); burn_test( &payer, &mut rpc, @@ -1867,7 +1868,8 @@ async fn test_delegation( let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); approve_test( &sender, &mut rpc, @@ -2007,7 +2009,8 @@ async fn test_delegation_mixed() { let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); approve_test( &sender, &mut rpc, @@ -2244,7 +2247,8 @@ async fn test_approve_failing() { let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let input_compressed_account_hashes = input_compressed_accounts .iter() @@ -2535,7 +2539,8 @@ async fn test_revoke(num_inputs: usize, mint_amount: u64, delegated_amount: u64) let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] 
.compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); approve_test( &sender, &mut rpc, @@ -2572,7 +2577,8 @@ async fn test_revoke(num_inputs: usize, mint_amount: u64, delegated_amount: u64) let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); revoke_test( &sender, &mut rpc, @@ -2658,7 +2664,8 @@ async fn test_revoke_failing() { let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); approve_test( &sender, &mut rpc, @@ -2885,7 +2892,8 @@ async fn test_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); burn_test( &sender, &mut rpc, @@ -2912,7 +2920,8 @@ async fn test_burn() { let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); approve_test( &sender, &mut rpc, @@ -2944,7 +2953,8 @@ async fn test_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); burn_test( &delegate, &mut rpc, @@ -2979,7 +2989,8 @@ async fn test_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); burn_test( &delegate, &mut rpc, @@ -3029,7 +3040,8 @@ async fn test_burn() { let invalid_change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .queue_pubkey; + .queue_pubkey + .into(); let mut additional_token_pool_accounts = (0..4) .map(|x| get_token_pool_pda_with_index(&mint, x)) .collect::>(); @@ -3158,7 +3170,8 @@ async fn failing_tests_burn() { let 
delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); approve_test( &sender, &mut rpc, @@ -3185,7 +3198,8 @@ async fn failing_tests_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let (_, _, _, _, instruction) = create_burn_test_instruction( &sender, &mut rpc, @@ -3218,7 +3232,8 @@ async fn failing_tests_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let (_, _, _, _, instruction) = create_burn_test_instruction( &delegate, &mut rpc, @@ -3259,7 +3274,8 @@ async fn failing_tests_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let (_, _, _, _, instruction) = create_burn_test_instruction( &sender, &mut rpc, @@ -3291,7 +3307,8 @@ async fn failing_tests_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let (_, _, _, _, instruction) = create_burn_test_instruction( &delegate, &mut rpc, @@ -3327,7 +3344,8 @@ async fn failing_tests_burn() { let change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let (_, _, _, _, instruction) = create_burn_test_instruction( &sender, &mut rpc, @@ -3364,7 +3382,8 @@ async fn failing_tests_burn() { let invalid_change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .queue_pubkey; + .queue_pubkey + .into(); let (_, _, _, _, instruction) = create_burn_test_instruction( &sender, &mut rpc, @@ -3401,7 +3420,8 @@ async fn failing_tests_burn() { let 
invalid_change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .queue_pubkey; + .queue_pubkey + .into(); let (_, _, _, _, instruction) = create_burn_test_instruction( &sender, &mut rpc, @@ -3433,7 +3453,8 @@ async fn failing_tests_burn() { let invalid_change_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .queue_pubkey; + .queue_pubkey + .into(); let (_, _, _, _, mut instruction) = create_burn_test_instruction( &sender, &mut rpc, @@ -3515,7 +3536,8 @@ async fn test_freeze_and_thaw(mint_amount: u64, delegated_amount: u64) { let output_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); freeze_test( &payer, @@ -3543,7 +3565,8 @@ async fn test_freeze_and_thaw(mint_amount: u64, delegated_amount: u64) { let output_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); thaw_test( &payer, &mut rpc, @@ -3565,7 +3588,8 @@ async fn test_freeze_and_thaw(mint_amount: u64, delegated_amount: u64) { let delegated_compressed_account_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); approve_test( &sender, &mut rpc, @@ -3591,7 +3615,8 @@ async fn test_freeze_and_thaw(mint_amount: u64, delegated_amount: u64) { let output_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); freeze_test( &payer, @@ -3619,7 +3644,8 @@ async fn test_freeze_and_thaw(mint_amount: u64, delegated_amount: u64) { let output_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); thaw_test( &payer, @@ -3706,7 +3732,8 @@ async fn test_failing_freeze() { let outputs_merkle_tree = input_compressed_accounts[0] .compressed_account 
.merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let input_compressed_account_hashes = input_compressed_accounts .iter() @@ -3863,7 +3890,8 @@ async fn test_failing_freeze() { let outputs_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); let input_compressed_account_hashes = input_compressed_accounts .iter() @@ -3974,7 +4002,8 @@ async fn test_failing_thaw() { let output_merkle_tree = input_compressed_accounts[0] .compressed_account .merkle_context - .merkle_tree_pubkey; + .merkle_tree_pubkey + .into(); freeze_test( &payer, @@ -4034,7 +4063,7 @@ async fn test_failing_thaw() { .map(|x| &x.compressed_account.compressed_account) .cloned() .collect::>(), - outputs_merkle_tree, + outputs_merkle_tree: outputs_merkle_tree.into(), root_indices: proof_rpc_result.value.get_root_indices().clone(), proof: proof_rpc_result.value.proof.0.unwrap(), }; @@ -4113,7 +4142,7 @@ async fn test_failing_thaw() { .map(|x| &x.compressed_account.compressed_account) .cloned() .collect::>(), - outputs_merkle_tree, + outputs_merkle_tree: outputs_merkle_tree.into(), root_indices: proof_rpc_result.value.get_root_indices().clone(), proof: invalid_proof, }; @@ -4175,7 +4204,7 @@ async fn test_failing_thaw() { .map(|x| &x.compressed_account.compressed_account) .cloned() .collect::>(), - outputs_merkle_tree, + outputs_merkle_tree: outputs_merkle_tree.into(), root_indices: proof_rpc_result.value.get_root_indices().clone(), proof: proof_rpc_result.value.proof.0.unwrap(), }; @@ -5259,8 +5288,8 @@ async fn perform_transfer_failing_test( &input_compressed_accounts .iter() .map(|x| MerkleContext { - merkle_tree_pubkey: *merkle_tree_pubkey, - queue_pubkey: *nullifier_queue_pubkey, + merkle_tree_pubkey: (*merkle_tree_pubkey).into(), + queue_pubkey: (*nullifier_queue_pubkey).into(), leaf_index: x.merkle_context.leaf_index, prove_by_index: false, tree_type: TreeType::StateV1, diff --git 
a/program-tests/create-address-test-program/src/create_pda.rs b/program-tests/create-address-test-program/src/create_pda.rs index d42f606160..eceefaceeb 100644 --- a/program-tests/create-address-test-program/src/create_pda.rs +++ b/program-tests/create-address-test-program/src/create_pda.rs @@ -97,9 +97,7 @@ fn create_compressed_pda_data( let compressed_account_data = CompressedAccountData { discriminator: 1u64.to_le_bytes(), data: timelock_compressed_pda.try_to_vec().unwrap(), - data_hash: timelock_compressed_pda - .hash::() - .map_err(ProgramError::from)?, + data_hash: timelock_compressed_pda.hash::().unwrap(), }; let mut discriminator_bytes = [0u8; 8]; @@ -117,7 +115,7 @@ fn create_compressed_pda_data( Ok(OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, // should be crate::ID, test can provide an invalid owner + owner: crate::ID.into(), // should be crate::ID, test can provide an invalid owner lamports: 0, address: Some(address), data: Some(compressed_account_data), diff --git a/program-tests/registry-test/tests/tests.rs b/program-tests/registry-test/tests/tests.rs index b197fb9c54..1b470f4184 100644 --- a/program-tests/registry-test/tests/tests.rs +++ b/program-tests/registry-test/tests/tests.rs @@ -1450,7 +1450,11 @@ async fn test_migrate_state() { .get_state_merkle_trees() .iter() .find(|b| { - b.accounts.merkle_tree == compressed_account.merkle_context.merkle_tree_pubkey + b.accounts.merkle_tree.to_bytes() + == compressed_account + .merkle_context + .merkle_tree_pubkey + .to_bytes() }) .unwrap(); assert_eq!(merkle_tree.root(), bundle.merkle_tree.root()); @@ -1498,7 +1502,11 @@ async fn test_migrate_state() { .get_state_merkle_trees_mut() .iter_mut() .find(|b| { - b.accounts.merkle_tree == compressed_account.merkle_context.merkle_tree_pubkey + b.accounts.merkle_tree.to_bytes() + == compressed_account + .merkle_context + .merkle_tree_pubkey + .to_bytes() }) .unwrap(); bundle @@ -1533,7 +1541,11 @@ async fn 
test_migrate_state() { .get_state_merkle_trees() .iter() .find(|b| { - b.accounts.merkle_tree == compressed_account.merkle_context.merkle_tree_pubkey + b.accounts.merkle_tree.to_bytes() + == compressed_account + .merkle_context + .merkle_tree_pubkey + .to_bytes() }) .unwrap(); assert_eq!(merkle_tree.root(), bundle.merkle_tree.root()); diff --git a/program-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs b/program-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs index 38fccc2a7a..72f9dabcb3 100644 --- a/program-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs +++ b/program-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs @@ -23,7 +23,6 @@ pub mod sdk_anchor_test { output_tree_index: u8, name: String, ) -> Result<()> { - let program_id = crate::ID.into(); let light_cpi_accounts = CpiAccounts::new( ctx.accounts.signer.as_ref(), ctx.remaining_accounts, @@ -46,7 +45,7 @@ pub mod sdk_anchor_test { }; let mut my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_init( - &program_id, + &crate::ID, Some(address), output_tree_index, ); @@ -76,9 +75,8 @@ pub mod sdk_anchor_test { account_meta: CompressedAccountMeta, nested_data: NestedData, ) -> Result<()> { - let program_id = crate::ID.into(); let mut my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_mut( - &program_id, + &crate::ID, &account_meta, my_compressed_account, ) diff --git a/program-tests/sdk-pinocchio-test/Cargo.toml b/program-tests/sdk-pinocchio-test/Cargo.toml new file mode 100644 index 0000000000..ecef2eae29 --- /dev/null +++ b/program-tests/sdk-pinocchio-test/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "sdk-pinocchio-test" +version = "1.0.0" +description = "Test program using generalized account compression" +repository = "https://github.com/Lightprotocol/light-protocol" +license = "Apache-2.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "sdk_pinocchio_test" + +[features] +no-entrypoint = [] +no-idl = [] 
+no-log-ix-name = [] +cpi = ["no-entrypoint"] +test-sbf = [] +default = [] + +[dependencies] +light-sdk-pinocchio = { path = "../../sdk-libs/sdk-pinocchio", features = [ + "v2", +] } +light-hasher = { workspace = true } +pinocchio = { workspace = true } +light-macros = { workspace = true } +borsh = { workspace = true } + +[dev-dependencies] +light-program-test = { workspace = true, features = ["devenv"] } +tokio = { workspace = true } +solana-sdk = { workspace = true } +light-hasher = { workspace = true, features = ["solana"] } +light-compressed-account = { workspace = true, features = ["solana"] } +light-sdk = { workspace = true, features = ["solana"] } + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] diff --git a/program-tests/sdk-pinocchio-test/Xargo.toml b/program-tests/sdk-pinocchio-test/Xargo.toml new file mode 100644 index 0000000000..475fb71ed1 --- /dev/null +++ b/program-tests/sdk-pinocchio-test/Xargo.toml @@ -0,0 +1,2 @@ +[target.bpfel-unknown-unknown.dependencies.std] +features = [] diff --git a/program-tests/sdk-pinocchio-test/src/create_pda.rs b/program-tests/sdk-pinocchio-test/src/create_pda.rs new file mode 100644 index 0000000000..b19d85bc54 --- /dev/null +++ b/program-tests/sdk-pinocchio-test/src/create_pda.rs @@ -0,0 +1,108 @@ +use borsh::{BorshDeserialize, BorshSerialize}; +use light_sdk_pinocchio::{ + account::LightAccount, + cpi::{CpiAccounts, CpiAccountsConfig, CpiInputs}, + error::LightSdkError, + hash_to_field_size::hashv_to_bn254_field_size_be_const_array, + instruction::tree_info::PackedAddressTreeInfo, + LightDiscriminator, LightHasher, NewAddressParamsPacked, ValidityProof, +}; +use pinocchio::{account_info::AccountInfo, msg}; + +/// CU usage: +/// - sdk pre system program cpi 10,942 CU +/// - total with V1 tree: 307,784 CU +/// - total with V2 tree: 138,876 CU +pub fn create_pda( + accounts: &[AccountInfo], + 
instruction_data: &[u8], +) -> Result<(), LightSdkError> { + let mut instruction_data = instruction_data; + msg!("create_pda"); + let instruction_data = CreatePdaInstructionData::deserialize(&mut instruction_data) + .map_err(|_| LightSdkError::Borsh)?; + msg!(format!("instruction_data: {:?}", instruction_data).as_str()); + let config = CpiAccountsConfig { + self_program: crate::ID, + cpi_context: false, + sol_pool_pda: false, + sol_compression_recipient: false, + }; + let cpi_accounts = CpiAccounts::new_with_config( + &accounts[0], + &accounts[instruction_data.system_accounts_offset as usize..], + config, + )?; + msg!(format!("cpi_accounts: {}", accounts.len()).as_str()); + + let address_tree_info = instruction_data.address_tree_info; + let (address, address_seed) = if BATCHED { + let tree_accounts = cpi_accounts.tree_accounts(); + msg!(format!("tree_accounts: {}", tree_accounts.len()).as_str()); + let index = tree_accounts[instruction_data + .address_tree_info + .address_merkle_tree_pubkey_index as usize] + .key(); + let address_seed = hashv_to_bn254_field_size_be_const_array::<3>(&[ + b"compressed", + instruction_data.data.as_slice(), + ])?; + msg!(format!("address_seed: {:?}", address_seed).as_str()); + let address = light_sdk_pinocchio::light_compressed_account::address::derive_address( + &address_seed, + index, + &crate::ID, + ); + msg!(format!("address: {:?}", address).as_str()); + (address, address_seed) + } else { + light_sdk_pinocchio::address::v1::derive_address( + &[b"compressed", instruction_data.data.as_slice()], + cpi_accounts.tree_accounts() + [address_tree_info.address_merkle_tree_pubkey_index as usize] + .key(), + &crate::ID, + ) + }; + msg!(format!("address: {:?}", address).as_str()); + let new_address_params = NewAddressParamsPacked { + seed: address_seed, + address_queue_account_index: address_tree_info.address_queue_pubkey_index, + address_merkle_tree_root_index: address_tree_info.root_index, + address_merkle_tree_account_index:
address_tree_info.address_merkle_tree_pubkey_index, + }; + + let mut my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_init( + &crate::ID, + Some(address), + instruction_data.output_merkle_tree_index, + ); + + my_compressed_account.data = instruction_data.data; + msg!(format!("my_compressed_account: {:?}", my_compressed_account).as_str()); + + let cpi_inputs = CpiInputs::new_with_address( + instruction_data.proof, + vec![my_compressed_account.to_account_info()?], + vec![new_address_params], + ); + cpi_inputs.invoke_light_system_program(cpi_accounts)?; + Ok(()) +} + +#[derive( + Clone, Debug, Default, LightHasher, LightDiscriminator, BorshDeserialize, BorshSerialize, +)] +pub struct MyCompressedAccount { + pub data: [u8; 31], +} + +#[derive(Clone, Debug, Default, BorshDeserialize, BorshSerialize)] +pub struct CreatePdaInstructionData { + pub proof: ValidityProof, + pub address_tree_info: PackedAddressTreeInfo, + pub output_merkle_tree_index: u8, + pub data: [u8; 31], + pub system_accounts_offset: u8, + pub tree_accounts_offset: u8, +} diff --git a/program-tests/sdk-pinocchio-test/src/lib.rs b/program-tests/sdk-pinocchio-test/src/lib.rs new file mode 100644 index 0000000000..564603cd38 --- /dev/null +++ b/program-tests/sdk-pinocchio-test/src/lib.rs @@ -0,0 +1,51 @@ +use light_sdk_pinocchio::error::LightSdkError; +use pinocchio::{ + account_info::AccountInfo, entrypoint, msg, program_error::ProgramError, pubkey::Pubkey, +}; + +pub mod create_pda; +pub mod update_pda; + +pub const ID: Pubkey = [ + 135, 152, 63, 145, 194, 241, 126, 41, 180, 254, 157, 105, 170, 129, 15, 255, 138, 167, 39, 151, + 70, 146, 233, 196, 238, 88, 139, 37, 169, 154, 138, 188, +]; + +entrypoint!(process_instruction); + +#[repr(u8)] +pub enum InstructionType { + CreatePdaBorsh = 0, + UpdatePdaBorsh = 1, +} + +impl TryFrom<u8> for InstructionType { + type Error = LightSdkError; + + fn try_from(value: u8) -> Result<Self, Self::Error> { + match value { + 0 => Ok(InstructionType::CreatePdaBorsh), + 1 => 
Ok(InstructionType::UpdatePdaBorsh), + _ => panic!("Invalid instruction discriminator."), + } + } +} + +pub fn process_instruction( + _program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), ProgramError> { + msg!(format!("instruction_data: {:?}", instruction_data[..8].to_vec()).as_str()); + let discriminator = InstructionType::try_from(instruction_data[0]).unwrap(); + msg!(format!("instruction_data: {:?}", instruction_data[..8].to_vec()).as_str()); + match discriminator { + InstructionType::CreatePdaBorsh => { + create_pda::create_pda::(accounts, &instruction_data[1..]) + } + InstructionType::UpdatePdaBorsh => { + update_pda::update_pda::(accounts, &instruction_data[1..]) + } + }?; + Ok(()) +} diff --git a/program-tests/sdk-pinocchio-test/src/update_pda.rs b/program-tests/sdk-pinocchio-test/src/update_pda.rs new file mode 100644 index 0000000000..807c5a3f9d --- /dev/null +++ b/program-tests/sdk-pinocchio-test/src/update_pda.rs @@ -0,0 +1,66 @@ +use borsh::{BorshDeserialize, BorshSerialize}; +use light_sdk_pinocchio::{ + account::LightAccount, + cpi::{CpiAccounts, CpiAccountsConfig, CpiInputs}, + error::LightSdkError, + instruction::account_meta::CompressedAccountMeta, + ValidityProof, +}; +use pinocchio::account_info::AccountInfo; + +use crate::create_pda::MyCompressedAccount; + +/// CU usage: +/// - sdk pre system program 9,183k CU +/// - total with V2 tree: 50,194 CU (proof by index) +pub fn update_pda( + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), LightSdkError> { + let mut instruction_data = instruction_data; + let instruction_data = UpdatePdaInstructionData::deserialize(&mut instruction_data) + .map_err(|_| LightSdkError::Borsh)?; + + let mut my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_mut( + &crate::ID, + &instruction_data.my_compressed_account.meta, + MyCompressedAccount { + data: instruction_data.my_compressed_account.data, + }, + )?; + + my_compressed_account.data = 
instruction_data.new_data; + + let config = CpiAccountsConfig { + self_program: crate::ID, + cpi_context: false, + sol_pool_pda: false, + sol_compression_recipient: false, + }; + let cpi_accounts = CpiAccounts::new_with_config( + &accounts[0], + &accounts[instruction_data.system_accounts_offset as usize..], + config, + )?; + let cpi_inputs = CpiInputs::new( + instruction_data.proof, + vec![my_compressed_account.to_account_info()?], + ); + cpi_inputs.invoke_light_system_program(cpi_accounts)?; + + Ok(()) +} + +#[derive(Clone, Debug, Default, BorshDeserialize, BorshSerialize)] +pub struct UpdatePdaInstructionData { + pub proof: ValidityProof, + pub my_compressed_account: UpdateMyCompressedAccount, + pub new_data: [u8; 31], + pub system_accounts_offset: u8, +} + +#[derive(Clone, Debug, Default, BorshDeserialize, BorshSerialize)] +pub struct UpdateMyCompressedAccount { + pub meta: CompressedAccountMeta, + pub data: [u8; 31], +} diff --git a/program-tests/sdk-pinocchio-test/tests/test.rs b/program-tests/sdk-pinocchio-test/tests/test.rs new file mode 100644 index 0000000000..f530262ef8 --- /dev/null +++ b/program-tests/sdk-pinocchio-test/tests/test.rs @@ -0,0 +1,215 @@ +#![cfg(feature = "test-sbf")] + +use borsh::BorshSerialize; +use light_compressed_account::{ + address::derive_address, compressed_account::CompressedAccountWithMerkleContext, + hashv_to_bn254_field_size_be, +}; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, accounts::SystemAccountMetaConfig, + pack_accounts::PackedAccounts, +}; +use sdk_pinocchio_test::{ + create_pda::CreatePdaInstructionData, + update_pda::{UpdateMyCompressedAccount, UpdatePdaInstructionData}, +}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_sdk_test() { + let config = ProgramTestConfig::new_v2( + false, 
+ Some(vec![( + "sdk_pinocchio_test", + Pubkey::new_from_array(sdk_pinocchio_test::ID), + )]), + ); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_pubkey = rpc.get_address_merkle_tree_v2(); + let account_data = [1u8; 31]; + + // // V1 trees + // let (address, _) = light_sdk::address::derive_address( + // &[b"compressed", &account_data], + // &address_tree_info, + // &Pubkey::new_from_array(sdk_pinocchio_test::ID), + // ); + // Batched trees + let address_seed = hashv_to_bn254_field_size_be(&[b"compressed", account_data.as_slice()]); + println!("seed {:?}", address_seed); + let address = derive_address( + &address_seed, + &address_tree_pubkey.to_bytes(), + &sdk_pinocchio_test::ID, + ); + println!("address {:?}", address); + println!("address tree pubkey: {:?}", address_tree_pubkey.to_bytes()); + let output_queue = rpc.get_state_merkle_tree_v2().output_queue; + println!("output_queue tree pubkey: {:?}", output_queue.to_bytes()); + + create_pda( + &payer, + &mut rpc, + &output_queue, + account_data, + address_tree_pubkey, + address, + ) + .await + .unwrap(); + + let compressed_pda = rpc + .indexer() + .unwrap() + .get_compressed_accounts_by_owner( + &Pubkey::new_from_array(sdk_pinocchio_test::ID), + None, + None, + ) + .await + .unwrap() + .value + .items[0] + .clone(); + assert_eq!(compressed_pda.address.unwrap(), address); + + update_pda(&payer, &mut rpc, [2u8; 31], compressed_pda.into()) + .await + .unwrap(); +} + +pub async fn create_pda( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + account_data: [u8; 31], + address_tree_pubkey: Pubkey, + address: [u8; 32], +) -> Result<(), RpcError> { + let system_account_meta_config = + SystemAccountMetaConfig::new(Pubkey::new_from_array(sdk_pinocchio_test::ID)); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + 
accounts.add_system_accounts(system_account_meta_config); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? + .value; + + let output_merkle_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (accounts, system_accounts_offset, tree_accounts_offset) = accounts.to_account_metas(); + let instruction_data = CreatePdaInstructionData { + proof: rpc_result.proof, + address_tree_info: light_sdk_pinocchio::PackedAddressTreeInfo { + address_merkle_tree_pubkey_index: packed_address_tree_info + .address_merkle_tree_pubkey_index, + address_queue_pubkey_index: packed_address_tree_info.address_queue_pubkey_index, + root_index: packed_address_tree_info.root_index, + }, + data: account_data, + output_merkle_tree_index, + system_accounts_offset: system_accounts_offset as u8, + tree_accounts_offset: tree_accounts_offset as u8, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + for (index, account) in accounts.iter().enumerate() { + println!("Account {}: {:?}", index, account.pubkey.to_bytes()); + } + let instruction = Instruction { + program_id: Pubkey::new_from_array(sdk_pinocchio_test::ID), + accounts, + data: [&[0u8][..], &inputs[..]].concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn update_pda( + payer: &Keypair, + rpc: &mut LightProgramTest, + new_account_data: [u8; 31], + compressed_account: CompressedAccountWithMerkleContext, +) -> Result<(), RpcError> { + let system_account_meta_config = + SystemAccountMetaConfig::new(Pubkey::new_from_array(sdk_pinocchio_test::ID)); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config); + + let rpc_result = rpc + 
.get_validity_proof(vec![compressed_account.hash().unwrap()], vec![], None) + .await? + .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let light_sdk_meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + // Convert to pinocchio CompressedAccountMeta + let meta = light_sdk_pinocchio::CompressedAccountMeta { + tree_info: light_sdk_pinocchio::PackedStateTreeInfo { + root_index: light_sdk_meta.tree_info.root_index, + prove_by_index: light_sdk_meta.tree_info.prove_by_index, + merkle_tree_pubkey_index: light_sdk_meta.tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: light_sdk_meta.tree_info.queue_pubkey_index, + leaf_index: light_sdk_meta.tree_info.leaf_index, + }, + address: light_sdk_meta.address, + output_state_tree_index: light_sdk_meta.output_state_tree_index, + }; + + let (accounts, system_accounts_offset, _) = accounts.to_account_metas(); + let instruction_data = UpdatePdaInstructionData { + my_compressed_account: UpdateMyCompressedAccount { + meta, + data: compressed_account + .compressed_account + .data + .unwrap() + .data + .try_into() + .unwrap(), + }, + proof: light_sdk_pinocchio::ValidityProof(None), + new_data: new_account_data, + system_accounts_offset: system_accounts_offset as u8, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: Pubkey::new_from_array(sdk_pinocchio_test::ID), + accounts, + data: [&[1u8][..], &inputs[..]].concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} diff --git a/program-tests/sdk-test/src/create_pda.rs b/program-tests/sdk-test/src/create_pda.rs index cd0b9e8273..980c47678e 100644 --- a/program-tests/sdk-test/src/create_pda.rs +++ b/program-tests/sdk-test/src/create_pda.rs @@ 
-10,7 +10,7 @@ use light_sdk::{ instruction::tree_info::PackedAddressTreeInfo, LightDiscriminator, LightHasher, NewAddressParamsPacked, ValidityProof, }; -use solana_program::{account_info::AccountInfo, program_error::ProgramError}; +use solana_program::account_info::AccountInfo; /// CU usage: /// - sdk pre system program cpi 10,942 CU @@ -41,7 +41,7 @@ pub fn create_pda( b"compressed", instruction_data.data.as_slice(), ]) - .map_err(ProgramError::from)?; + .unwrap(); let address = light_compressed_account::address::derive_address( &address_seed, &cpi_accounts.tree_accounts()[instruction_data @@ -69,9 +69,8 @@ pub fn create_pda( address_merkle_tree_account_index: address_tree_info.address_merkle_tree_pubkey_index, }; - let program_id = crate::ID.into(); let mut my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_init( - &program_id, + &crate::ID, Some(address), instruction_data.output_merkle_tree_index, ); diff --git a/program-tests/sdk-test/src/update_pda.rs b/program-tests/sdk-test/src/update_pda.rs index 07c2a0ccb4..6aa4c75cb5 100644 --- a/program-tests/sdk-test/src/update_pda.rs +++ b/program-tests/sdk-test/src/update_pda.rs @@ -24,9 +24,8 @@ pub fn update_pda( let instruction_data = UpdatePdaInstructionData::deserialize(&mut instruction_data) .map_err(|_| LightSdkError::Borsh)?; - let program_id = crate::ID.into(); let mut my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_mut( - &program_id, + &crate::ID, &instruction_data.my_compressed_account.meta, MyCompressedAccount { data: instruction_data.my_compressed_account.data, diff --git a/program-tests/sdk-test/tests/test.rs b/program-tests/sdk-test/tests/test.rs index 6c223045ce..79f8a7068a 100644 --- a/program-tests/sdk-test/tests/test.rs +++ b/program-tests/sdk-test/tests/test.rs @@ -1,4 +1,4 @@ -// #![cfg(feature = "test-sbf")] +#![cfg(feature = "test-sbf")] use borsh::BorshSerialize; use light_compressed_account::{ diff --git a/program-tests/system-cpi-test/Cargo.toml 
b/program-tests/system-cpi-test/Cargo.toml index b49b5dfb54..a2bfa77ddb 100644 --- a/program-tests/system-cpi-test/Cargo.toml +++ b/program-tests/system-cpi-test/Cargo.toml @@ -34,12 +34,12 @@ light-account-checks = { workspace = true } [target.'cfg(not(target_os = "solana"))'.dependencies] solana-sdk = { workspace = true } +light-test-utils = { workspace = true, features = ["devenv"] } [dev-dependencies] light-client = { workspace = true, features = ["devenv"] } light-sdk = { workspace = true, features = ["anchor"] } light-program-test = { workspace = true, features = ["devenv"] } -light-test-utils = { workspace = true, features = ["devenv"] } tokio = { workspace = true } light-prover-client = { workspace = true, features = ["devenv"] } light-verifier = { workspace = true } diff --git a/program-tests/system-cpi-test/src/create_pda.rs b/program-tests/system-cpi-test/src/create_pda.rs index 74f1c20e93..a5914ee079 100644 --- a/program-tests/system-cpi-test/src/create_pda.rs +++ b/program-tests/system-cpi-test/src/create_pda.rs @@ -250,7 +250,7 @@ fn cpi_compressed_pda_transfer_as_program<'info>( CreatePdaMode::WriteToAccountNotOwned => { // account with data needs to be owned by the program let mut compressed_pda = compressed_pda; - compressed_pda.compressed_account.owner = ctx.accounts.signer.key(); + compressed_pda.compressed_account.owner = ctx.accounts.signer.key().into(); compressed_pda } CreatePdaMode::NoData => { @@ -489,9 +489,7 @@ fn create_compressed_pda_data( let compressed_account_data = CompressedAccountData { discriminator: 1u64.to_le_bytes(), data: timelock_compressed_pda.try_to_vec().unwrap(), - data_hash: timelock_compressed_pda - .hash::() - .map_err(ProgramError::from)?, + data_hash: timelock_compressed_pda.hash::().unwrap(), }; let discriminator_bytes = &ctx.remaining_accounts [new_address_params.address_merkle_tree_account_index as usize] @@ -500,10 +498,11 @@ fn create_compressed_pda_data( let address = match discriminator_bytes { 
AddressMerkleTreeAccount::DISCRIMINATOR => derive_address_legacy( &ctx.remaining_accounts[new_address_params.address_merkle_tree_account_index as usize] - .key(), + .key() + .into(), &new_address_params.seed, ) - .map_err(ProgramError::from)?, + .unwrap(), BatchedMerkleTreeAccount::LIGHT_DISCRIMINATOR_SLICE => derive_address( &new_address_params.seed, &ctx.remaining_accounts[new_address_params.address_merkle_tree_account_index as usize] @@ -529,7 +528,7 @@ fn create_compressed_pda_data( Ok(OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: *owner_program, // should be crate::ID, test can provide an invalid owner + owner: owner_program.into(), // should be crate::ID, test can provide an invalid owner lamports: 0, address: Some(address), data: Some(compressed_account_data), diff --git a/program-tests/system-cpi-test/src/invalidate_not_owned_account.rs b/program-tests/system-cpi-test/src/invalidate_not_owned_account.rs index 60fb9cb11d..e44f2a98b7 100644 --- a/program-tests/system-cpi-test/src/invalidate_not_owned_account.rs +++ b/program-tests/system-cpi-test/src/invalidate_not_owned_account.rs @@ -127,7 +127,7 @@ pub fn process_invalidate_not_owned_compressed_account<'info>( vec![OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { data: compressed_account.compressed_account.data.clone(), - owner: light_compressed_token::ID, + owner: light_compressed_token::ID.into(), lamports: 0, address: compressed_account.compressed_account.address, }, @@ -611,7 +611,7 @@ fn write_into_cpi_account<'info>( let compressed_pda = OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { data: compressed_account.compressed_account.data.clone(), - owner: ID, + owner: ID.into(), lamports: 0, address: compressed_account.compressed_account.address, }, diff --git a/program-tests/system-cpi-test/src/sdk.rs b/program-tests/system-cpi-test/src/sdk.rs index 1ebb015e39..7abb189bf8 100644 --- 
a/program-tests/system-cpi-test/src/sdk.rs +++ b/program-tests/system-cpi-test/src/sdk.rs @@ -22,6 +22,7 @@ use light_compressed_token::{ get_token_pool_pda, process_transfer::transfer_sdk::to_account_metas, }; use light_system_program::utils::get_registered_program_pda; +use light_test_utils::e2e_test_env::to_account_metas_light; use solana_sdk::{instruction::Instruction, pubkey::Pubkey}; use crate::CreatePdaMode; @@ -46,9 +47,11 @@ pub struct CreateCompressedPdaInstructionInputs<'a> { pub fn create_pda_instruction(input_params: CreateCompressedPdaInstructionInputs) -> Instruction { let (cpi_signer, bump) = Pubkey::find_program_address(&[CPI_AUTHORITY_PDA_SEED], &crate::id()); - let mut remaining_accounts = HashMap::new(); + let mut remaining_accounts = HashMap::::new(); remaining_accounts.insert( - *input_params.output_compressed_account_merkle_tree_pubkey, + input_params + .output_compressed_account_merkle_tree_pubkey + .into(), 0, ); let new_address_params = @@ -110,7 +113,7 @@ pub fn create_pda_instruction(input_params: CreateCompressedPdaInstructionInputs cpi_signer, system_program: solana_sdk::system_program::id(), }; - let remaining_accounts = to_account_metas(remaining_accounts); + let remaining_accounts = to_account_metas_light(remaining_accounts); Instruction { program_id: crate::ID, diff --git a/program-tests/system-cpi-test/tests/test.rs b/program-tests/system-cpi-test/tests/test.rs index 95fae7fd49..77dd6d4c7c 100644 --- a/program-tests/system-cpi-test/tests/test.rs +++ b/program-tests/system-cpi-test/tests/test.rs @@ -221,7 +221,8 @@ async fn test_read_only_accounts() { .iter() .find(|x| { x.merkle_context.leaf_index == 101 - && x.merkle_context.merkle_tree_pubkey == env.v2_state_trees[0].merkle_tree + && x.merkle_context.merkle_tree_pubkey.to_bytes() + == env.v2_state_trees[0].merkle_tree.to_bytes() }) .unwrap() .clone(); @@ -232,7 +233,8 @@ async fn test_read_only_accounts() { .iter() .find(|x| { x.merkle_context.leaf_index == 1 - && 
x.merkle_context.merkle_tree_pubkey == env.v2_state_trees[0].merkle_tree + && x.merkle_context.merkle_tree_pubkey.to_bytes() + == env.v2_state_trees[0].merkle_tree.to_bytes() }) .unwrap() .clone(); @@ -328,7 +330,10 @@ async fn test_read_only_accounts() { .indexer .get_compressed_accounts_with_merkle_context_by_owner(&ID) .iter() - .find(|x| x.merkle_context.merkle_tree_pubkey == env.v1_state_trees[0].merkle_tree) + .find(|x| { + x.merkle_context.merkle_tree_pubkey.to_bytes() + == env.v1_state_trees[0].merkle_tree.to_bytes() + }) .unwrap() .clone(); let result = perform_create_pda_with_event( @@ -614,7 +619,8 @@ async fn test_read_only_accounts() { .iter() .find(|x| { x.merkle_context.leaf_index == 2 - && x.merkle_context.merkle_tree_pubkey == env.v2_state_trees[0].merkle_tree + && x.merkle_context.merkle_tree_pubkey.to_bytes() + == env.v2_state_trees[0].merkle_tree.to_bytes() && x.merkle_context.leaf_index != account_not_in_value_array_and_in_mt .merkle_context @@ -1710,7 +1716,8 @@ async fn perform_create_pda( (address, env.v2_address_trees[0], env.v2_address_trees[0]) } else { - let address = derive_address_legacy(&env.v1_address_trees[0].merkle_tree, &seed).unwrap(); + let address = + derive_address_legacy(&env.v1_address_trees[0].merkle_tree.into(), &seed).unwrap(); ( address, env.v1_address_trees[0].merkle_tree, @@ -1795,14 +1802,14 @@ async fn perform_create_pda( .collect(); let new_address_params = NewAddressParams { seed, - address_merkle_tree_pubkey, - address_queue_pubkey, + address_merkle_tree_pubkey: address_merkle_tree_pubkey.into(), + address_queue_pubkey: address_queue_pubkey.into(), address_merkle_tree_root_index: address_root_indices[0], }; let readonly_adresses = if addresses.len() == 2 && mode != CreatePdaMode::TwoReadOnlyAddresses { let read_only_address = vec![ReadOnlyAddress { address: addresses[1], - address_merkle_tree_pubkey, + address_merkle_tree_pubkey: address_merkle_tree_pubkey.into(), address_merkle_tree_root_index: 
address_root_indices[1], }]; Some(read_only_address) @@ -1810,12 +1817,12 @@ async fn perform_create_pda( let read_only_address = vec![ ReadOnlyAddress { address: addresses[0], - address_merkle_tree_pubkey, + address_merkle_tree_pubkey: address_merkle_tree_pubkey.into(), address_merkle_tree_root_index: address_root_indices[0], }, ReadOnlyAddress { address: addresses[1], - address_merkle_tree_pubkey, + address_merkle_tree_pubkey: address_merkle_tree_pubkey.into(), address_merkle_tree_root_index: address_root_indices[1], }, ]; @@ -1871,18 +1878,21 @@ pub async fn assert_created_pda( seed: &[u8; 32], data: &[u8; 31], ) { - let compressed_escrow_pda = test_indexer - .get_compressed_accounts_with_merkle_context_by_owner(&ID) - .iter() - .find(|x| x.compressed_account.owner == ID) - .unwrap() - .clone(); - let address = derive_address_legacy(&env.v1_address_trees[0].merkle_tree, seed).unwrap(); + let compressed_escrow_pda = + test_indexer.get_compressed_accounts_with_merkle_context_by_owner(&ID)[0].clone(); + let address = derive_address_legacy(&env.v1_address_trees[0].merkle_tree.into(), seed).unwrap(); assert_eq!( - compressed_escrow_pda.compressed_account.address.unwrap(), + compressed_escrow_pda + .compressed_account + .address + .clone() + .unwrap(), address ); - assert_eq!(compressed_escrow_pda.compressed_account.owner, ID); + assert_eq!( + compressed_escrow_pda.compressed_account.owner.to_bytes(), + ID.to_bytes() + ); let compressed_escrow_pda_deserialized = compressed_escrow_pda .compressed_account .data @@ -1958,7 +1968,7 @@ pub async fn perform_with_input_accounts>(), output_compressed_accounts: output_accounts.to_vec(), sequence_numbers: vec![MerkleTreeSequenceNumberV1 { - tree_pubkey: env.v2_state_trees[0].merkle_tree, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), // queue_pubkey: env.v2_state_trees[0].output_queue, // tree_type: TreeType::StateV2 as u64, seq: 0, @@ -96,7 +100,7 @@ async fn parse_batched_event_functional() { message: None, 
is_compress: false, compress_or_decompress_lamports: None, - pubkey_array: vec![env.v2_state_trees[0].output_queue], + pubkey_array: vec![env.v2_state_trees[0].output_queue.into()], }, address_sequence_numbers: Vec::new(), input_sequence_numbers: Vec::new(), @@ -116,17 +120,17 @@ async fn parse_batched_event_functional() { .map(|i| { get_compressed_input_account(MerkleContext { leaf_index: i, - merkle_tree_pubkey: env.v2_state_trees[0].merkle_tree, + merkle_tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), prove_by_index: true, - queue_pubkey: env.v2_state_trees[0].output_queue, + queue_pubkey: env.v2_state_trees[0].output_queue.into(), tree_type: light_compressed_account::TreeType::StateV2, }) }) .collect::>(); let new_addresses = vec![ - derive_address_legacy(&env.v1_address_trees[0].merkle_tree, &[1u8; 32]).unwrap(), - derive_address_legacy(&env.v1_address_trees[0].merkle_tree, &[2u8; 32]).unwrap(), + derive_address_legacy(&env.v1_address_trees[0].merkle_tree.into(), &[1u8; 32]).unwrap(), + derive_address_legacy(&env.v1_address_trees[0].merkle_tree.into(), &[2u8; 32]).unwrap(), ]; let payer = rpc.get_payer().insecure_clone(); @@ -147,15 +151,15 @@ async fn parse_batched_event_functional() { let new_address_params = vec![ NewAddressParamsAssigned { seed: [1u8; 32], - address_queue_pubkey: env.v1_address_trees[0].queue, - address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree, + address_queue_pubkey: env.v1_address_trees[0].queue.into(), + address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree.into(), address_merkle_tree_root_index: proof_result.get_address_root_indices()[0], assigned_account_index: None, }, NewAddressParamsAssigned { seed: [2u8; 32], - address_queue_pubkey: env.v1_address_trees[0].queue, - address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree, + address_queue_pubkey: env.v1_address_trees[0].queue.into(), + address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree.into(), address_merkle_tree_root_index: 
proof_result.get_address_root_indices()[1], assigned_account_index: None, }, @@ -183,7 +187,11 @@ async fn parse_batched_event_functional() { .enumerate() .map(|(i, x)| { x.compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &((i + 8) as u32), true) + .hash( + &env.v2_state_trees[0].merkle_tree.into(), + &((i + 8) as u32), + true, + ) .unwrap() }) .collect::>(); @@ -210,13 +218,17 @@ async fn parse_batched_event_functional() { .enumerate() .map(|(i, x)| { x.compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &((i + 8) as u32), true) + .hash( + &env.v2_state_trees[0].merkle_tree.into(), + &((i + 8) as u32), + true, + ) .unwrap() }) .collect::>(), output_compressed_accounts: output_accounts.to_vec(), sequence_numbers: vec![MerkleTreeSequenceNumberV1 { - tree_pubkey: env.v2_state_trees[0].merkle_tree, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), // queue_pubkey: env.v2_state_trees[0].output_queue, // tree_type: TreeType::StateV2 as u64, seq: 8, @@ -226,16 +238,16 @@ async fn parse_batched_event_functional() { is_compress: false, compress_or_decompress_lamports: None, pubkey_array: vec![ - env.v1_address_trees[0].merkle_tree, - env.v1_address_trees[0].queue, - env.v2_state_trees[0].merkle_tree, - env.v2_state_trees[0].output_queue, + env.v1_address_trees[0].merkle_tree.into(), + env.v1_address_trees[0].queue.into(), + env.v2_state_trees[0].merkle_tree.into(), + env.v2_state_trees[0].output_queue.into(), ], }, address_sequence_numbers: Vec::new(), input_sequence_numbers: vec![MerkleTreeSequenceNumber { - tree_pubkey: env.v2_state_trees[0].merkle_tree, - queue_pubkey: env.v2_state_trees[0].output_queue, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), + queue_pubkey: env.v2_state_trees[0].output_queue.into(), tree_type: TreeType::StateV2 as u64, seq: 0, }], @@ -244,7 +256,7 @@ async fn parse_batched_event_functional() { .iter() .map(|x| NewAddress { address: *x, - mt_pubkey: env.v1_address_trees[0].merkle_tree, + mt_pubkey: 
env.v1_address_trees[0].merkle_tree.into(), queue_index: u64::MAX, }) .collect(), @@ -261,9 +273,9 @@ async fn parse_batched_event_functional() { .map(|i| { get_compressed_input_account(MerkleContext { leaf_index: i, - merkle_tree_pubkey: env.v2_state_trees[0].merkle_tree, + merkle_tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), prove_by_index: true, - queue_pubkey: env.v2_state_trees[0].output_queue, + queue_pubkey: env.v2_state_trees[0].output_queue.into(), tree_type: light_compressed_account::TreeType::StateV2, }) }) @@ -300,15 +312,15 @@ async fn parse_batched_event_functional() { let new_address_params = vec![ NewAddressParamsAssigned { seed: [1u8; 32], - address_queue_pubkey: env.v2_address_trees[0], - address_merkle_tree_pubkey: env.v2_address_trees[0], + address_queue_pubkey: env.v2_address_trees[0].into(), + address_merkle_tree_pubkey: env.v2_address_trees[0].into(), address_merkle_tree_root_index: proof_result.get_address_root_indices()[0], assigned_account_index: None, }, NewAddressParamsAssigned { seed: [2u8; 32], - address_queue_pubkey: env.v2_address_trees[0], - address_merkle_tree_pubkey: env.v2_address_trees[0], + address_queue_pubkey: env.v2_address_trees[0].into(), + address_merkle_tree_pubkey: env.v2_address_trees[0].into(), address_merkle_tree_root_index: proof_result.get_address_root_indices()[1], assigned_account_index: None, }, @@ -332,7 +344,7 @@ async fn parse_batched_event_functional() { .map(|x| { x.compressed_account .hash( - &env.v2_state_trees[0].merkle_tree, + &env.v2_state_trees[0].merkle_tree.into(), &x.merkle_context.leaf_index, true, ) @@ -344,7 +356,11 @@ async fn parse_batched_event_functional() { .enumerate() .map(|(i, x)| { x.compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &((i + 16) as u32), true) + .hash( + &env.v2_state_trees[0].merkle_tree.into(), + &((i + 16) as u32), + true, + ) .unwrap() }) .collect::>(); @@ -371,13 +387,17 @@ async fn parse_batched_event_functional() { .enumerate() .map(|(i, x)| { 
x.compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &((i + 16) as u32), true) + .hash( + &env.v2_state_trees[0].merkle_tree.into(), + &((i + 16) as u32), + true, + ) .unwrap() }) .collect::>(), output_compressed_accounts: output_accounts.to_vec(), sequence_numbers: vec![MerkleTreeSequenceNumberV1 { - tree_pubkey: env.v2_state_trees[0].merkle_tree, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), // queue_pubkey: env.v2_state_trees[0].output_queue, // tree_type: TreeType::StateV2 as u64, seq: 16, @@ -387,20 +407,20 @@ async fn parse_batched_event_functional() { is_compress: false, compress_or_decompress_lamports: None, pubkey_array: vec![ - env.v2_address_trees[0], - env.v2_state_trees[0].merkle_tree, - env.v2_state_trees[0].output_queue, + env.v2_address_trees[0].into(), + env.v2_state_trees[0].merkle_tree.into(), + env.v2_state_trees[0].output_queue.into(), ], }, address_sequence_numbers: vec![MerkleTreeSequenceNumber { - tree_pubkey: env.v2_address_trees[0], - queue_pubkey: Pubkey::default(), + tree_pubkey: env.v2_address_trees[0].into(), + queue_pubkey: Pubkey::default().into(), tree_type: TreeType::AddressV2 as u64, seq: 0, }], input_sequence_numbers: vec![MerkleTreeSequenceNumber { - tree_pubkey: env.v2_state_trees[0].merkle_tree, - queue_pubkey: env.v2_state_trees[0].output_queue, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), + queue_pubkey: env.v2_state_trees[0].output_queue.into(), tree_type: TreeType::StateV2 as u64, seq: 8, }], @@ -410,7 +430,7 @@ async fn parse_batched_event_functional() { .enumerate() .map(|(i, x)| NewAddress { address: *x, - mt_pubkey: env.v2_address_trees[0], + mt_pubkey: env.v2_address_trees[0].into(), queue_index: i as u64, }) .collect(), @@ -462,11 +482,11 @@ async fn parse_multiple_batched_events_functional() { output_leaf_indices: vec![0], output_compressed_account_hashes: vec![output_accounts[0] .compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &0u32, true) + 
.hash(&env.v2_state_trees[0].merkle_tree.into(), &0u32, true) .unwrap()], output_compressed_accounts: output_accounts.to_vec(), sequence_numbers: vec![MerkleTreeSequenceNumberV1 { - tree_pubkey: env.v2_state_trees[0].merkle_tree, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), // queue_pubkey: env.v2_state_trees[0].output_queue, // tree_type: TreeType::StateV2 as u64, seq: 0, @@ -475,7 +495,7 @@ async fn parse_multiple_batched_events_functional() { message: None, is_compress: false, compress_or_decompress_lamports: None, - pubkey_array: vec![env.v2_state_trees[0].output_queue], + pubkey_array: vec![env.v2_state_trees[0].output_queue.into()], }, address_sequence_numbers: Vec::new(), input_sequence_numbers: Vec::new(), @@ -487,7 +507,7 @@ async fn parse_multiple_batched_events_functional() { for i in 1..num_expected_events { let mut expected_event = expected_batched_event.clone(); expected_event.event.sequence_numbers = vec![MerkleTreeSequenceNumberV1 { - tree_pubkey: env.v2_state_trees[0].merkle_tree, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), // queue_pubkey: env.v2_state_trees[0].output_queue, // tree_type: TreeType::StateV2 as u64, seq: i as u64, @@ -495,7 +515,7 @@ async fn parse_multiple_batched_events_functional() { expected_event.event.output_compressed_account_hashes = vec![output_accounts[0] .clone() .compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &(i as u32), true) + .hash(&env.v2_state_trees[0].merkle_tree.into(), &(i as u32), true) .unwrap()]; expected_event.event.output_leaf_indices = vec![i as u32]; assert_eq!(events[i as usize], expected_event); @@ -553,11 +573,11 @@ async fn generate_photon_test_data_multiple_events() { output_leaf_indices: vec![0], output_compressed_account_hashes: vec![output_accounts[0] .compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &0u32, true) + .hash(&env.v2_state_trees[0].merkle_tree.into(), &0u32, true) .unwrap()], output_compressed_accounts: output_accounts.to_vec(), 
sequence_numbers: vec![MerkleTreeSequenceNumberV1 { - tree_pubkey: env.v2_state_trees[0].merkle_tree, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), // queue_pubkey: env.v2_state_trees[0].output_queue, // tree_type: TreeType::StateV2 as u64, seq: 0, @@ -566,7 +586,7 @@ async fn generate_photon_test_data_multiple_events() { message: None, is_compress: false, compress_or_decompress_lamports: None, - pubkey_array: vec![env.v2_state_trees[0].output_queue], + pubkey_array: vec![env.v2_state_trees[0].output_queue.into()], }, address_sequence_numbers: Vec::new(), input_sequence_numbers: Vec::new(), @@ -578,7 +598,7 @@ async fn generate_photon_test_data_multiple_events() { for i in 1..num_expected_events { let mut expected_event = expected_batched_event.clone(); expected_event.event.sequence_numbers = vec![MerkleTreeSequenceNumberV1 { - tree_pubkey: env.v2_state_trees[0].merkle_tree, + tree_pubkey: env.v2_state_trees[0].merkle_tree.into(), // queue_pubkey: env.v2_state_trees[0].output_queue, // tree_type: TreeType::StateV2 as u64, seq: i as u64, @@ -586,7 +606,7 @@ async fn generate_photon_test_data_multiple_events() { expected_event.event.output_compressed_account_hashes = vec![output_accounts[0] .clone() .compressed_account - .hash(&env.v2_state_trees[0].merkle_tree, &(i as u32), true) + .hash(&env.v2_state_trees[0].merkle_tree.into(), &(i as u32), true) .unwrap()]; expected_event.event.output_leaf_indices = vec![i as u32]; assert_eq!(events[i as usize], expected_event); @@ -599,7 +619,7 @@ pub fn get_compressed_input_account( ) -> CompressedAccountWithMerkleContext { CompressedAccountWithMerkleContext { compressed_account: CompressedAccount { - owner: create_address_test_program::ID, + owner: create_address_test_program::ID.into(), lamports: 0, address: None, data: Some(CompressedAccountData { @@ -618,7 +638,7 @@ pub fn get_compressed_output_account( ) -> OutputCompressedAccountWithContext { OutputCompressedAccountWithContext { compressed_account: 
CompressedAccount { - owner: create_address_test_program::ID, + owner: create_address_test_program::ID.into(), lamports: 0, address: None, data: if data { @@ -631,7 +651,7 @@ pub fn get_compressed_output_account( None }, }, - merkle_tree, + merkle_tree: merkle_tree.into(), } } @@ -669,7 +689,7 @@ pub async fn perform_test_transaction( .as_slice(), output_accounts .iter() - .map(|x| x.merkle_tree) + .map(|x| x.merkle_tree.into()) .collect::>() .as_slice(), &mut remaining_accounts, diff --git a/program-tests/system-cpi-v2-test/tests/invoke_cpi_with_read_only.rs b/program-tests/system-cpi-v2-test/tests/invoke_cpi_with_read_only.rs index d7cf3ac63b..980021e43d 100644 --- a/program-tests/system-cpi-v2-test/tests/invoke_cpi_with_read_only.rs +++ b/program-tests/system-cpi-v2-test/tests/invoke_cpi_with_read_only.rs @@ -288,7 +288,7 @@ async fn functional_read_only() { .iter() .zip(read_only_addresses) .map(|(root_index, address)| ReadOnlyAddress { - address_merkle_tree_pubkey: address_tree, + address_merkle_tree_pubkey: address_tree.into(), address, address_merkle_tree_root_index: *root_index, }) @@ -590,7 +590,7 @@ async fn functional_account_infos() { .iter() .zip(read_only_addresses) .map(|(root_index, address)| ReadOnlyAddress { - address_merkle_tree_pubkey: address_tree, + address_merkle_tree_pubkey: address_tree.into(), address, address_merkle_tree_root_index: *root_index, }) @@ -717,7 +717,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [2u8; 32]; @@ -728,7 +728,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; let account_info = CompressedAccountInfo { address: Some(address), @@ -759,15 +759,15 @@ 
async fn create_addresses_with_account_info() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: Some(0), }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: Some(1), }; @@ -955,7 +955,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [4u8; 32]; @@ -966,7 +966,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; let rpc_result = rpc .get_validity_proof( @@ -987,15 +987,15 @@ async fn create_addresses_with_account_info() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: None, }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: 
address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: None, }; @@ -1034,7 +1034,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let rpc_result = rpc @@ -1050,8 +1050,8 @@ async fn create_addresses_with_account_info() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: None, }; @@ -1093,7 +1093,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [7u8; 32]; @@ -1104,7 +1104,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; let account_info = CompressedAccountInfo { address: Some(address1), @@ -1131,15 +1131,15 @@ async fn create_addresses_with_account_info() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: None, }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: 
address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: Some(0), }; @@ -1181,7 +1181,7 @@ async fn create_addresses_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let account_info = CompressedAccountInfo { @@ -1203,8 +1203,8 @@ async fn create_addresses_with_account_info() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: Some(0), }; @@ -1314,7 +1314,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [2u8; 32]; @@ -1325,7 +1325,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; let mut output_1 = get_compressed_output_account(true, if batched { queue } else { tree }); output_1.compressed_account.address = Some(address); @@ -1350,15 +1350,15 @@ async fn create_addresses_with_read_only() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: 
rpc_result.value.get_address_root_indices()[0], assigned_account_index: Some(0), }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: Some(1), }; @@ -1553,7 +1553,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [4u8; 32]; @@ -1564,7 +1564,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; let rpc_result = rpc .get_validity_proof( @@ -1585,15 +1585,15 @@ async fn create_addresses_with_read_only() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: None, }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: None, }; @@ -1633,7 +1633,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + 
derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let rpc_result = rpc @@ -1649,8 +1649,8 @@ async fn create_addresses_with_read_only() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: None, }; @@ -1693,7 +1693,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [7u8; 32]; @@ -1704,7 +1704,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; output_accounts[0].compressed_account.address = Some(address1); @@ -1727,15 +1727,15 @@ async fn create_addresses_with_read_only() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: None, }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: Some(0), }; @@ -1778,7 +1778,7 @@ async fn create_addresses_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } 
else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; output_accounts[0].compressed_account.address = Some(address); @@ -1796,8 +1796,8 @@ async fn create_addresses_with_read_only() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: Some(0), }; @@ -2113,7 +2113,7 @@ async fn cpi_context_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [2u8; 32]; @@ -2124,7 +2124,7 @@ async fn cpi_context_with_read_only() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; let addresses_with_tree = vec![ AddressWithTree { @@ -2143,15 +2143,15 @@ async fn cpi_context_with_read_only() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: Some(0), }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: Some(1), }; @@ -2159,15 +2159,15 @@ async fn 
cpi_context_with_read_only() { // Insert into cpi context. { let input_accounts = vec![get_compressed_input_account(MerkleContext { - merkle_tree_pubkey: tree, - queue_pubkey: queue, + merkle_tree_pubkey: tree.into(), + queue_pubkey: queue.into(), leaf_index: 2, prove_by_index: true, tree_type: TreeType::StateV2, })]; let mut output_account = get_compressed_output_account(false, queue); output_account.compressed_account.address = Some(address1); - output_account.compressed_account.owner = owner_account1; + output_account.compressed_account.owner = owner_account1.into(); local_sdk::perform_test_transaction( &mut rpc, &mut test_indexer, @@ -2203,14 +2203,14 @@ async fn cpi_context_with_read_only() { // Insert into cpi context 2. { let input_accounts = vec![get_compressed_input_account(MerkleContext { - merkle_tree_pubkey: tree, - queue_pubkey: queue, + merkle_tree_pubkey: tree.into(), + queue_pubkey: queue.into(), leaf_index: 0, prove_by_index: true, tree_type: TreeType::StateV2, })]; let mut output_account = get_compressed_output_account(false, queue); - output_account.compressed_account.owner = owner_account2; + output_account.compressed_account.owner = owner_account2.into(); local_sdk::perform_test_transaction( &mut rpc, &mut test_indexer, @@ -2246,8 +2246,8 @@ async fn cpi_context_with_read_only() { // Execute cpi context. 
{ let input_accounts = vec![get_compressed_input_account(MerkleContext { - merkle_tree_pubkey: tree, - queue_pubkey: queue, + merkle_tree_pubkey: tree.into(), + queue_pubkey: queue.into(), leaf_index: 1, prove_by_index: true, tree_type: TreeType::StateV2, @@ -2409,7 +2409,7 @@ async fn cpi_context_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed).unwrap() + derive_address_legacy(&address_tree.into(), &seed).unwrap() }; let seed1 = [2u8; 32]; @@ -2420,7 +2420,7 @@ async fn cpi_context_with_account_info() { &create_address_test_program::ID.to_bytes(), ) } else { - derive_address_legacy(&address_tree, &seed1).unwrap() + derive_address_legacy(&address_tree.into(), &seed1).unwrap() }; let addresses_with_tree = vec![ AddressWithTree { @@ -2439,15 +2439,15 @@ async fn cpi_context_with_account_info() { .unwrap(); let new_address_params = NewAddressParamsAssigned { seed, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[0], assigned_account_index: Some(0), }; let new_address_params1 = NewAddressParamsAssigned { seed: seed1, - address_queue_pubkey: address_queue, - address_merkle_tree_pubkey: address_tree, + address_queue_pubkey: address_queue.into(), + address_merkle_tree_pubkey: address_tree.into(), address_merkle_tree_root_index: rpc_result.value.get_address_root_indices()[1], assigned_account_index: Some(2), }; @@ -2734,8 +2734,8 @@ async fn compress_sol_with_read_only() { // 2.Decompress sol { let mut input_account = get_compressed_input_account(MerkleContext { - merkle_tree_pubkey: tree, - queue_pubkey: queue, + merkle_tree_pubkey: tree.into(), + queue_pubkey: queue.into(), leaf_index: 0, prove_by_index: true, tree_type: TreeType::StateV2, @@ -2830,12 +2830,7 @@ pub mod local_sdk { use 
create_address_test_program::create_invoke_read_only_account_info_instruction; use light_client::indexer::Indexer; use light_compressed_account::{ - address::{ - pack_new_address_params_assigned, pack_read_only_accounts, - pack_read_only_address_params, - }, compressed_account::{ - pack_compressed_accounts, pack_output_compressed_accounts, CompressedAccountWithMerkleContext, MerkleContext, PackedCompressedAccountWithMerkleContext, ReadOnlyCompressedAccount, }, @@ -2851,11 +2846,18 @@ pub mod local_sdk { use light_program_test::indexer::TestIndexerExtensions; use light_sdk::{ cpi::CpiAccountsConfig, find_cpi_signer_macro, instruction::accounts::SystemAccountPubkeys, - pack_pubkey_usize, NewAddressParamsAssigned, OutputCompressedAccountWithContext, + NewAddressParamsAssigned, OutputCompressedAccountWithContext, OutputCompressedAccountWithPackedContext, ReadOnlyAddress, CPI_AUTHORITY_PDA_SEED, }; use light_system_program::constants::SOL_POOL_PDA_SEED; - use light_test_utils::{Rpc, RpcError}; + use light_test_utils::{ + pack::{ + pack_compressed_accounts, pack_new_address_params_assigned, + pack_output_compressed_accounts, pack_pubkey_usize, pack_read_only_accounts, + pack_read_only_address_params, + }, + Rpc, RpcError, + }; use solana_sdk::{ pubkey::Pubkey, signature::{Keypair, Signer}, @@ -2917,7 +2919,7 @@ pub mod local_sdk { .as_slice(), output_accounts .iter() - .map(|x| x.merkle_tree) + .map(|x| x.merkle_tree.into()) .collect::>() .as_slice(), &mut remaining_accounts, diff --git a/program-tests/system-test/tests/test.rs b/program-tests/system-test/tests/test.rs index 6b82e2bcf3..e846c3d990 100644 --- a/program-tests/system-test/tests/test.rs +++ b/program-tests/system-test/tests/test.rs @@ -257,7 +257,7 @@ pub async fn failing_transaction_inputs( }; output_compressed_accounts.push(CompressedAccount { lamports: output_amount, - owner: payer.pubkey(), + owner: payer.pubkey().into(), data: None, address, }); @@ -422,7 +422,7 @@ pub async fn 
failing_transaction_inputs_inner( let mut inputs_struct = inputs_struct.clone(); inputs_struct.input_compressed_accounts_with_merkle_context[num_inputs - 1] .compressed_account - .owner = Keypair::new().pubkey(); + .owner = Keypair::new().pubkey().into(); create_instruction_and_failing_transaction( rpc, @@ -558,12 +558,13 @@ fn create_address_test_inputs( for address_seed in address_seeds.iter() { new_address_params.push(NewAddressParams { seed: *address_seed, - address_queue_pubkey: env.v1_address_trees[0].queue, - address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree, + address_queue_pubkey: env.v1_address_trees[0].queue.into(), + address_merkle_tree_pubkey: env.v1_address_trees[0].merkle_tree.into(), address_merkle_tree_root_index: 0, }); let derived_address = - derive_address_legacy(&env.v1_address_trees[0].merkle_tree, address_seed).unwrap(); + derive_address_legacy(&env.v1_address_trees[0].merkle_tree.into(), address_seed) + .unwrap(); derived_addresses.push(derived_address); } (new_address_params, derived_addresses) @@ -890,7 +891,7 @@ async fn invoke_test() { ); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: None, }]; @@ -940,7 +941,7 @@ async fn invoke_test() { let input_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: None, }]; @@ -951,9 +952,9 @@ async fn invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: nullifier_queue_pubkey, + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -975,7 +976,7 @@ async fn invoke_test() { // check invalid signer for in compressed_account let invalid_signer_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: 
Keypair::new().pubkey(), + owner: Keypair::new().pubkey().into(), data: None, address: None, }]; @@ -986,9 +987,9 @@ async fn invoke_test() { &invalid_signer_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: nullifier_queue_pubkey, + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -1029,9 +1030,9 @@ async fn invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: nullifier_queue_pubkey, + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -1071,7 +1072,7 @@ async fn invoke_test() { println!("Double spend -------------------------"); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: Keypair::new().pubkey(), + owner: Keypair::new().pubkey().into(), data: None, address: None, }]; @@ -1082,9 +1083,9 @@ async fn invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: nullifier_queue_pubkey, + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -1108,7 +1109,7 @@ async fn invoke_test() { assert!(res.is_err()); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: Keypair::new().pubkey(), + owner: Keypair::new().pubkey().into(), data: None, address: None, }]; @@ -1119,9 +1120,9 @@ async fn invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 1, - queue_pubkey: nullifier_queue_pubkey, + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: 
false, tree_type: TreeType::StateV1, }], @@ -1168,13 +1169,13 @@ async fn test_with_address() { let address_seed = [1u8; 32]; let derived_address = derive_address_legacy( - &rpc.test_accounts.v1_address_trees[0].merkle_tree, + &rpc.test_accounts.v1_address_trees[0].merkle_tree.into(), &address_seed, ) .unwrap(); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: Some(derived_address), // this should not be sent, only derived on-chain }]; @@ -1213,15 +1214,15 @@ async fn test_with_address() { ); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: Some(derived_address), // this should not be sent, only derived on-chain }]; let address_params = vec![NewAddressParams { seed: address_seed, - address_queue_pubkey: rpc.test_accounts.v1_address_trees[0].queue, - address_merkle_tree_pubkey: rpc.test_accounts.v1_address_trees[0].merkle_tree, + address_queue_pubkey: rpc.test_accounts.v1_address_trees[0].queue.into(), + address_merkle_tree_pubkey: rpc.test_accounts.v1_address_trees[0].merkle_tree.into(), address_merkle_tree_root_index: 0, }]; let instruction = create_invoke_instruction( @@ -1260,14 +1261,14 @@ async fn test_with_address() { ); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: Some(derived_address), // this should not be sent, only derived on-chain }]; let address_params = vec![NewAddressParams { seed: address_seed, - address_queue_pubkey: rpc.test_accounts.v2_address_trees[0], - address_merkle_tree_pubkey: rpc.test_accounts.v2_address_trees[0], + address_queue_pubkey: rpc.test_accounts.v2_address_trees[0].into(), + address_merkle_tree_pubkey: rpc.test_accounts.v2_address_trees[0].into(), address_merkle_tree_root_index: 0, }]; @@ -1329,7 +1330,8 @@ async fn test_with_address() { 
&[recipient_pubkey], &[compressed_account_with_context .merkle_context - .merkle_tree_pubkey], + .merkle_tree_pubkey + .into()], None, ) .await @@ -1344,8 +1346,11 @@ async fn test_with_address() { derived_address ); assert_eq!( - indexer.compressed_accounts[0].compressed_account.owner, - recipient_pubkey + indexer.compressed_accounts[0] + .compressed_account + .owner + .to_bytes(), + recipient_pubkey.to_bytes() ); (*rpc.indexer_mut().unwrap()) = indexer; @@ -1463,7 +1468,7 @@ async fn test_with_compression() { let compress_amount = 1_000_000; let output_compressed_accounts = vec![CompressedAccount { lamports: compress_amount + 1, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: None, }]; @@ -1496,7 +1501,7 @@ async fn test_with_compression() { .unwrap(); let output_compressed_accounts = vec![CompressedAccount { lamports: compress_amount, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: None, }]; @@ -1565,7 +1570,7 @@ async fn test_with_compression() { let recipient_pubkey = Keypair::new().pubkey(); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: recipient_pubkey, + owner: recipient_pubkey.into(), data: None, address: None, }]; @@ -1576,9 +1581,9 @@ async fn test_with_compression() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: nullifier_queue_pubkey, + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -1849,7 +1854,7 @@ async fn batch_invoke_test() { let output_queue_pubkey = env.v2_state_trees[0].output_queue; let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: payer.pubkey(), + owner: payer.pubkey().into(), data: None, address: None, }]; @@ -1865,7 +1870,7 @@ async fn batch_invoke_test() { ); let input_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: 
payer_pubkey, + owner: payer_pubkey.into(), data: None, address: None, }]; @@ -1876,9 +1881,9 @@ async fn batch_invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: output_queue_pubkey, + queue_pubkey: output_queue_pubkey.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -1906,7 +1911,7 @@ async fn batch_invoke_test() { // 3. Should fail: input compressed account with invalid signer. let invalid_signer_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: Keypair::new().pubkey(), + owner: Keypair::new().pubkey().into(), data: None, address: None, }]; @@ -1917,9 +1922,9 @@ async fn batch_invoke_test() { &invalid_signer_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: output_queue_pubkey, + queue_pubkey: output_queue_pubkey.into(), prove_by_index: false, tree_type: TreeType::StateV1, }], @@ -1968,9 +1973,9 @@ async fn batch_invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: compressed_account_with_context.merkle_context.leaf_index, - queue_pubkey: output_queue_pubkey, + queue_pubkey: output_queue_pubkey.into(), prove_by_index: true, tree_type: TreeType::StateV2, }], @@ -2007,7 +2012,7 @@ async fn batch_invoke_test() { { let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: Keypair::new().pubkey(), + owner: Keypair::new().pubkey().into(), data: None, address: None, }]; @@ -2017,9 +2022,9 @@ async fn batch_invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: 0, - queue_pubkey: output_queue_pubkey, + queue_pubkey: 
output_queue_pubkey.into(), prove_by_index: true, tree_type: TreeType::StateV2, }], @@ -2047,13 +2052,13 @@ async fn batch_invoke_test() { let input_compressed_account = rpc .get_compressed_accounts_with_merkle_context_by_owner(&payer_pubkey) .iter() - .filter(|x| x.merkle_context.queue_pubkey == output_queue_pubkey) + .filter(|x| x.merkle_context.queue_pubkey.to_bytes() == output_queue_pubkey.to_bytes()) .last() .unwrap() .clone(); let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: Keypair::new().pubkey(), + owner: Keypair::new().pubkey().into(), data: None, address: None, }]; @@ -2063,9 +2068,9 @@ async fn batch_invoke_test() { &[input_compressed_account.compressed_account], &output_compressed_accounts, &[MerkleContext { - merkle_tree_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), leaf_index: input_compressed_account.merkle_context.leaf_index - 1, - queue_pubkey: output_queue_pubkey, + queue_pubkey: output_queue_pubkey.into(), prove_by_index: true, tree_type: TreeType::StateV2, }], @@ -2105,8 +2110,8 @@ async fn batch_invoke_test() { .compressed_accounts .iter() .filter(|x| { - x.compressed_account.owner == payer_pubkey - && x.merkle_context.queue_pubkey == output_queue_pubkey + x.compressed_account.owner.to_bytes() == payer_pubkey.to_bytes() + && x.merkle_context.queue_pubkey.to_bytes() == output_queue_pubkey.to_bytes() }) .cloned() .collect::>() @@ -2121,8 +2126,9 @@ async fn batch_invoke_test() { .compressed_accounts .iter() .filter(|x| { - x.compressed_account.owner == payer_pubkey - && x.merkle_context.queue_pubkey == env.v1_state_trees[0].nullifier_queue + x.compressed_account.owner.to_bytes() == payer_pubkey.to_bytes() + && x.merkle_context.queue_pubkey.to_bytes() + == env.v1_state_trees[0].nullifier_queue.to_bytes() }) .collect::>()[0] .clone(); @@ -2152,13 +2158,13 @@ async fn batch_invoke_test() { let output_compressed_accounts = vec![ CompressedAccount { lamports: 0, - owner: payer_pubkey, + owner: 
payer_pubkey.into(), data: None, address: None, }, CompressedAccount { lamports: 0, - owner: payer_pubkey, + owner: payer_pubkey.into(), data: None, address: None, }, @@ -2172,8 +2178,8 @@ async fn batch_invoke_test() { &output_compressed_accounts, merkle_context.as_slice(), &[ - merkle_context_1.queue_pubkey, // output queue - merkle_context_2.merkle_tree_pubkey, + merkle_context_1.queue_pubkey.into(), // output queue + merkle_context_2.merkle_tree_pubkey.into(), ], &proof_rpc_result .value @@ -2210,8 +2216,8 @@ async fn batch_invoke_test() { .compressed_accounts .iter() .filter(|x| { - x.compressed_account.owner == payer_pubkey - && x.merkle_context.queue_pubkey == output_queue_pubkey + x.compressed_account.owner.to_bytes() == payer_pubkey.to_bytes() + && x.merkle_context.queue_pubkey.to_bytes() == output_queue_pubkey.to_bytes() }) .last() .unwrap() @@ -2243,8 +2249,8 @@ async fn batch_invoke_test() { .compressed_accounts .iter() .filter(|x| { - x.compressed_account.owner == payer_pubkey - && x.merkle_context.queue_pubkey == output_queue_pubkey + x.compressed_account.owner.to_bytes() == payer_pubkey.to_bytes() + && x.merkle_context.queue_pubkey.to_bytes() == output_queue_pubkey.to_bytes() }) .last() .unwrap() @@ -2276,8 +2282,8 @@ async fn batch_invoke_test() { .compressed_accounts .iter() .filter(|x| { - x.compressed_account.owner == payer_pubkey - && x.merkle_context.queue_pubkey == output_queue_pubkey + x.compressed_account.owner.to_bytes() == payer_pubkey.to_bytes() + && x.merkle_context.queue_pubkey.to_bytes() == output_queue_pubkey.to_bytes() }) .next_back() .unwrap() @@ -2309,8 +2315,8 @@ async fn batch_invoke_test() { .compressed_accounts .iter() .filter(|x| { - x.compressed_account.owner == payer_pubkey - && x.merkle_context.queue_pubkey == output_queue_pubkey + x.compressed_account.owner.to_bytes() == payer_pubkey.to_bytes() + && x.merkle_context.queue_pubkey.to_bytes() == output_queue_pubkey.to_bytes() }) .next_back() .unwrap() @@ -2397,8 +2403,8 @@ 
async fn batch_invoke_test() { .compressed_accounts .iter() .filter(|x| { - x.compressed_account.owner == payer_pubkey - && x.merkle_context.queue_pubkey != output_queue_pubkey + x.compressed_account.owner.to_bytes() == payer_pubkey.to_bytes() + && x.merkle_context.queue_pubkey.to_bytes() != output_queue_pubkey.to_bytes() }) .next_back() .unwrap() @@ -2412,7 +2418,7 @@ async fn batch_invoke_test() { &input_compressed_accounts, &output_compressed_accounts, &[merkle_context], - &[merkle_context.merkle_tree_pubkey], + &[merkle_context.merkle_tree_pubkey.into()], &[None], &Vec::new(), None, @@ -2460,7 +2466,7 @@ pub async fn double_spend_compressed_account( let input_compressed_accounts = vec![compressed_account_with_context_1.compressed_account]; let output_compressed_accounts = vec![CompressedAccount { lamports: 0, - owner: payer.pubkey(), + owner: payer.pubkey().into(), data: None, address: None, }]; @@ -2471,7 +2477,7 @@ pub async fn double_spend_compressed_account( &input_compressed_accounts, &output_compressed_accounts, &[merkle_context_1], - &[merkle_context_1.queue_pubkey], + &[merkle_context_1.queue_pubkey.into()], &proof_rpc_result .value .accounts @@ -2495,7 +2501,7 @@ pub async fn double_spend_compressed_account( &input_compressed_accounts, &output_compressed_accounts, &[merkle_context], - &[merkle_context.queue_pubkey], + &[merkle_context.queue_pubkey.into()], &[None], &Vec::new(), None, @@ -2574,7 +2580,7 @@ pub async fn create_output_accounts( let output_compressed_accounts = vec![ CompressedAccount { lamports: 0, - owner: payer.pubkey(), + owner: payer.pubkey().into(), data: None, address: None, }; diff --git a/program-tests/utils/src/assert_compressed_tx.rs b/program-tests/utils/src/assert_compressed_tx.rs index f5e7865a81..6f0595d1ca 100644 --- a/program-tests/utils/src/assert_compressed_tx.rs +++ b/program-tests/utils/src/assert_compressed_tx.rs @@ -232,10 +232,9 @@ pub fn assert_created_compressed_accounts( && x.owner == 
output_account.compressed_account.owner && x.data == output_account.compressed_account.data && x.address == output_account.compressed_account.address),); - assert!(output_merkle_tree_pubkeys - .iter() - .any(|x| *x == output_account.merkle_context.merkle_tree_pubkey - || *x == output_account.merkle_context.queue_pubkey),); + assert!(output_merkle_tree_pubkeys.iter().any(|x| *x + == output_account.merkle_context.merkle_tree_pubkey.into() + || *x == output_account.merkle_context.queue_pubkey.into()),); } } @@ -258,9 +257,9 @@ pub fn assert_public_transaction_event( for account in event.output_compressed_accounts.iter() { assert!( output_merkle_tree_accounts.iter().any(|x| x.merkle_tree - == event.pubkey_array[account.merkle_tree_index as usize] + == event.pubkey_array[account.merkle_tree_index as usize].into() // handle output queue - || x.nullifier_queue == event.pubkey_array[account.merkle_tree_index as usize]), + || x.nullifier_queue == event.pubkey_array[account.merkle_tree_index as usize].into()), "assert_public_transaction_event: output state merkle tree account index mismatch" ); } @@ -349,8 +348,8 @@ pub async fn assert_merkle_tree_after_tx( ); assert_eq!( - merkle_tree_account.metadata.next_merkle_tree, - Pubkey::default().into() + merkle_tree_account.metadata.next_merkle_tree.to_bytes(), + [0u8; 32] ); assert_eq!( - merkle_tree_account.metadata.access_metadata.owner, - (*payer_pubkey).into() + merkle_tree_account + .metadata + .access_metadata + .owner + .to_bytes(), + (*payer_pubkey).to_bytes() ); assert_eq!( - merkle_tree_account.metadata.access_metadata.program_owner, - Pubkey::default().into() + merkle_tree_account + .metadata + .access_metadata + .program_owner + .to_bytes(), + [0u8; 32] ); assert_eq!( - merkle_tree_account.metadata.associated_queue, - (*queue_pubkey).into() + merkle_tree_account.metadata.associated_queue.to_bytes(), + (*queue_pubkey).to_bytes() ); let merkle_tree = get_concurrent_merkle_tree::( diff --git 
a/program-tests/utils/src/assert_rollover.rs b/program-tests/utils/src/assert_rollover.rs index 0fb86d0383..7fa91145bb 100644 --- a/program-tests/utils/src/assert_rollover.rs +++ b/program-tests/utils/src/assert_rollover.rs @@ -90,12 +90,12 @@ pub fn assert_rolledover_merkle_trees_metadata( ); assert_eq!( - new_merkle_tree_metadata.associated_queue, - (*new_queue_pubkey).into() + new_merkle_tree_metadata.associated_queue.to_bytes(), + (*new_queue_pubkey).to_bytes() ); assert_eq!( - new_merkle_tree_metadata.next_merkle_tree, - Pubkey::default().into() + new_merkle_tree_metadata.next_merkle_tree.to_bytes(), + Pubkey::default().to_bytes() ); } @@ -142,10 +142,13 @@ pub fn assert_rolledover_queues_metadata( new_queue_metadata.access_metadata.program_owner ); assert_eq!( - new_queue_metadata.associated_merkle_tree, - (*new_merkle_tree_pubkey).into() + new_queue_metadata.associated_merkle_tree.to_bytes(), + (*new_merkle_tree_pubkey).to_bytes() + ); + assert_eq!( + old_queue_metadata.next_queue, + light_compressed_account::Pubkey::from(*new_queue_pubkey) ); - assert_eq!(old_queue_metadata.next_queue, (*new_queue_pubkey).into()); assert_eq!( old_merkle_tree_lamports, new_merkle_tree_lamports + new_queue_lamports + old_merkle_tree_lamports diff --git a/program-tests/utils/src/assert_token_tx.rs b/program-tests/utils/src/assert_token_tx.rs index 4654d48c34..0934d869bd 100644 --- a/program-tests/utils/src/assert_token_tx.rs +++ b/program-tests/utils/src/assert_token_tx.rs @@ -166,8 +166,9 @@ pub fn assert_compressed_token_accounts( ); assert_eq!( - merkle_tree_account.metadata.next_merkle_tree, - Pubkey::default().into() + merkle_tree_account.metadata.next_merkle_tree.to_bytes(), + [0u8; 32] ); let expected_access_meta_data = AccessMetadata { owner: (*owner_pubkey).into(), @@ -193,8 +193,8 @@ pub async fn assert_address_merkle_tree_initialized( expected_access_meta_data ); assert_eq!( - merkle_tree_account.metadata.associated_queue, - (*queue_pubkey).into() + 
merkle_tree_account.metadata.associated_queue.to_bytes(), + (*queue_pubkey).to_bytes() ); let merkle_tree = get_indexed_merkle_tree::< diff --git a/program-tests/utils/src/create_address_test_program_sdk.rs b/program-tests/utils/src/create_address_test_program_sdk.rs index c175605788..a7282f6b76 100644 --- a/program-tests/utils/src/create_address_test_program_sdk.rs +++ b/program-tests/utils/src/create_address_test_program_sdk.rs @@ -10,10 +10,11 @@ use light_compressed_account::{ address::{derive_address, pack_new_address_params}, instruction_data::{compressed_proof::CompressedProof, data::NewAddressParams}, }; -use light_compressed_token::process_transfer::transfer_sdk::to_account_metas; use light_program_test::{accounts::test_accounts::TestAccounts, indexer::TestIndexerExtensions}; use solana_sdk::{instruction::Instruction, pubkey::Pubkey, signature::Keypair, signer::Signer}; +use crate::e2e_test_env::to_account_metas_light; + #[derive(Debug, Clone)] pub struct CreateCompressedPdaInstructionInputs<'a> { pub data: [u8; 31], @@ -29,9 +30,9 @@ pub fn create_pda_instruction(input_params: CreateCompressedPdaInstructionInputs &[CPI_AUTHORITY_PDA_SEED], &create_address_test_program::id(), ); - let mut remaining_accounts = HashMap::new(); + let mut remaining_accounts = HashMap::::new(); remaining_accounts.insert( - *input_params.output_compressed_account_merkle_tree_pubkey, + (*input_params.output_compressed_account_merkle_tree_pubkey).into(), 0, ); let new_address_params = @@ -58,7 +59,7 @@ pub fn create_pda_instruction(input_params: CreateCompressedPdaInstructionInputs cpi_signer, system_program: solana_sdk::system_program::id(), }; - let remaining_accounts = to_account_metas(remaining_accounts); + let remaining_accounts = to_account_metas_light(remaining_accounts); Instruction { program_id: create_address_test_program::ID, @@ -116,8 +117,8 @@ pub async fn perform_create_pda_with_event< let new_address_params = NewAddressParams { seed, - address_merkle_tree_pubkey: 
env.v2_address_trees[0], - address_queue_pubkey: env.v2_address_trees[0], + address_merkle_tree_pubkey: env.v2_address_trees[0].into(), + address_queue_pubkey: env.v2_address_trees[0].into(), address_merkle_tree_root_index: rpc_result.value.addresses[0].root_index, }; let create_ix_inputs = CreateCompressedPdaInstructionInputs { diff --git a/program-tests/utils/src/e2e_test_env.rs b/program-tests/utils/src/e2e_test_env.rs index d05853b7f2..a853e2afbf 100644 --- a/program-tests/utils/src/e2e_test_env.rs +++ b/program-tests/utils/src/e2e_test_env.rs @@ -70,7 +70,7 @@ use account_compression::{ AddressMerkleTreeConfig, AddressQueueConfig, NullifierQueueConfig, StateMerkleTreeConfig, SAFETY_MARGIN, }; -use anchor_lang::AnchorSerialize; +use anchor_lang::{prelude::AccountMeta, AnchorSerialize}; use create_address_test_program::create_invoke_cpi_instruction; use forester_utils::{ account_zero_copy::AccountZeroCopy, @@ -96,12 +96,17 @@ use light_client::{ // refactor all tests to work with that so that we can run all tests with a test validator and concurrency use light_compressed_account::{ address::{ - derive_address, pack_new_address_params, pack_read_only_accounts, - pack_read_only_address_params, + derive_address, + // pack_new_address_params, pack_read_only_accounts, + // pack_read_only_address_params, }, compressed_account::{ - pack_compressed_accounts, pack_output_compressed_accounts, CompressedAccount, - CompressedAccountData, CompressedAccountWithMerkleContext, ReadOnlyCompressedAccount, + // pack_compressed_accounts, pack_output_compressed_accounts, + CompressedAccount, + CompressedAccountData, + CompressedAccountWithMerkleContext, + + ReadOnlyCompressedAccount, }, instruction_data::{ compressed_proof::CompressedProof, @@ -177,6 +182,7 @@ use crate::{ assert_finalized_epoch_registration, assert_report_work, fetch_epoch_and_forester_pdas, }, create_address_merkle_tree_and_queue_account_with_assert, + pack::*, spl::{ approve_test, burn_test, compress_test, 
compressed_transfer_test, create_mint_helper, create_token_account, decompress_test, freeze_test, mint_tokens_helper, revoke_test, @@ -2553,8 +2559,8 @@ where .enumerate() .map(|(index, seed)| { NewAddressParams { - address_merkle_tree_pubkey: address_merkle_tree[index], - address_queue_pubkey: queues[index], + address_merkle_tree_pubkey: address_merkle_tree[index].into(), + address_queue_pubkey: queues[index].into(), seed: *seed, address_merkle_tree_root_index: 0, // set after proof generation } @@ -2583,7 +2589,7 @@ where ); proof_input_addresses.push((address, address_merkle_tree[index])); ReadOnlyAddress { - address_merkle_tree_pubkey: address_merkle_tree[index], + address_merkle_tree_pubkey: address_merkle_tree[index].into(), address, address_merkle_tree_root_index: 0, // set after proof generation } @@ -2615,7 +2621,7 @@ where None }; let account = CompressedAccount { - owner: create_address_test_program::ID, + owner: create_address_test_program::ID.into(), data: Some(CompressedAccountData { data: rnd_data.to_vec(), discriminator: [1; 8], @@ -2867,7 +2873,10 @@ where .indexer .get_state_merkle_trees() .iter() - .find(|x| x.accounts.merkle_tree == first_account.merkle_context.merkle_tree_pubkey) + .find(|x| { + x.accounts.merkle_tree.to_bytes() + == first_account.merkle_context.merkle_tree_pubkey.to_bytes() + }) .unwrap() .tree_type; let input_compressed_accounts_with_same_version = input_compressed_accounts @@ -2876,7 +2885,10 @@ where self.indexer .get_state_merkle_trees() .iter() - .find(|y| y.accounts.merkle_tree == x.merkle_context.merkle_tree_pubkey) + .find(|y| { + y.accounts.merkle_tree.to_bytes() + == x.merkle_context.merkle_tree_pubkey.to_bytes() + }) .unwrap() .tree_type == first_mt @@ -3140,7 +3152,13 @@ where a.compressed_account .merkle_context .merkle_tree_pubkey - .cmp(&b.compressed_account.merkle_context.merkle_tree_pubkey) + .to_bytes() + .cmp( + &b.compressed_account + .merkle_context + .merkle_tree_pubkey + .to_bytes(), + ) }); (mint, 
get_random_subset_of_token_accounts) } @@ -3363,3 +3381,28 @@ impl GeneralActionConfig { } } } + +pub fn to_account_metas_light( + remaining_accounts: HashMap, +) -> Vec { + let mut remaining_accounts = remaining_accounts + .iter() + .map(|(k, i)| { + ( + AccountMeta { + pubkey: (*k).into(), + is_signer: false, + is_writable: true, + }, + *i, + ) + }) + .collect::>(); + // hash maps are not sorted so we need to sort manually and collect into a vector again + remaining_accounts.sort_by(|a, b| a.1.cmp(&b.1)); + let remaining_accounts = remaining_accounts + .iter() + .map(|(k, _)| k.clone()) + .collect::>(); + remaining_accounts +} diff --git a/program-tests/utils/src/lib.rs b/program-tests/utils/src/lib.rs index e2e44820ff..889b6556a7 100644 --- a/program-tests/utils/src/lib.rs +++ b/program-tests/utils/src/lib.rs @@ -29,6 +29,7 @@ pub mod conversions; pub mod create_address_test_program_sdk; pub mod e2e_test_env; pub mod mock_batched_forester; +pub mod pack; pub mod registered_program_accounts_v1; pub mod setup_accounts; #[allow(unused)] diff --git a/program-tests/utils/src/pack.rs b/program-tests/utils/src/pack.rs new file mode 100644 index 0000000000..a86b4495fe --- /dev/null +++ b/program-tests/utils/src/pack.rs @@ -0,0 +1,228 @@ +use std::collections::HashMap; + +use light_compressed_account::{ + compressed_account::{ + CompressedAccount, CompressedAccountWithMerkleContext, MerkleContext, + PackedCompressedAccountWithMerkleContext, PackedMerkleContext, + PackedReadOnlyCompressedAccount, ReadOnlyCompressedAccount, + }, + instruction_data::data::{NewAddressParams, ReadOnlyAddress}, +}; +use light_sdk::{ + NewAddressParamsAssigned, NewAddressParamsAssignedPacked, NewAddressParamsPacked, + OutputCompressedAccountWithPackedContext, PackedReadOnlyAddress, +}; +use solana_sdk::pubkey::Pubkey; + +pub fn add_and_get_remaining_account_indices( + pubkeys: &[Pubkey], + remaining_accounts: &mut HashMap, +) -> Vec { + let mut vec = Vec::new(); + let mut next_index: usize = 
remaining_accounts.len(); + for pubkey in pubkeys.iter() { + match remaining_accounts.get(pubkey) { + Some(_) => {} + None => { + remaining_accounts.insert(*pubkey, next_index); + next_index += 1; + } + }; + vec.push(*remaining_accounts.get(pubkey).unwrap() as u8); + } + vec +} + +pub fn pack_merkle_context( + merkle_context: &[MerkleContext], + remaining_accounts: &mut HashMap, +) -> Vec { + merkle_context + .iter() + .map(|merkle_context| PackedMerkleContext { + leaf_index: merkle_context.leaf_index, + merkle_tree_pubkey_index: pack_account( + merkle_context.merkle_tree_pubkey.into(), + remaining_accounts, + ), + queue_pubkey_index: pack_account( + merkle_context.queue_pubkey.into(), + remaining_accounts, + ), + prove_by_index: merkle_context.prove_by_index, + }) + .collect::>() +} + +pub fn pack_account(pubkey: Pubkey, remaining_accounts: &mut HashMap) -> u8 { + match remaining_accounts.get(&pubkey) { + Some(index) => *index as u8, + None => { + let next_index = remaining_accounts.len(); + remaining_accounts.insert(pubkey, next_index); + next_index as u8 + } + } +} + +pub fn pack_read_only_accounts( + accounts: &[ReadOnlyCompressedAccount], + remaining_accounts: &mut HashMap, +) -> Vec { + accounts + .iter() + .map(|x| PackedReadOnlyCompressedAccount { + account_hash: x.account_hash, + merkle_context: pack_merkle_context(&[x.merkle_context], remaining_accounts)[0], + root_index: x.root_index, + }) + .collect::>() +} + +pub fn pack_new_address_params( + new_address_params: &[NewAddressParams], + remaining_accounts: &mut HashMap, +) -> Vec { + let mut new_address_params_packed = new_address_params + .iter() + .map(|x| NewAddressParamsPacked { + seed: x.seed, + address_merkle_tree_root_index: x.address_merkle_tree_root_index, + address_merkle_tree_account_index: 0, // will be assigned later + address_queue_account_index: 0, // will be assigned later + }) + .collect::>(); + let mut next_index: usize = remaining_accounts.len(); + for (i, params) in 
new_address_params.iter().enumerate() { + match remaining_accounts.get(¶ms.address_merkle_tree_pubkey.into()) { + Some(_) => {} + None => { + remaining_accounts.insert(params.address_merkle_tree_pubkey.into(), next_index); + next_index += 1; + } + }; + new_address_params_packed[i].address_merkle_tree_account_index = *remaining_accounts + .get(¶ms.address_merkle_tree_pubkey.into()) + .unwrap() + as u8; + } + + for (i, params) in new_address_params.iter().enumerate() { + match remaining_accounts.get(¶ms.address_queue_pubkey.into()) { + Some(_) => {} + None => { + remaining_accounts.insert(params.address_queue_pubkey.into(), next_index); + next_index += 1; + } + }; + new_address_params_packed[i].address_queue_account_index = *remaining_accounts + .get(¶ms.address_queue_pubkey.into()) + .unwrap() as u8; + } + new_address_params_packed +} + +pub fn pack_read_only_address_params( + new_address_params: &[ReadOnlyAddress], + remaining_accounts: &mut HashMap, +) -> Vec { + new_address_params + .iter() + .map(|x| PackedReadOnlyAddress { + address: x.address, + address_merkle_tree_root_index: x.address_merkle_tree_root_index, + address_merkle_tree_account_index: pack_account( + x.address_merkle_tree_pubkey.into(), + remaining_accounts, + ), + }) + .collect::>() +} + +pub fn pack_output_compressed_accounts( + compressed_accounts: &[CompressedAccount], + merkle_trees: &[Pubkey], + remaining_accounts: &mut HashMap, +) -> Vec { + compressed_accounts + .iter() + .zip(merkle_trees.iter()) + .map(|(x, tree)| OutputCompressedAccountWithPackedContext { + compressed_account: x.clone(), + merkle_tree_index: pack_account(*tree, remaining_accounts), + }) + .collect::>() +} + +pub fn pack_compressed_accounts( + compressed_accounts: &[CompressedAccountWithMerkleContext], + root_indices: &[Option], + remaining_accounts: &mut HashMap, +) -> Vec { + compressed_accounts + .iter() + .zip(root_indices.iter()) + .map(|(x, root_index)| { + let mut merkle_context = x.merkle_context; + let root_index 
= if let Some(root) = root_index { + *root + } else { + merkle_context.prove_by_index = true; + 0 + }; + + PackedCompressedAccountWithMerkleContext { + compressed_account: x.compressed_account.clone(), + merkle_context: pack_merkle_context(&[merkle_context], remaining_accounts)[0], + root_index, + read_only: false, + } + }) + .collect::>() +} +pub fn pack_new_address_params_assigned( + new_address_params: &[NewAddressParamsAssigned], + remaining_accounts: &mut HashMap, +) -> Vec { + let mut vec = Vec::new(); + for new_address_param in new_address_params.iter() { + let address_merkle_tree_account_index = pack_pubkey_usize( + &new_address_param.address_merkle_tree_pubkey.into(), + remaining_accounts, + ); + let address_queue_account_index = pack_pubkey_usize( + &new_address_param.address_queue_pubkey.into(), + remaining_accounts, + ); + vec.push(NewAddressParamsAssignedPacked { + seed: new_address_param.seed, + address_queue_account_index, + address_merkle_tree_root_index: new_address_param.address_merkle_tree_root_index, + address_merkle_tree_account_index, + assigned_to_account: new_address_param.assigned_account_index.is_some(), + assigned_account_index: new_address_param.assigned_account_index.unwrap_or_default(), + }); + } + + vec +} +pub fn pack_pubkey_usize(pubkey: &Pubkey, hash_set: &mut HashMap) -> u8 { + match hash_set.get(pubkey) { + Some(index) => (*index) as u8, + None => { + let index = hash_set.len(); + hash_set.insert(*pubkey, index); + index as u8 + } + } +} +pub fn pack_pubkey(pubkey: &Pubkey, hash_set: &mut HashMap) -> u8 { + match hash_set.get(pubkey) { + Some(index) => *index, + None => { + let index = hash_set.len() as u8; + hash_set.insert(*pubkey, index); + index + } + } +} diff --git a/program-tests/utils/src/spl.rs b/program-tests/utils/src/spl.rs index d144f2b8f0..ceb4ee04d1 100644 --- a/program-tests/utils/src/spl.rs +++ b/program-tests/utils/src/spl.rs @@ -594,7 +594,7 @@ pub async fn compressed_transfer_22_test< } let 
input_merkle_tree_pubkeys: Vec = input_merkle_tree_context .iter() - .map(|x| x.merkle_tree_pubkey) + .map(|x| x.merkle_tree_pubkey.into()) .collect(); println!("{:?}", input_compressed_accounts); println!( @@ -769,7 +769,12 @@ pub async fn decompress_test>(); let input_merkle_tree_pubkeys = input_compressed_accounts .iter() - .map(|x| x.compressed_account.merkle_context.merkle_tree_pubkey) + .map(|x| { + x.compressed_account + .merkle_context + .merkle_tree_pubkey + .into() + }) .collect::>(); let proof_rpc_result = rpc .get_validity_proof(input_compressed_account_hashes.clone(), vec![], None) @@ -1141,7 +1146,12 @@ pub async fn approve_test>(); let input_merkle_tree_pubkeys = input_compressed_accounts .iter() - .map(|x| x.compressed_account.merkle_context.merkle_tree_pubkey) + .map(|x| { + x.compressed_account + .merkle_context + .merkle_tree_pubkey + .into() + }) .collect::>(); println!( "input_compressed_account_hashes: {:?}", @@ -1223,7 +1233,7 @@ pub async fn approve_test(rpc, output_merkle_tree_accounts.as_slice()).await; let input_merkle_tree_accounts = - test_indexer.get_state_merkle_tree_accounts(&input_merkle_tree_pubkeys); + test_indexer.get_state_merkle_tree_accounts(input_merkle_tree_pubkeys.as_slice()); let input_merkle_tree_test_snapshots = get_merkle_tree_snapshots::(rpc, input_merkle_tree_accounts.as_slice()).await; let context_payer = rpc.get_payer().insecure_clone(); @@ -1310,7 +1320,12 @@ pub async fn revoke_test>(); let input_merkle_tree_pubkeys = input_compressed_accounts .iter() - .map(|x| x.compressed_account.merkle_context.merkle_tree_pubkey) + .map(|x| { + x.compressed_account + .merkle_context + .merkle_tree_pubkey + .into() + }) .collect::>(); let proof_rpc_result = rpc .get_validity_proof(input_compressed_account_hashes.clone(), vec![], None) @@ -1471,7 +1486,12 @@ pub async fn freeze_or_thaw_test< .collect::>(); let input_merkle_tree_pubkeys = input_compressed_accounts .iter() - .map(|x| 
x.compressed_account.merkle_context.merkle_tree_pubkey) + .map(|x| { + x.compressed_account + .merkle_context + .merkle_tree_pubkey + .into() + }) .collect::>(); let proof_rpc_result = rpc .get_validity_proof(input_compressed_account_hashes.clone(), vec![], None) @@ -1757,7 +1777,12 @@ pub async fn create_burn_test_instruction>(); let input_merkle_tree_pubkeys = input_compressed_accounts .iter() - .map(|x| x.compressed_account.merkle_context.merkle_tree_pubkey) + .map(|x| { + x.compressed_account + .merkle_context + .merkle_tree_pubkey + .into() + }) .collect::>(); let proof_rpc_result = rpc .get_validity_proof(input_compressed_account_hashes.clone(), vec![], None) diff --git a/program-tests/utils/src/system_program.rs b/program-tests/utils/src/system_program.rs index 50b3fd3bdd..40755d54dd 100644 --- a/program-tests/utils/src/system_program.rs +++ b/program-tests/utils/src/system_program.rs @@ -56,15 +56,15 @@ pub async fn create_addresses_test< let mut derived_addresses = Vec::new(); for (i, address_seed) in address_seeds.iter().enumerate() { let derived_address = - derive_address_legacy(&address_merkle_tree_pubkeys[i], address_seed).unwrap(); + derive_address_legacy(&address_merkle_tree_pubkeys[i].into(), address_seed).unwrap(); derived_addresses.push(derived_address); } let mut address_params = Vec::new(); for (i, seed) in address_seeds.iter().enumerate() { let new_address_params = NewAddressParams { - address_queue_pubkey: address_merkle_tree_queue_pubkeys[i], - address_merkle_tree_pubkey: address_merkle_tree_pubkeys[i], + address_queue_pubkey: address_merkle_tree_queue_pubkeys[i].into(), + address_merkle_tree_pubkey: address_merkle_tree_pubkeys[i].into(), seed: *seed, address_merkle_tree_root_index: 0, }; @@ -75,7 +75,7 @@ pub async fn create_addresses_test< for address in derived_addresses.iter() { output_compressed_accounts.push(CompressedAccount { lamports: 0, - owner: rpc.get_payer().pubkey(), + owner: rpc.get_payer().pubkey().into(), data: None, address: 
Some(*address), }); @@ -85,11 +85,12 @@ pub async fn create_addresses_test< for compressed_account in input_compressed_accounts.iter() { output_compressed_accounts.push(CompressedAccount { lamports: 0, - owner: rpc.get_payer().pubkey(), + owner: rpc.get_payer().pubkey().into(), data: None, address: compressed_account.compressed_account.address, }); - output_merkle_tree_pubkeys.push(compressed_account.merkle_context.merkle_tree_pubkey); + output_merkle_tree_pubkeys + .push(compressed_account.merkle_context.merkle_tree_pubkey.into()); } } @@ -138,7 +139,7 @@ pub async fn compress_sol_test>(); let state_input_merkle_trees = if state_input_merkle_trees.is_empty() { None @@ -345,7 +347,7 @@ pub async fn compressed_transaction_test< .enumerate() .map(|(i, x)| AddressWithTree { address: inputs.created_addresses.as_ref().unwrap()[i], - tree: x.address_merkle_tree_pubkey, + tree: x.address_merkle_tree_pubkey.into(), }) .collect::>(); let proof_rpc_res = inputs @@ -467,7 +469,7 @@ pub async fn compressed_transaction_test< address_queue_pubkeys: &inputs .new_address_params .iter() - .map(|x| x.address_queue_pubkey) + .map(|x| x.address_queue_pubkey.into()) .collect::>(), }; assert_compressed_transaction(input).await; @@ -564,10 +566,10 @@ pub fn create_invoke_instruction_data_and_remaining_accounts( }) .collect::>(); for (i, context) in merkle_context.iter().enumerate() { - match remaining_accounts.get(&context.merkle_tree_pubkey) { + match remaining_accounts.get(&context.merkle_tree_pubkey.into()) { Some(_) => {} None => { - remaining_accounts.insert(context.merkle_tree_pubkey, index); + remaining_accounts.insert(context.merkle_tree_pubkey.into(), index); index += 1; } }; @@ -581,7 +583,7 @@ pub fn create_invoke_instruction_data_and_remaining_accounts( compressed_account: input_compressed_accounts[i].clone(), merkle_context: PackedMerkleContext { merkle_tree_pubkey_index: *remaining_accounts - .get(&context.merkle_tree_pubkey) + .get(&context.merkle_tree_pubkey.into()) 
.unwrap() as u8, queue_pubkey_index: 0, leaf_index: context.leaf_index, @@ -593,16 +595,18 @@ pub fn create_invoke_instruction_data_and_remaining_accounts( } for (i, context) in merkle_context.iter().enumerate() { - match remaining_accounts.get(&context.queue_pubkey) { + match remaining_accounts.get(&context.queue_pubkey.into()) { Some(_) => {} None => { - remaining_accounts.insert(context.queue_pubkey, index); + remaining_accounts.insert(context.queue_pubkey.into(), index); index += 1; } }; _input_compressed_accounts[i] .merkle_context - .queue_pubkey_index = *remaining_accounts.get(&context.queue_pubkey).unwrap() as u8; + .queue_pubkey_index = *remaining_accounts + .get(&context.queue_pubkey.into()) + .unwrap() as u8; } let mut output_compressed_accounts_with_context: Vec = @@ -627,31 +631,35 @@ pub fn create_invoke_instruction_data_and_remaining_accounts( } for (i, params) in new_address_params.iter().enumerate() { - match remaining_accounts.get(¶ms.address_merkle_tree_pubkey) { + match remaining_accounts.get(¶ms.address_merkle_tree_pubkey.into()) { Some(_) => {} None => { - remaining_accounts.insert(params.address_merkle_tree_pubkey, index); + remaining_accounts.insert(params.address_merkle_tree_pubkey.into(), index); index += 1; } }; new_address_params_packed[i].address_merkle_tree_account_index = *remaining_accounts - .get(¶ms.address_merkle_tree_pubkey) + .get(¶ms.address_merkle_tree_pubkey.into()) .unwrap() as u8; } for (i, params) in new_address_params.iter().enumerate() { - match remaining_accounts.get(¶ms.address_queue_pubkey) { + match remaining_accounts.get(¶ms.address_queue_pubkey.into()) { Some(_) => {} None => { - remaining_accounts.insert(params.address_queue_pubkey, index); + remaining_accounts.insert(params.address_queue_pubkey.into(), index); index += 1; } }; new_address_params_packed[i].address_queue_account_index = *remaining_accounts - .get(¶ms.address_queue_pubkey) + .get(¶ms.address_queue_pubkey.into()) .unwrap() as u8; } + // let mut 
remaining_accounts = remaining_accounts + // .iter() + // .map(|(k, i)| (AccountMeta::new(*k, false), *i)) + // .collect::>(); let mut remaining_accounts = remaining_accounts .iter() .map(|(k, i)| (AccountMeta::new(*k, false), *i)) @@ -689,13 +697,13 @@ mod test { let input_compressed_accounts = vec![ CompressedAccount { lamports: 100, - owner: payer, + owner: payer.into(), address: None, data: None, }, CompressedAccount { lamports: 100, - owner: payer, + owner: payer.into(), address: None, data: None, }, @@ -703,13 +711,13 @@ mod test { let output_compressed_accounts = vec![ CompressedAccount { lamports: 50, - owner: payer, + owner: payer.into(), address: None, data: None, }, CompressedAccount { lamports: 150, - owner: recipient, + owner: recipient.into(), address: None, data: None, }, @@ -721,15 +729,15 @@ mod test { let nullifier_array_pubkey = Keypair::new().pubkey(); let input_merkle_context = vec![ MerkleContext { - merkle_tree_pubkey, - queue_pubkey: nullifier_array_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), + queue_pubkey: nullifier_array_pubkey.into(), leaf_index: 0, prove_by_index: false, tree_type: light_compressed_account::TreeType::StateV1, }, MerkleContext { - merkle_tree_pubkey, - queue_pubkey: nullifier_array_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), + queue_pubkey: nullifier_array_pubkey.into(), leaf_index: 1, prove_by_index: false, tree_type: light_compressed_account::TreeType::StateV1, diff --git a/programs/account-compression/Cargo.toml b/programs/account-compression/Cargo.toml index 36bbe1021c..222110f664 100644 --- a/programs/account-compression/Cargo.toml +++ b/programs/account-compression/Cargo.toml @@ -41,6 +41,7 @@ light-batched-merkle-tree = { workspace = true, features = ["solana"] } light-merkle-tree-metadata = { workspace = true, features = ["anchor"] } light-zero-copy = { workspace = true } zerocopy = { workspace = true, features = ["derive"] } +thiserror = { workspace = true } [target.'cfg(not(target_os = 
"solana"))'.dependencies] solana-sdk = { workspace = true } diff --git a/programs/account-compression/src/errors.rs b/programs/account-compression/src/errors.rs index c39071adf0..2af9bf81e9 100644 --- a/programs/account-compression/src/errors.rs +++ b/programs/account-compression/src/errors.rs @@ -1,5 +1,11 @@ use anchor_lang::prelude::*; +use light_batched_merkle_tree::errors::BatchedMerkleTreeError; +use light_concurrent_merkle_tree::errors::ConcurrentMerkleTreeError; +use light_indexed_merkle_tree::errors::IndexedMerkleTreeError; +use light_merkle_tree_metadata::errors::MerkleTreeMetadataError; +// use thiserror::Error; +// #[derive(Error, Debug, Clone, Eq, PartialEq)] #[error_code] pub enum AccountCompressionErrorCode { AddressMerkleTreeAccountDiscriminatorMismatch, @@ -66,4 +72,46 @@ pub enum AccountCompressionErrorCode { UnsupportedHeight, UnsupportedParameters, V1AccountMarkedAsProofByIndex, + #[msg("MerkleTreeMetadataError")] + MerkleTreeMetadataError, + #[msg("BatchedMerkleTreeError")] + BatchedMerkleTreeError, + #[msg("ConcurrentMerkleTreeError")] + ConcurrentMerkleTreeError, + #[msg("IndexedMerkleTreeError")] + IndexedMerkleTreeError, } + +impl From for AccountCompressionErrorCode { + fn from(err: MerkleTreeMetadataError) -> Self { + msg!("Merkle tree metadata error {}", err); + AccountCompressionErrorCode::MerkleTreeMetadataError + } +} + +impl From for AccountCompressionErrorCode { + fn from(err: BatchedMerkleTreeError) -> Self { + msg!("Batched merkle tree error {}", err); + AccountCompressionErrorCode::BatchedMerkleTreeError + } +} + +impl From for AccountCompressionErrorCode { + fn from(err: ConcurrentMerkleTreeError) -> Self { + msg!("Concurrent merkle tree error {}", err); + AccountCompressionErrorCode::ConcurrentMerkleTreeError + } +} + +impl From for AccountCompressionErrorCode { + fn from(err: IndexedMerkleTreeError) -> Self { + msg!("Indexed merkle tree error {}", err); + AccountCompressionErrorCode::IndexedMerkleTreeError + } +} + +// impl 
From for ProgramError { +// fn from(e: AccountCompressionErrorCode) -> ProgramError { +// ProgramError::Custom(e as u32 + 6000) +// } +// } diff --git a/programs/account-compression/src/instructions/nullify_leaves.rs b/programs/account-compression/src/instructions/nullify_leaves.rs index 4f77fdec01..10cd88471a 100644 --- a/programs/account-compression/src/instructions/nullify_leaves.rs +++ b/programs/account-compression/src/instructions/nullify_leaves.rs @@ -106,7 +106,11 @@ fn insert_nullifier<'a, 'c: 'info, 'info>( { let merkle_tree = ctx.accounts.merkle_tree.load()?; - if merkle_tree.metadata.associated_queue != ctx.accounts.nullifier_queue.key().into() { + if merkle_tree.metadata.associated_queue + != light_compressed_account::Pubkey::new_from_array( + ctx.accounts.nullifier_queue.key().to_bytes(), + ) + { msg!( "Merkle tree and nullifier queue are not associated. Merkle tree associated nullifier queue {:?} != nullifier queue {}", merkle_tree.metadata.associated_queue, diff --git a/programs/account-compression/src/instructions/rollover_address_merkle_tree_and_queue.rs b/programs/account-compression/src/instructions/rollover_address_merkle_tree_and_queue.rs index 42e6842152..115b15c91c 100644 --- a/programs/account-compression/src/instructions/rollover_address_merkle_tree_and_queue.rs +++ b/programs/account-compression/src/instructions/rollover_address_merkle_tree_and_queue.rs @@ -3,6 +3,7 @@ use light_account_checks::checks::check_account_balance_is_rent_exempt; use crate::{ address_merkle_tree_from_bytes_zero_copy, + errors::AccountCompressionErrorCode, processor::{ initialize_address_merkle_tree::process_initialize_address_merkle_tree, initialize_address_queue::process_initialize_address_queue, @@ -85,14 +86,14 @@ pub fn process_rollover_address_merkle_tree_and_queue<'a, 'b, 'c: 'info, 'info>( ctx.accounts.old_queue.key().into(), ctx.accounts.new_address_merkle_tree.key().into(), ) - .map_err(ProgramError::from)?; + 
.map_err(AccountCompressionErrorCode::from)?; queue_account_loaded .metadata .rollover( ctx.accounts.old_address_merkle_tree.key().into(), ctx.accounts.new_queue.key().into(), ) - .map_err(ProgramError::from)?; + .map_err(AccountCompressionErrorCode::from)?; let merkle_tree_metadata = merkle_tree_account_loaded.metadata; let queue_metadata = queue_account_loaded.metadata; diff --git a/programs/account-compression/src/instructions/rollover_batched_state_merkle_tree.rs b/programs/account-compression/src/instructions/rollover_batched_state_merkle_tree.rs index 275d5db3dd..1b15020cbe 100644 --- a/programs/account-compression/src/instructions/rollover_batched_state_merkle_tree.rs +++ b/programs/account-compression/src/instructions/rollover_batched_state_merkle_tree.rs @@ -6,6 +6,7 @@ use light_batched_merkle_tree::{ use light_merkle_tree_metadata::errors::MerkleTreeMetadataError; use crate::{ + errors::AccountCompressionErrorCode, utils::{ check_signer_is_registered_or_authority::{ check_signer_is_registered_or_authority, GroupAccounts, @@ -91,7 +92,10 @@ pub fn process_rollover_batched_state_merkle_tree<'a, 'b, 'c: 'info, 'info>( rent, )?; if ctx.accounts.old_output_queue.to_account_info().lamports() == 0 { - return Err(ProgramError::from(MerkleTreeMetadataError::NotReadyForRollover).into()); + return Err(AccountCompressionErrorCode::from( + MerkleTreeMetadataError::NotReadyForRollover, + ) + .into()); } Ok(()) } diff --git a/programs/account-compression/src/instructions/rollover_state_merkle_tree_and_queue.rs b/programs/account-compression/src/instructions/rollover_state_merkle_tree_and_queue.rs index 7c8c5bb306..6e98354331 100644 --- a/programs/account-compression/src/instructions/rollover_state_merkle_tree_and_queue.rs +++ b/programs/account-compression/src/instructions/rollover_state_merkle_tree_and_queue.rs @@ -3,6 +3,7 @@ use light_account_checks::checks::check_account_balance_is_rent_exempt; use light_merkle_tree_metadata::errors::MerkleTreeMetadataError; use 
crate::{ + errors::AccountCompressionErrorCode, processor::{ initialize_concurrent_merkle_tree::process_initialize_state_merkle_tree, initialize_nullifier_queue::process_initialize_nullifier_queue, @@ -91,14 +92,14 @@ pub fn process_rollover_state_merkle_tree_nullifier_queue_pair<'a, 'b, 'c: 'info ctx.accounts.old_nullifier_queue.key().into(), ctx.accounts.new_state_merkle_tree.key().into(), ) - .map_err(ProgramError::from)?; + .map_err(AccountCompressionErrorCode::from)?; queue_account_loaded .metadata .rollover( ctx.accounts.old_state_merkle_tree.key().into(), ctx.accounts.new_nullifier_queue.key().into(), ) - .map_err(ProgramError::from)?; + .map_err(AccountCompressionErrorCode::from)?; let merkle_tree_metadata = merkle_tree_account_loaded.metadata; let queue_metadata = queue_account_loaded.metadata; @@ -190,7 +191,10 @@ pub fn process_rollover_state_merkle_tree_nullifier_queue_pair<'a, 'b, 'c: 'info .lamports() == 0 { - return Err(ProgramError::from(MerkleTreeMetadataError::NotReadyForRollover).into()); + return Err(AccountCompressionErrorCode::from( + MerkleTreeMetadataError::NotReadyForRollover, + ) + .into()); } Ok(()) } diff --git a/programs/account-compression/src/instructions/update_address_merkle_tree.rs b/programs/account-compression/src/instructions/update_address_merkle_tree.rs index 81587eecf2..921b2d18ff 100644 --- a/programs/account-compression/src/instructions/update_address_merkle_tree.rs +++ b/programs/account-compression/src/instructions/update_address_merkle_tree.rs @@ -56,7 +56,9 @@ pub fn process_update_address_merkle_tree<'info>( { let merkle_tree = ctx.accounts.merkle_tree.load_mut()?; - if merkle_tree.metadata.associated_queue != ctx.accounts.queue.key().into() { + if merkle_tree.metadata.associated_queue + != light_compressed_account::Pubkey::from(ctx.accounts.queue.key()) + { msg!( "Merkle tree and nullifier queue are not associated. 
Merkle tree associated address queue {:?} != provided queue {}", merkle_tree.metadata.associated_queue, diff --git a/programs/account-compression/src/processor/initialize_address_queue.rs b/programs/account-compression/src/processor/initialize_address_queue.rs index 316982b3d8..8db3425427 100644 --- a/programs/account-compression/src/processor/initialize_address_queue.rs +++ b/programs/account-compression/src/processor/initialize_address_queue.rs @@ -6,7 +6,10 @@ use light_merkle_tree_metadata::{ QueueType, }; -use crate::state::{queue_from_bytes_zero_copy_init, QueueAccount}; +use crate::{ + errors::AccountCompressionErrorCode, + state::{queue_from_bytes_zero_copy_init, QueueAccount}, +}; pub fn process_initialize_address_queue<'info>( queue_account_info: &AccountInfo<'info>, @@ -33,9 +36,9 @@ pub fn process_initialize_address_queue<'info>( let queue_rent = queue_account_info.lamports(); let rollover_fee = if let Some(rollover_threshold) = rollover_threshold { let rollover_fee = compute_rollover_fee(rollover_threshold, height, merkle_tree_rent) - .map_err(ProgramError::from)? + .map_err(AccountCompressionErrorCode::from)? 
+ compute_rollover_fee(rollover_threshold, height, queue_rent) - .map_err(ProgramError::from)?; + .map_err(AccountCompressionErrorCode::from)?; check_rollover_fee_sufficient( rollover_fee, queue_rent, @@ -43,7 +46,7 @@ pub fn process_initialize_address_queue<'info>( rollover_threshold, height, ) - .map_err(ProgramError::from)?; + .map_err(AccountCompressionErrorCode::from)?; msg!("address queue rollover_fee: {}", rollover_fee); rollover_fee } else { diff --git a/programs/account-compression/src/processor/initialize_concurrent_merkle_tree.rs b/programs/account-compression/src/processor/initialize_concurrent_merkle_tree.rs index a120826e4b..4823f67b53 100644 --- a/programs/account-compression/src/processor/initialize_concurrent_merkle_tree.rs +++ b/programs/account-compression/src/processor/initialize_concurrent_merkle_tree.rs @@ -5,7 +5,10 @@ use light_merkle_tree_metadata::{ rollover::{check_rollover_fee_sufficient, RolloverMetadata}, }; -use crate::{state::StateMerkleTreeAccount, state_merkle_tree_from_bytes_zero_copy_init}; +use crate::{ + errors::AccountCompressionErrorCode, state::StateMerkleTreeAccount, + state_merkle_tree_from_bytes_zero_copy_init, +}; #[allow(unused_variables)] pub fn process_initialize_state_merkle_tree( @@ -33,9 +36,9 @@ pub fn process_initialize_state_merkle_tree( Some(rollover_threshold) => { let rollover_fee = compute_rollover_fee(rollover_threshold, *height, merkle_tree_rent) - .map_err(ProgramError::from)? + .map_err(AccountCompressionErrorCode::from)? 
+ compute_rollover_fee(rollover_threshold, *height, queue_rent) - .map_err(ProgramError::from)?; + .map_err(AccountCompressionErrorCode::from)?; check_rollover_fee_sufficient( rollover_fee, queue_rent, @@ -43,7 +46,7 @@ pub fn process_initialize_state_merkle_tree( rollover_threshold, *height, ) - .map_err(ProgramError::from)?; + .map_err(AccountCompressionErrorCode::from)?; msg!(" state Merkle tree rollover_fee: {}", rollover_fee); rollover_fee } diff --git a/programs/account-compression/src/processor/insert_addresses.rs b/programs/account-compression/src/processor/insert_addresses.rs index ed4792b700..1531cea04a 100644 --- a/programs/account-compression/src/processor/insert_addresses.rs +++ b/programs/account-compression/src/processor/insert_addresses.rs @@ -136,7 +136,9 @@ fn process_address_v1<'info>( let queue_data = address_queue.try_borrow_data()?; let queue = bytemuck::from_bytes::(&queue_data[8..QueueAccount::LEN]); // 3. Check queue and Merkle tree are associated. - if queue.metadata.associated_merkle_tree != (*merkle_pubkey).into() { + if queue.metadata.associated_merkle_tree + != light_compressed_account::Pubkey::from(*merkle_pubkey) + { msg!( "Queue account {:?} is not associated with Merkle tree {:?}", address_queue.key(), diff --git a/programs/account-compression/src/processor/insert_nullifiers.rs b/programs/account-compression/src/processor/insert_nullifiers.rs index 20ecaaf2ce..18eea8266d 100644 --- a/programs/account-compression/src/processor/insert_nullifiers.rs +++ b/programs/account-compression/src/processor/insert_nullifiers.rs @@ -175,7 +175,9 @@ fn process_nullifiers_v1<'info>( // Discriminator is already checked in try_from_account_infos. let queue = bytemuck::from_bytes::(&queue_data[8..QueueAccount::LEN]); // 3. Check queue and Merkle tree are associated. 
- if queue.metadata.associated_merkle_tree != (*merkle_pubkey).into() { + if queue.metadata.associated_merkle_tree + != light_compressed_account::Pubkey::from(*merkle_pubkey) + { msg!( "Queue account {:?} is not associated with Merkle tree {:?}", nullifier_queue.key(), diff --git a/programs/compressed-token/src/freeze.rs b/programs/compressed-token/src/freeze.rs index c016f571bb..dc8d160e15 100644 --- a/programs/compressed-token/src/freeze.rs +++ b/programs/compressed-token/src/freeze.rs @@ -175,7 +175,7 @@ fn create_token_output_accounts( BATCHED_DISCRIMINATOR => token_data.hash(), _ => panic!(), } - .map_err(ProgramError::from)?; + .map_err(|_| crate::ErrorCode::HashToFieldError)?; let data: CompressedAccountData = CompressedAccountData { discriminator: TOKEN_COMPRESSED_ACCOUNT_DISCRIMINATOR, @@ -184,7 +184,7 @@ fn create_token_output_accounts( }; output_compressed_accounts[i] = OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports: token_data_with_context.lamports.unwrap_or(0), data: Some(data), address: None, @@ -522,7 +522,7 @@ pub mod test_freeze { }; expected_compressed_output_accounts.push(OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports: 0, data: Some(change_data_struct), address: None, diff --git a/programs/compressed-token/src/process_mint.rs b/programs/compressed-token/src/process_mint.rs index c0aa0ecc38..5df075be8a 100644 --- a/programs/compressed-token/src/process_mint.rs +++ b/programs/compressed-token/src/process_mint.rs @@ -570,7 +570,7 @@ mod test { output_compressed_accounts[i] = OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports, data: Some(data), address: None, @@ -635,7 +635,7 @@ mod test { output_compressed_accounts[i] = OutputCompressedAccountWithPackedContext { compressed_account: 
CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports, data: Some(data), address: None, diff --git a/programs/compressed-token/src/process_transfer.rs b/programs/compressed-token/src/process_transfer.rs index 49d45cfbac..b47f2caf61 100644 --- a/programs/compressed-token/src/process_transfer.rs +++ b/programs/compressed-token/src/process_transfer.rs @@ -154,7 +154,7 @@ pub fn process_transfer<'a, 'b, 'c, 'info: 'b + 'c>( new_len, OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: ctx.accounts.authority.key(), + owner: ctx.accounts.authority.key().into(), lamports: change_lamports, data: None, address: None, @@ -272,7 +272,7 @@ pub fn create_output_compressed_accounts( &amount_bytes, &hashed_delegate, ) - .map_err(ProgramError::from)?; + .map_err(|_| crate::ErrorCode::HashToFieldError)?; let data = CompressedAccountData { discriminator: TOKEN_COMPRESSED_ACCOUNT_DISCRIMINATOR, data: token_data_bytes, @@ -287,7 +287,7 @@ pub fn create_output_compressed_accounts( sum_lamports += lamports.into(); output_compressed_accounts[i] = OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: crate::ID, + owner: crate::ID.into(), lamports: lamports.into(), data: Some(data), address: None, @@ -361,7 +361,7 @@ pub fn add_data_hash_to_input_compressed_accounts( &amount_bytes, &hashed_delegate, ) - .map_err(ProgramError::from)? + .map_err(|_| crate::ErrorCode::HashToFieldError)? } else { TokenData::hash_frozen_with_hashed_values( hashed_mint, @@ -369,7 +369,7 @@ pub fn add_data_hash_to_input_compressed_accounts( &amount_bytes, &hashed_delegate, ) - .map_err(ProgramError::from)? + .map_err(|_| crate::ErrorCode::HashToFieldError)? 
}; } Ok(()) @@ -1000,10 +1000,11 @@ pub mod transfer_sdk { let mut input_token_data_with_context: Vec = Vec::new(); for (i, token_data) in input_token_data.iter().enumerate() { - match remaining_accounts.get(&input_merkle_context[i].merkle_tree_pubkey) { + match remaining_accounts.get(&input_merkle_context[i].merkle_tree_pubkey.into()) { Some(_) => {} None => { - remaining_accounts.insert(input_merkle_context[i].merkle_tree_pubkey, index); + remaining_accounts + .insert(input_merkle_context[i].merkle_tree_pubkey.into(), index); index += 1; } }; @@ -1031,7 +1032,7 @@ pub mod transfer_sdk { delegate_index, merkle_context: PackedMerkleContext { merkle_tree_pubkey_index: *remaining_accounts - .get(&input_merkle_context[i].merkle_tree_pubkey) + .get(&input_merkle_context[i].merkle_tree_pubkey.into()) .unwrap() as u8, queue_pubkey_index: 0, leaf_index: input_merkle_context[i].leaf_index, @@ -1044,17 +1045,17 @@ pub mod transfer_sdk { input_token_data_with_context.push(token_data_with_context); } for (i, _) in input_token_data.iter().enumerate() { - match remaining_accounts.get(&input_merkle_context[i].queue_pubkey) { + match remaining_accounts.get(&input_merkle_context[i].queue_pubkey.into()) { Some(_) => {} None => { - remaining_accounts.insert(input_merkle_context[i].queue_pubkey, index); + remaining_accounts.insert(input_merkle_context[i].queue_pubkey.into(), index); index += 1; } }; input_token_data_with_context[i] .merkle_context .queue_pubkey_index = *remaining_accounts - .get(&input_merkle_context[i].queue_pubkey) + .get(&input_merkle_context[i].queue_pubkey.into()) .unwrap() as u8; } let mut _output_compressed_accounts: Vec = diff --git a/programs/registry/src/lib.rs b/programs/registry/src/lib.rs index 43e0b45449..cb9eace16c 100644 --- a/programs/registry/src/lib.rs +++ b/programs/registry/src/lib.rs @@ -682,7 +682,7 @@ pub fn check_forester( return err!(RegistryError::InvalidNetworkFee); } Ok(()) - } else if metadata.access_metadata.forester == authority.into() 
{ + } else if metadata.access_metadata.forester.to_bytes() == authority.to_bytes() { Ok(()) } else { err!(RegistryError::InvalidSigner) diff --git a/programs/system/Cargo.toml b/programs/system/Cargo.toml index a214c0fbd1..6e81012759 100644 --- a/programs/system/Cargo.toml +++ b/programs/system/Cargo.toml @@ -40,6 +40,7 @@ light-account-checks = { workspace = true, features = ["pinocchio"] } pinocchio = { workspace = true } pinocchio-system = { version = "0.2.3" } solana-pubkey = { workspace = true, features = ["curve25519", "sha2"] } +solana-msg = { workspace = true } [dev-dependencies] rand = { workspace = true } diff --git a/programs/system/src/context.rs b/programs/system/src/context.rs index 7c7bc531ae..3a80014d3f 100644 --- a/programs/system/src/context.rs +++ b/programs/system/src/context.rs @@ -336,7 +336,7 @@ impl<'a, T: InstructionData<'a>> WrappedInstructionData<'a, T> { } let input_account = PackedCompressedAccountWithMerkleContext { compressed_account: CompressedAccount { - owner: input.owner().into(), + owner: *input.owner(), lamports: input.lamports(), address: input.address(), data: input.data(), @@ -356,7 +356,7 @@ impl<'a, T: InstructionData<'a>> WrappedInstructionData<'a, T> { } let output_account = OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: output.owner().into(), + owner: output.owner(), lamports: output.lamports(), address: output.address(), data: output.data(), diff --git a/programs/system/src/errors.rs b/programs/system/src/errors.rs index 735d56ce7d..d22947f456 100644 --- a/programs/system/src/errors.rs +++ b/programs/system/src/errors.rs @@ -1,5 +1,6 @@ // use anchor_lang::error_code; use pinocchio::program_error::ProgramError; +use solana_msg::msg; use thiserror::Error; #[derive(Debug, Error, PartialEq)] @@ -108,6 +109,10 @@ pub enum SystemProgramError { InvalidTreeHeight, #[error("TooManyOutputAccounts")] TooManyOutputAccounts, + #[error("CompressedAccountError")] + CompressedAccountError, 
+ #[error("HasherError")] + HasherError, } impl From for ProgramError { @@ -115,3 +120,17 @@ impl From for ProgramError { ProgramError::Custom(e as u32 + 6000) } } + +impl From for SystemProgramError { + fn from(err: light_compressed_account::CompressedAccountError) -> Self { + msg!("Compressed account error {}", err); + SystemProgramError::CompressedAccountError + } +} + +impl From for SystemProgramError { + fn from(err: light_hasher::HasherError) -> Self { + msg!("Hasher error {}", err); + SystemProgramError::HasherError + } +} diff --git a/programs/system/src/invoke/verify_signer.rs b/programs/system/src/invoke/verify_signer.rs index 98a94015c6..18cd38b1d9 100644 --- a/programs/system/src/invoke/verify_signer.rs +++ b/programs/system/src/invoke/verify_signer.rs @@ -47,7 +47,7 @@ mod test { let compressed_account_with_context = PackedCompressedAccountWithMerkleContext { compressed_account: CompressedAccount { - owner: authority, + owner: authority.into(), ..CompressedAccount::default() }, ..PackedCompressedAccountWithMerkleContext::default() @@ -70,7 +70,7 @@ mod test { let invalid_compressed_account_with_context = PackedCompressedAccountWithMerkleContext { compressed_account: CompressedAccount { - owner: solana_pubkey::Pubkey::new_unique().to_bytes(), + owner: solana_pubkey::Pubkey::new_unique().to_bytes().into(), ..CompressedAccount::default() }, ..PackedCompressedAccountWithMerkleContext::default() diff --git a/programs/system/src/invoke_cpi/process_cpi_context.rs b/programs/system/src/invoke_cpi/process_cpi_context.rs index ecb2a7b8c3..1b12e9b1e5 100644 --- a/programs/system/src/invoke_cpi/process_cpi_context.rs +++ b/programs/system/src/invoke_cpi/process_cpi_context.rs @@ -188,7 +188,9 @@ fn validate_cpi_context_associated_with_merkle_tree<'a, 'info, T: InstructionDat return Err(SystemProgramError::NoInputs.into()); }; - if *cpi_context_account.associated_merkle_tree != first_merkle_tree_pubkey.into() { + if *cpi_context_account.associated_merkle_tree + != 
light_compressed_account::Pubkey::from(first_merkle_tree_pubkey) + { msg!(format!( "first_merkle_tree_pubkey {:?} != associated_merkle_tree {:?}", first_merkle_tree_pubkey, cpi_context_account.associated_merkle_tree @@ -277,7 +279,7 @@ mod tests { input_compressed_accounts_with_merkle_context: vec![ PackedCompressedAccountWithMerkleContext { compressed_account: CompressedAccount { - owner: solana_pubkey::Pubkey::new_unique().to_bytes(), + owner: solana_pubkey::Pubkey::new_unique().to_bytes().into(), lamports: iter.into(), address: None, data: None, @@ -294,7 +296,7 @@ mod tests { ], output_compressed_accounts: vec![OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: solana_pubkey::Pubkey::new_unique().to_bytes(), + owner: solana_pubkey::Pubkey::new_unique().to_bytes().into(), lamports: iter.into(), address: None, data: None, diff --git a/programs/system/src/processor/create_address_cpi_data.rs b/programs/system/src/processor/create_address_cpi_data.rs index ee4a28a947..3d67702cb3 100644 --- a/programs/system/src/processor/create_address_cpi_data.rs +++ b/programs/system/src/processor/create_address_cpi_data.rs @@ -37,8 +37,8 @@ pub fn derive_new_addresses<'info, 'a, 'b: 'a, const ADDRESS_ASSIGNMENT: bool>( ); ( - derive_address_legacy(&pubkey.into(), &new_address_params.seed()) - .map_err(ProgramError::from)?, + derive_address_legacy(pubkey, &new_address_params.seed()) + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))?, context .get_legacy_merkle_context(new_address_params.address_queue_index()) .unwrap() diff --git a/programs/system/src/processor/create_inputs_cpi_data.rs b/programs/system/src/processor/create_inputs_cpi_data.rs index 824e62952c..ae9bf5db2a 100644 --- a/programs/system/src/processor/create_inputs_cpi_data.rs +++ b/programs/system/src/processor/create_inputs_cpi_data.rs @@ -102,7 +102,7 @@ pub fn create_inputs_cpi_data<'a, 'info, T: InstructionData<'a>>( &merkle_context.leaf_index.into(), 
is_batched, ) - .map_err(ProgramError::from)?, + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))?, leaf_index: merkle_context.leaf_index, prove_by_index: merkle_context.prove_by_index() as u8, queue_index, @@ -112,7 +112,7 @@ pub fn create_inputs_cpi_data<'a, 'info, T: InstructionData<'a>>( hash_chain = cpi_ix_data.nullifiers[j].account_hash; } else { hash_chain = Poseidon::hashv(&[&hash_chain, &cpi_ix_data.nullifiers[j].account_hash]) - .map_err(ProgramError::from)?; + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))?; } } // TODO: benchmark the chaining. diff --git a/programs/system/src/processor/create_outputs_cpi_data.rs b/programs/system/src/processor/create_outputs_cpi_data.rs index 97a2489f69..9544b88ed5 100644 --- a/programs/system/src/processor/create_outputs_cpi_data.rs +++ b/programs/system/src/processor/create_outputs_cpi_data.rs @@ -169,7 +169,7 @@ pub fn create_outputs_cpi_data<'a, 'info, T: InstructionData<'a>>( &cpi_ix_data.output_leaf_indices[j].into(), is_batched, ) - .map_err(ProgramError::from)?; + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))?; cpi_ix_data.leaves[j].account_index = index_merkle_tree_account_account - 1; if !cpi_ix_data.nullifiers.is_empty() { @@ -177,7 +177,7 @@ pub fn create_outputs_cpi_data<'a, 'info, T: InstructionData<'a>>( hash_chain = cpi_ix_data.leaves[j].leaf; } else { hash_chain = Poseidon::hashv(&[&hash_chain, &cpi_ix_data.leaves[j].leaf]) - .map_err(ProgramError::from)?; + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))?; } } context.set_rollover_fee(current_index as u8, rollover_fee); diff --git a/programs/system/src/processor/process.rs b/programs/system/src/processor/process.rs index 872ae6d9df..e3fe64278e 100644 --- a/programs/system/src/processor/process.rs +++ b/programs/system/src/processor/process.rs @@ -209,7 +209,7 @@ pub fn process< &output_compressed_account_hashes, current_slot, ) - .map_err(ProgramError::from)?; + .map_err(|e| 
ProgramError::from(SystemProgramError::from(e)))?; } } // 11. Sum check --------------------------------------------------- diff --git a/programs/system/src/processor/sum_check.rs b/programs/system/src/processor/sum_check.rs index da8a59ef23..522d9f4b63 100644 --- a/programs/system/src/processor/sum_check.rs +++ b/programs/system/src/processor/sum_check.rs @@ -172,7 +172,7 @@ mod test { }; inputs.push(PackedCompressedAccountWithMerkleContext { compressed_account: CompressedAccount { - owner: Pubkey::new_unique().into(), + owner: Pubkey::new_unique(), lamports: *i, address: None, data: None, @@ -193,7 +193,7 @@ mod test { for amount in output_amounts.iter() { outputs.push(OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner: Pubkey::new_unique().into(), + owner: Pubkey::new_unique(), lamports: *amount, address: None, data: None, diff --git a/programs/system/src/processor/verify_proof.rs b/programs/system/src/processor/verify_proof.rs index 51bbbfabb7..fb1a832aef 100644 --- a/programs/system/src/processor/verify_proof.rs +++ b/programs/system/src/processor/verify_proof.rs @@ -168,18 +168,20 @@ pub fn verify_proof( { let public_input_hash = if !leaves.is_empty() && !addresses.is_empty() { // combined inclusion & non-inclusion proof - let inclusion_hash = - create_two_inputs_hash_chain(roots, leaves).map_err(ProgramError::from)?; + let inclusion_hash = create_two_inputs_hash_chain(roots, leaves) + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))?; let non_inclusion_hash = create_two_inputs_hash_chain(address_roots, addresses) - .map_err(ProgramError::from)?; + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))?; create_hash_chain_from_slice(&[inclusion_hash, non_inclusion_hash]) - .map_err(ProgramError::from)? + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))? } else if !leaves.is_empty() { // inclusion proof - create_two_inputs_hash_chain(roots, leaves).map_err(ProgramError::from)? 
+ create_two_inputs_hash_chain(roots, leaves) + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))? } else { // non-inclusion proof - create_two_inputs_hash_chain(address_roots, addresses).map_err(ProgramError::from)? + create_two_inputs_hash_chain(address_roots, addresses) + .map_err(|e| ProgramError::from(SystemProgramError::from(e)))? }; let vk = select_verifying_key(leaves.len(), addresses.len()) diff --git a/sdk-libs/client/src/indexer/types.rs b/sdk-libs/client/src/indexer/types.rs index 7457650e24..4966c477ce 100644 --- a/sdk-libs/client/src/indexer/types.rs +++ b/sdk-libs/client/src/indexer/types.rs @@ -11,8 +11,8 @@ use light_sdk::{ pack_accounts::PackedAccounts, tree_info::{PackedAddressTreeInfo, PackedStateTreeInfo}, }, + light_compressed_account::instruction_data::compressed_proof::CompressedProof, token::{AccountState, TokenData}, - verifier::CompressedProof, ValidityProof, }; use num_bigint::BigUint; @@ -455,9 +455,10 @@ impl TreeInfo { leaf_index: u32, prove_by_index: bool, ) -> light_compressed_account::compressed_account::MerkleContext { + use light_compressed_account::Pubkey; light_compressed_account::compressed_account::MerkleContext { - merkle_tree_pubkey: self.tree, - queue_pubkey: self.queue, + merkle_tree_pubkey: Pubkey::new_from_array(self.tree.to_bytes()), + queue_pubkey: Pubkey::new_from_array(self.queue.to_bytes()), leaf_index, tree_type: self.tree_type, prove_by_index, @@ -498,13 +499,13 @@ impl TryFrom for CompressedAccount { lamports: account.compressed_account.lamports, leaf_index: account.merkle_context.leaf_index, tree_info: TreeInfo { - tree: account.merkle_context.merkle_tree_pubkey, - queue: account.merkle_context.queue_pubkey, + tree: Pubkey::new_from_array(account.merkle_context.merkle_tree_pubkey.to_bytes()), + queue: Pubkey::new_from_array(account.merkle_context.queue_pubkey.to_bytes()), tree_type: account.merkle_context.tree_type, cpi_context: None, next_tree_info: None, }, - owner: 
account.compressed_account.owner, + owner: Pubkey::new_from_array(account.compressed_account.owner.to_bytes()), prove_by_index: account.merkle_context.prove_by_index, seq: None, slot_created: u64::MAX, @@ -514,8 +515,9 @@ impl TryFrom for CompressedAccount { impl From for CompressedAccountWithMerkleContext { fn from(account: CompressedAccount) -> Self { + use light_compressed_account::Pubkey; let compressed_account = ProgramCompressedAccount { - owner: account.owner, + owner: Pubkey::new_from_array(account.owner.to_bytes()), lamports: account.lamports, address: account.address, data: account.data, diff --git a/sdk-libs/client/src/rpc/client.rs b/sdk-libs/client/src/rpc/client.rs index 5469284d96..f5d7fc2f0b 100644 --- a/sdk-libs/client/src/rpc/client.rs +++ b/sdk-libs/client/src/rpc/client.rs @@ -208,9 +208,16 @@ impl LightClient { let mut vec_accounts = Vec::new(); let mut program_ids = Vec::new(); instructions_vec.iter().for_each(|x| { - program_ids.push(x.program_id); + program_ids.push(light_compressed_account::Pubkey::new_from_array( + x.program_id.to_bytes(), + )); vec.push(x.data.clone()); - vec_accounts.push(x.accounts.iter().map(|x| x.pubkey).collect()); + vec_accounts.push( + x.accounts + .iter() + .map(|x| light_compressed_account::Pubkey::new_from_array(x.pubkey.to_bytes())) + .collect(), + ); }); { let rpc_transaction_config = RpcTransactionConfig { @@ -265,13 +272,18 @@ impl LightClient { ) })?; vec.push(data); - program_ids.push( - account_keys[ui_compiled_instruction.program_id_index as usize], - ); + program_ids.push(light_compressed_account::Pubkey::new_from_array( + account_keys[ui_compiled_instruction.program_id_index as usize] + .to_bytes(), + )); vec_accounts.push( accounts .iter() - .map(|x| account_keys[(*x) as usize]) + .map(|x| { + light_compressed_account::Pubkey::new_from_array( + account_keys[(*x) as usize].to_bytes(), + ) + }) .collect(), ); } diff --git a/sdk-libs/macros/Cargo.toml b/sdk-libs/macros/Cargo.toml index 
47b14366a0..90db169c1f 100644 --- a/sdk-libs/macros/Cargo.toml +++ b/sdk-libs/macros/Cargo.toml @@ -12,7 +12,7 @@ quote = { workspace = true } syn = { workspace = true } light-hasher = { workspace = true } -ark-bn254 = { workspace = true } +# ark-bn254 = { workspace = true } light-poseidon = { workspace = true } [dev-dependencies] diff --git a/sdk-libs/macros/src/discriminator.rs b/sdk-libs/macros/src/discriminator.rs index 5498dfc49a..176749dcff 100644 --- a/sdk-libs/macros/src/discriminator.rs +++ b/sdk-libs/macros/src/discriminator.rs @@ -13,7 +13,7 @@ pub(crate) fn discriminator(input: ItemStruct) -> Result { let discriminator: proc_macro2::TokenStream = format!("{discriminator:?}").parse().unwrap(); Ok(quote! { - impl #impl_gen ::light_sdk::LightDiscriminator for #account_name #type_gen #where_clause { + impl #impl_gen LightDiscriminator for #account_name #type_gen #where_clause { const LIGHT_DISCRIMINATOR: [u8; 8] = #discriminator; const LIGHT_DISCRIMINATOR_SLICE: &'static [u8] = &Self::LIGHT_DISCRIMINATOR; diff --git a/sdk-libs/program-test/src/indexer/test_indexer.rs b/sdk-libs/program-test/src/indexer/test_indexer.rs index 219e415ddc..088d7a1c8b 100644 --- a/sdk-libs/program-test/src/indexer/test_indexer.rs +++ b/sdk-libs/program-test/src/indexer/test_indexer.rs @@ -1124,7 +1124,10 @@ impl TestIndexerExtensions for TestIndexer { ) -> Vec { self.compressed_accounts .iter() - .filter(|x| x.compressed_account.owner == *owner) + .filter(|x| { + x.compressed_account.owner + == light_compressed_account::Pubkey::from(owner.to_bytes()) + }) .cloned() .collect() } @@ -1562,7 +1565,10 @@ impl TestIndexer { pub fn get_compressed_balance(&self, owner: &Pubkey) -> u64 { self.compressed_accounts .iter() - .filter(|x| x.compressed_account.owner == *owner) + .filter(|x| { + x.compressed_account.owner + == light_compressed_account::Pubkey::from(owner.to_bytes()) + }) .map(|x| x.compressed_account.lamports) .sum() } @@ -1572,7 +1578,8 @@ impl TestIndexer { 
self.token_compressed_accounts .iter() .filter(|x| { - x.compressed_account.compressed_account.owner == *owner + x.compressed_account.compressed_account.owner + == light_compressed_account::Pubkey::from(owner.to_bytes()) && x.token_data.mint == *mint }) .map(|x| x.token_data.amount) @@ -1634,7 +1641,10 @@ impl TestIndexer { let bundle = &mut ::get_state_merkle_trees_mut(self) .iter_mut() - .find(|x| x.accounts.merkle_tree == merkle_tree_pubkey) + .find(|x| { + x.accounts.merkle_tree + == solana_pubkey::Pubkey::from(merkle_tree_pubkey.to_bytes()) + }) .unwrap(); // Store leaf indices of input accounts for batched trees if bundle.tree_type == TreeType::StateV2 { @@ -1656,8 +1666,11 @@ impl TestIndexer { } let merkle_tree = self.state_merkle_trees.iter().find(|x| { x.accounts.merkle_tree - == event.pubkey_array - [event.output_compressed_accounts[i].merkle_tree_index as usize] + == solana_pubkey::Pubkey::from( + event.pubkey_array + [event.output_compressed_accounts[i].merkle_tree_index as usize] + .to_bytes(), + ) }); // Check for output queue let merkle_tree = if let Some(merkle_tree) = merkle_tree { @@ -1667,8 +1680,12 @@ impl TestIndexer { .iter() .find(|x| { x.accounts.nullifier_queue - == event.pubkey_array - [event.output_compressed_accounts[i].merkle_tree_index as usize] + == solana_pubkey::Pubkey::from( + event.pubkey_array[event.output_compressed_accounts[i] + .merkle_tree_index + as usize] + .to_bytes(), + ) }) .unwrap() }; @@ -1679,7 +1696,7 @@ impl TestIndexer { // new accounts are inserted in front so that the newest accounts are found first match compressed_account.compressed_account.data.as_ref() { Some(data) => { - if compressed_account.compressed_account.owner == light_compressed_token::ID + if compressed_account.compressed_account.owner == light_compressed_account::Pubkey::from(light_compressed_token::ID.to_bytes()) && data.discriminator == light_compressed_token::constants::TOKEN_COMPRESSED_ACCOUNT_DISCRIMINATOR { if let Ok(token_data) = 
TokenData::deserialize(&mut data.data.as_slice()) { @@ -1691,8 +1708,8 @@ impl TestIndexer { .clone(), merkle_context: MerkleContext { leaf_index: event.output_leaf_indices[i], - merkle_tree_pubkey, - queue_pubkey: nullifier_queue_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type:merkle_tree.tree_type, }, @@ -1706,8 +1723,8 @@ impl TestIndexer { compressed_account: compressed_account.compressed_account.clone(), merkle_context: MerkleContext { leaf_index: event.output_leaf_indices[i], - merkle_tree_pubkey, - queue_pubkey: nullifier_queue_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type: merkle_tree.tree_type }, @@ -1721,8 +1738,8 @@ impl TestIndexer { compressed_account: compressed_account.compressed_account.clone(), merkle_context: MerkleContext { leaf_index: event.output_leaf_indices[i], - merkle_tree_pubkey, - queue_pubkey: nullifier_queue_pubkey, + merkle_tree_pubkey: merkle_tree_pubkey.into(), + queue_pubkey: nullifier_queue_pubkey.into(), prove_by_index: false, tree_type: merkle_tree.tree_type, }, @@ -1733,8 +1750,11 @@ impl TestIndexer { }; let merkle_tree = &mut self.state_merkle_trees.iter_mut().find(|x| { x.accounts.merkle_tree - == event.pubkey_array - [event.output_compressed_accounts[i].merkle_tree_index as usize] + == solana_pubkey::Pubkey::from( + event.pubkey_array + [event.output_compressed_accounts[i].merkle_tree_index as usize] + .to_bytes(), + ) }); if merkle_tree.is_some() { let merkle_tree = merkle_tree.as_mut().unwrap(); @@ -1757,8 +1777,12 @@ impl TestIndexer { .iter_mut() .find(|x| { x.accounts.nullifier_queue - == event.pubkey_array - [event.output_compressed_accounts[i].merkle_tree_index as usize] + == solana_pubkey::Pubkey::from( + event.pubkey_array[event.output_compressed_accounts[i] + .merkle_tree_index + as usize] + .to_bytes(), + ) }) .unwrap(); @@ -1780,7 
+1804,9 @@ impl TestIndexer { .address_merkle_trees .iter_mut() .enumerate() - .find(|(_, x)| x.accounts.merkle_tree == *pubkey) + .find(|(_, x)| { + x.accounts.merkle_tree == solana_pubkey::Pubkey::from(pubkey.to_bytes()) + }) { address_merkle_tree .queue_elements diff --git a/sdk-libs/program-test/src/program_test/rpc.rs b/sdk-libs/program-test/src/program_test/rpc.rs index 64a44487b7..a298a7ef8a 100644 --- a/sdk-libs/program-test/src/program_test/rpc.rs +++ b/sdk-libs/program-test/src/program_test/rpc.rs @@ -174,12 +174,10 @@ impl Rpc for LightProgramTest { .await?; } else { let _res = self.context.send_transaction(transaction).map_err(|x| { - #[cfg(not(debug_assertions))] - { - if self.config.log_failed_tx { - println!("{}", x.meta.pretty_logs()); - } + if self.config.log_failed_tx { + println!("{}", x.meta.pretty_logs()); } + RpcError::TransactionError(x.err) })?; #[cfg(debug_assertions)] @@ -350,12 +348,10 @@ impl LightProgramTest { .context .simulate_transaction(transaction.clone()) .map_err(|x| { - #[cfg(not(debug_assertions))] - { - if self.config.log_failed_tx { - println!("{}", x.meta.pretty_logs()); - } + if self.config.log_failed_tx { + println!("{}", x.meta.pretty_logs()); } + RpcError::TransactionError(x.err) })?; @@ -411,9 +407,12 @@ impl LightProgramTest { }); event_from_light_transaction( - program_ids.as_slice(), + &program_ids.iter().map(|x| (*x).into()).collect::>(), vec.as_slice(), - vec_accounts.to_vec(), + vec_accounts + .iter() + .map(|inner_vec| inner_vec.iter().map(|x| (*x).into()).collect()) + .collect(), ) .or(Ok::< Option>, @@ -424,12 +423,11 @@ impl LightProgramTest { // Transaction was successful, execute it. let _res = self.context.send_transaction(transaction).map_err(|x| { // Prevent duplicate prints for failing tx. 
- #[cfg(not(debug_assertions))] - { - if self.config.log_failed_tx { - println!("{}", x.meta.pretty_logs()); - } + + if self.config.log_failed_tx { + println!("{}", x.meta.pretty_logs()); } + RpcError::TransactionError(x.err) })?; #[cfg(debug_assertions)] diff --git a/sdk-libs/program-test/src/utils/setup_light_programs.rs b/sdk-libs/program-test/src/utils/setup_light_programs.rs index d723c15da4..221fcb826a 100644 --- a/sdk-libs/program-test/src/utils/setup_light_programs.rs +++ b/sdk-libs/program-test/src/utils/setup_light_programs.rs @@ -45,17 +45,40 @@ pub fn setup_light_programs( )))?; std::env::set_var("SBF_OUT_DIR", light_bin_path); let path = format!("{}/light_registry.so", light_bin_path); - program_test.add_program_from_file(light_registry::ID, path)?; + program_test + .add_program_from_file(light_registry::ID, path.clone()) + .inspect_err(|_| { + println!("Program light_registry bin not found in {}", path); + })?; let path = format!("{}/account_compression.so", light_bin_path); - program_test.add_program_from_file(account_compression::ID, path)?; + program_test + .add_program_from_file(account_compression::ID, path.clone()) + .inspect_err(|_| { + println!("Program account_compression bin not found in {}", path); + })?; let path = format!("{}/light_compressed_token.so", light_bin_path); - program_test.add_program_from_file(light_compressed_token::ID, path)?; + program_test + .add_program_from_file(light_compressed_token::ID, path.clone()) + .inspect_err(|_| { + println!("Program light_compressed_token bin not found in {}", path); + })?; let path = format!("{}/spl_noop.so", light_bin_path); - program_test.add_program_from_file(NOOP_PROGRAM_ID, path)?; + program_test + .add_program_from_file(NOOP_PROGRAM_ID, path.clone()) + .inspect_err(|_| { + println!("Program spl_noop bin not found in {}", path); + })?; #[cfg(feature = "devenv")] { let path = format!("{}/light_system_program_pinocchio.so", light_bin_path); - 
program_test.add_program_from_file(light_sdk::constants::PROGRAM_ID_LIGHT_SYSTEM, path)?; + program_test + .add_program_from_file(light_sdk::constants::PROGRAM_ID_LIGHT_SYSTEM, path.clone()) + .inspect_err(|_| { + println!( + "Program light_system_program_pinocchio bin not found in {}", + path + ); + })?; } #[cfg(not(feature = "devenv"))] diff --git a/sdk-libs/sdk-pinocchio/Cargo.toml b/sdk-libs/sdk-pinocchio/Cargo.toml new file mode 100644 index 0000000000..29dc22e309 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "light-sdk-pinocchio" +version = "0.12.0" +description = "Rust SDK for ZK Compression on Solana with Pinocchio features" +repository = "https://github.com/Lightprotocol/light-protocol" +license = "Apache-2.0" +edition = "2021" + +[features] +default = [] +v2 = [] + +[dependencies] +pinocchio = { workspace = true } +light-hasher = { workspace = true } +light-account-checks = { workspace = true } +light-macros = { workspace = true } +light-sdk-macros = { workspace = true } +borsh = { workspace = true } +thiserror = { workspace = true } +light-compressed-account = { workspace = true } diff --git a/sdk-libs/sdk-pinocchio/src/account.rs b/sdk-libs/sdk-pinocchio/src/account.rs new file mode 100644 index 0000000000..9fb3e4f922 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/account.rs @@ -0,0 +1,198 @@ +use std::ops::{Deref, DerefMut}; + +use light_compressed_account::{ + compressed_account::PackedMerkleContext, + instruction_data::with_account_info::{CompressedAccountInfo, InAccountInfo, OutAccountInfo}, +}; +use light_hasher::{DataHasher, Poseidon}; +use pinocchio::pubkey::Pubkey; + +use crate::{ + error::LightSdkError, instruction::account_meta::CompressedAccountMetaTrait, BorshDeserialize, + BorshSerialize, LightDiscriminator, +}; + +#[derive(Debug, PartialEq)] +pub struct LightAccount< + 'a, + A: BorshSerialize + BorshDeserialize + LightDiscriminator + DataHasher + Default, +> { + owner: &'a Pubkey, + pub account: 
A, + account_info: CompressedAccountInfo, +} + +impl<'a, A: BorshSerialize + BorshDeserialize + LightDiscriminator + DataHasher + Default> + LightAccount<'a, A> +{ + pub fn new_init( + owner: &'a Pubkey, + address: Option<[u8; 32]>, + output_state_tree_index: u8, + ) -> Self { + let output_account_info = OutAccountInfo { + output_merkle_tree_index: output_state_tree_index, + discriminator: A::LIGHT_DISCRIMINATOR, + ..Default::default() + }; + Self { + owner, + account: A::default(), + account_info: CompressedAccountInfo { + address, + input: None, + output: Some(output_account_info), + }, + } + } + + pub fn new_mut( + owner: &'a Pubkey, + input_account_meta: &impl CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let input_account_info = { + let input_data_hash = input_account.hash::()?; + let tree_info = input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: A::LIGHT_DISCRIMINATOR, + } + }; + let output_account_info = { + let output_merkle_tree_index = input_account_meta + .get_output_state_tree_index() + .ok_or(LightSdkError::OutputStateTreeIndexIsNone)?; + OutAccountInfo { + lamports: input_account_meta.get_lamports().unwrap_or_default(), + output_merkle_tree_index, + discriminator: A::LIGHT_DISCRIMINATOR, + ..Default::default() + } + }; + + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: Some(output_account_info), + }, + }) + } + + pub fn new_close( + owner: &'a Pubkey, + input_account_meta: &impl 
CompressedAccountMetaTrait, + input_account: A, + ) -> Result { + let input_account_info = { + let input_data_hash = input_account.hash::()?; + let tree_info = input_account_meta.get_tree_info(); + InAccountInfo { + data_hash: input_data_hash, + lamports: input_account_meta.get_lamports().unwrap_or_default(), + merkle_context: PackedMerkleContext { + merkle_tree_pubkey_index: tree_info.merkle_tree_pubkey_index, + queue_pubkey_index: tree_info.queue_pubkey_index, + leaf_index: tree_info.leaf_index, + prove_by_index: tree_info.prove_by_index, + }, + root_index: input_account_meta.get_root_index().unwrap_or_default(), + discriminator: A::LIGHT_DISCRIMINATOR, + } + }; + Ok(Self { + owner, + account: input_account, + account_info: CompressedAccountInfo { + address: input_account_meta.get_address(), + input: Some(input_account_info), + output: None, + }, + }) + } + + pub fn discriminator(&self) -> &[u8; 8] { + &A::LIGHT_DISCRIMINATOR + } + + pub fn lamports(&self) -> u64 { + if let Some(output) = self.account_info.output.as_ref() { + output.lamports + } else if let Some(input) = self.account_info.input.as_ref() { + input.lamports + } else { + 0 + } + } + + pub fn lamports_mut(&mut self) -> &mut u64 { + if let Some(output) = self.account_info.output.as_mut() { + &mut output.lamports + } else if let Some(input) = self.account_info.input.as_mut() { + &mut input.lamports + } else { + panic!("No lamports field available in account_info") + } + } + + pub fn address(&self) -> &Option<[u8; 32]> { + &self.account_info.address + } + + pub fn owner(&self) -> &Pubkey { + self.owner + } + + pub fn in_account_info(&self) -> &Option { + &self.account_info.input + } + + pub fn out_account_info(&mut self) -> &Option { + &self.account_info.output + } + + /// 1. Serializes the account data and sets the output data hash. + /// 2. Returns CompressedAccountInfo. + /// + /// Note this is an expensive operation + /// that should only be called once per instruction. 
+ pub fn to_account_info(mut self) -> Result { + if let Some(output) = self.account_info.output.as_mut() { + output.data_hash = self.account.hash::()?; + output.data = self + .account + .try_to_vec() + .map_err(|_| LightSdkError::Borsh)?; + } + Ok(self.account_info) + } +} + +impl Deref + for LightAccount<'_, A> +{ + type Target = A; + + fn deref(&self) -> &Self::Target { + &self.account + } +} + +impl DerefMut + for LightAccount<'_, A> +{ + fn deref_mut(&mut self) -> &mut ::Target { + &mut self.account + } +} diff --git a/sdk-libs/sdk-pinocchio/src/address.rs b/sdk-libs/sdk-pinocchio/src/address.rs new file mode 100644 index 0000000000..826c0391a1 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/address.rs @@ -0,0 +1,88 @@ +use light_hasher::{hash_to_field_size::hashv_to_bn254_field_size_be, Hasher, Keccak}; +use pinocchio::{account_info::AccountInfo, pubkey::Pubkey}; + +// Define derive_address function locally +pub fn derive_address( + seed: &[u8; 32], + merkle_tree_pubkey: &[u8; 32], + program_id_bytes: &[u8; 32], +) -> [u8; 32] { + let slices = [ + seed.as_slice(), + merkle_tree_pubkey.as_slice(), + program_id_bytes.as_slice(), + ]; + + light_hasher::hash_to_field_size::hashv_to_bn254_field_size_be_const_array::<4>(&slices) + .unwrap() +} + +// Define data structures needed +#[derive(Clone, Debug, Default)] +pub struct NewAddressParams { + pub seed: [u8; 32], + pub address_queue_pubkey: [u8; 32], + pub address_merkle_tree_pubkey: [u8; 32], + pub address_merkle_tree_root_index: u16, +} + +pub fn unpack_new_address_params( + address_params: &crate::NewAddressParamsPacked, + remaining_accounts: &[AccountInfo], +) -> NewAddressParams { + let address_merkle_tree_pubkey = + remaining_accounts[address_params.address_merkle_tree_account_index as usize].key(); + let address_queue_pubkey = + remaining_accounts[address_params.address_queue_account_index as usize].key(); + + NewAddressParams { + seed: address_params.seed, + address_queue_pubkey: *address_queue_pubkey, + 
address_merkle_tree_pubkey: *address_merkle_tree_pubkey, + address_merkle_tree_root_index: address_params.address_merkle_tree_root_index, + } +} + +pub mod v1 { + use super::*; + + /// Derives a single address seed for a compressed account, based on the + /// provided multiple `seeds`, `program_id` and `merkle_tree_pubkey`. + pub fn derive_address_seed(seeds: &[&[u8]], program_id: &Pubkey) -> [u8; 32] { + let mut inputs = Vec::with_capacity(seeds.len() + 1); + inputs.push(program_id.as_slice()); + inputs.extend(seeds); + hashv_to_bn254_field_size_be_legacy(inputs.as_slice()) + } + + fn hashv_to_bn254_field_size_be_legacy(bytes: &[&[u8]]) -> [u8; 32] { + let mut hashed_value: [u8; 32] = Keccak::hashv(bytes).unwrap(); + // Truncates to 31 bytes so that value is less than bn254 Fr modulo + // field size. + hashed_value[0] = 0; + hashed_value + } + + /// Derives an address for a compressed account, based on the provided singular + /// `seed` and `merkle_tree_pubkey`: + pub(crate) fn derive_address_from_seed( + address_seed: &[u8; 32], + merkle_tree_pubkey: &Pubkey, + ) -> [u8; 32] { + let input = [merkle_tree_pubkey.as_slice(), address_seed].concat(); + hashv_to_bn254_field_size_be(&[input.as_slice()]) + } + + /// Derives an address from provided seeds. Returns that address and a singular + /// seed. 
+ pub fn derive_address( + seeds: &[&[u8]], + merkle_tree_pubkey: &Pubkey, + program_id: &Pubkey, + ) -> ([u8; 32], [u8; 32]) { + let address_seed = derive_address_seed(seeds, program_id); + let address = derive_address_from_seed(&address_seed, merkle_tree_pubkey); + + (address, address_seed) + } +} diff --git a/sdk-libs/sdk-pinocchio/src/cpi/accounts.rs b/sdk-libs/sdk-pinocchio/src/cpi/accounts.rs new file mode 100644 index 0000000000..847da7f233 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/cpi/accounts.rs @@ -0,0 +1,245 @@ +use pinocchio::{account_info::AccountInfo, msg, pubkey::Pubkey}; + +use crate::{ + error::{LightSdkError, Result}, + BorshDeserialize, BorshSerialize, +}; + +#[derive(Debug, Default, Copy, Clone, BorshSerialize, BorshDeserialize)] +pub struct CpiAccountsConfig { + pub self_program: Pubkey, + pub cpi_context: bool, + pub sol_compression_recipient: bool, + pub sol_pool_pda: bool, +} + +impl CpiAccountsConfig { + pub fn new(self_program: Pubkey) -> Self { + Self { + self_program, + cpi_context: false, + sol_compression_recipient: false, + sol_pool_pda: false, + } + } + + pub fn new_with_cpi_context(self_program: Pubkey) -> Self { + Self { + self_program, + cpi_context: true, + sol_compression_recipient: false, + sol_pool_pda: false, + } + } +} + +#[repr(usize)] +pub enum CompressionCpiAccountIndex { + LightSystemProgram, + Authority, + RegisteredProgramPda, + NoopProgram, + AccountCompressionAuthority, + AccountCompressionProgram, + InvokingProgram, + SolPoolPda, + DecompressionRecipent, + SystemProgram, + CpiContext, +} + +pub const SYSTEM_ACCOUNTS_LEN: usize = 11; + +pub struct CpiAccounts<'a> { + fee_payer: &'a AccountInfo, + accounts: &'a [AccountInfo], + config: CpiAccountsConfig, +} + +impl<'a> CpiAccounts<'a> { + pub fn new( + fee_payer: &'a AccountInfo, + accounts: &'a [AccountInfo], + program_id: Pubkey, + ) -> Result { + let new = Self { + fee_payer, + accounts, + config: CpiAccountsConfig { + self_program: program_id, + 
..Default::default() + }, + }; + if accounts.len() < new.system_accounts_len() { + return Err(LightSdkError::FewerAccountsThanSystemAccounts); + } + Ok(new) + } + + pub fn new_with_config( + fee_payer: &'a AccountInfo, + accounts: &'a [AccountInfo], + config: CpiAccountsConfig, + ) -> Result { + let new = Self { + fee_payer, + accounts, + config, + }; + if accounts.len() < new.system_accounts_len() { + return Err(LightSdkError::FewerAccountsThanSystemAccounts); + } + Ok(new) + } + + pub fn fee_payer(&self) -> &'a AccountInfo { + self.fee_payer + } + + pub fn light_system_program(&self) -> &'a AccountInfo { + // PANICS: We are sure about the bounds of the slice. + self.accounts + .get(CompressionCpiAccountIndex::LightSystemProgram as usize) + .unwrap() + } + + pub fn authority(&self) -> &'a AccountInfo { + // PANICS: We are sure about the bounds of the slice. + self.accounts + .get(CompressionCpiAccountIndex::Authority as usize) + .unwrap() + } + + pub fn invoking_program(&self) -> &'a AccountInfo { + // PANICS: We are sure about the bounds of the slice. + self.accounts + .get(CompressionCpiAccountIndex::InvokingProgram as usize) + .unwrap() + } + + pub fn self_program_id(&self) -> &Pubkey { + &self.config.self_program + } + + pub fn to_account_infos(&self) -> Vec<&'a AccountInfo> { + let mut account_infos = Vec::with_capacity(1 + SYSTEM_ACCOUNTS_LEN); + account_infos.push(self.fee_payer); + // Skip the first account (light_system_program) and add the rest + self.accounts[1..] 
+ .iter() + .for_each(|acc| account_infos.push(acc)); + let mut current_index = 7; + if !self.config.sol_pool_pda { + account_infos.insert(current_index, self.light_system_program()); + } + current_index += 1; + + if !self.config.sol_compression_recipient { + account_infos.insert(current_index, self.light_system_program()); + } + current_index += 1; + // system program + current_index += 1; + + if !self.config.cpi_context { + account_infos.insert(current_index, self.light_system_program()); + } + account_infos + } + + pub fn to_account_metas(&self) -> Vec { + use pinocchio::instruction::AccountMeta; + msg!("pre account_metas"); + msg!(format!("{}", self.accounts.len()).as_str()); + let mut account_metas = Vec::with_capacity(1 + SYSTEM_ACCOUNTS_LEN); + account_metas.push(AccountMeta::writable_signer(self.fee_payer.key())); + account_metas.push(AccountMeta::readonly_signer(self.authority().key())); + + account_metas.push(AccountMeta::readonly( + self.accounts[CompressionCpiAccountIndex::RegisteredProgramPda as usize].key(), + )); + account_metas.push(AccountMeta::readonly( + self.accounts[CompressionCpiAccountIndex::NoopProgram as usize].key(), + )); + account_metas.push(AccountMeta::readonly( + self.accounts[CompressionCpiAccountIndex::AccountCompressionAuthority as usize].key(), + )); + account_metas.push(AccountMeta::readonly( + self.accounts[CompressionCpiAccountIndex::AccountCompressionProgram as usize].key(), + )); + account_metas.push(AccountMeta::readonly( + self.accounts[CompressionCpiAccountIndex::InvokingProgram as usize].key(), + )); + let mut current_index = 7; + if !self.config.sol_pool_pda { + account_metas.push(AccountMeta::readonly(self.light_system_program().key())); + } else { + account_metas.push(AccountMeta::writable(self.accounts[current_index].key())); + current_index += 1; + } + + if !self.config.sol_compression_recipient { + account_metas.push(AccountMeta::readonly(self.light_system_program().key())); + } else { + 
account_metas.push(AccountMeta::writable(self.accounts[current_index].key())); + current_index += 1; + } + + // System program - use default (all zeros) + account_metas.push(AccountMeta::readonly(&[0u8; 32])); + current_index += 1; + + if !self.config.cpi_context { + account_metas.push(AccountMeta::readonly(self.light_system_program().key())); + } else { + account_metas.push(AccountMeta::writable(self.accounts[current_index].key())); + current_index += 1; + } + + // Add remaining tree accounts + self.accounts[current_index..].iter().for_each(|acc| { + let account_meta = if acc.is_writable() { + AccountMeta::writable(acc.key()) + } else { + AccountMeta::readonly(acc.key()) + }; + account_metas.push(account_meta); + }); + + account_metas + } + + pub fn system_accounts_len(&self) -> usize { + let mut len = 7; // Base system accounts + + if self.config.sol_pool_pda { + len += 1; + } + + if self.config.sol_compression_recipient { + len += 1; + } + + if self.config.cpi_context { + len += 1; + } + + len + 1 // Add system program + } + + pub fn account_infos(&self) -> &'a [AccountInfo] { + self.accounts + } + + pub fn tree_accounts(&self) -> &'a [AccountInfo] { + msg!(format!("tree_accounts: {}", self.accounts.len()).as_str()); + msg!(format!("offset {}", self.system_accounts_len()).as_str()); + + // Debug print all accounts + for (i, acc) in self.accounts.iter().enumerate() { + msg!(format!(" accounts[{}] = {:?}", i, acc.key()).as_str()); + } + + &self.accounts[self.system_accounts_len()..] 
+ } +} diff --git a/sdk-libs/sdk-pinocchio/src/cpi/invoke.rs b/sdk-libs/sdk-pinocchio/src/cpi/invoke.rs new file mode 100644 index 0000000000..f07efcef06 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/cpi/invoke.rs @@ -0,0 +1,217 @@ +use light_compressed_account::{ + compressed_account::{ + CompressedAccount, CompressedAccountData, PackedCompressedAccountWithMerkleContext, + }, + discriminators::DISCRIMINATOR_INVOKE_CPI, + instruction_data::{ + cpi_context::CompressedCpiContext, + data::{NewAddressParamsPacked, OutputCompressedAccountWithPackedContext}, + invoke_cpi::InstructionDataInvokeCpi, + with_account_info::CompressedAccountInfo, + }, +}; +use pinocchio::{cpi::slice_invoke_signed, msg, pubkey::Pubkey}; + +use crate::{ + cpi::CpiAccounts, + error::{LightSdkError, Result}, + find_cpi_signer_macro, BorshSerialize, ValidityProof, CPI_AUTHORITY_PDA_SEED, + PROGRAM_ID_LIGHT_SYSTEM, +}; + +// Trait to provide the missing methods for CompressedAccountInfo +pub trait CompressedAccountInfoExt { + fn input_compressed_account( + &self, + owner: Pubkey, + ) -> Result>; + fn output_compressed_account( + &self, + owner: Pubkey, + ) -> Result>; +} + +impl CompressedAccountInfoExt for CompressedAccountInfo { + fn input_compressed_account( + &self, + owner: Pubkey, + ) -> Result> { + match self.input.as_ref() { + Some(input) => { + let data = Some(CompressedAccountData { + discriminator: input.discriminator, + data: Vec::new(), + data_hash: input.data_hash, + }); + Ok(Some(PackedCompressedAccountWithMerkleContext { + compressed_account: CompressedAccount { + owner: owner.into(), + lamports: input.lamports, + address: self.address, + data, + }, + merkle_context: input.merkle_context, + root_index: input.root_index, + read_only: false, + })) + } + None => Ok(None), + } + } + + fn output_compressed_account( + &self, + owner: Pubkey, + ) -> Result> { + match self.output.as_ref() { + Some(output) => { + let data = Some(CompressedAccountData { + discriminator: output.discriminator, 
+ data: output.data.clone(), + data_hash: output.data_hash, + }); + Ok(Some(OutputCompressedAccountWithPackedContext { + compressed_account: CompressedAccount { + owner: owner.into(), + lamports: output.lamports, + address: self.address, + data, + }, + merkle_tree_index: output.output_merkle_tree_index, + })) + } + None => Ok(None), + } + } +} + +#[derive(Debug, Default, PartialEq, Clone)] +pub struct CpiInputs { + pub proof: ValidityProof, + pub account_infos: Option>, + pub new_addresses: Option>, + pub compress_or_decompress_lamports: Option, + pub is_compress: bool, + pub cpi_context: Option, +} + +impl CpiInputs { + pub fn new(proof: ValidityProof, account_infos: Vec) -> Self { + Self { + proof, + account_infos: Some(account_infos), + ..Default::default() + } + } + + pub fn new_with_address( + proof: ValidityProof, + account_infos: Vec, + new_addresses: Vec, + ) -> Self { + Self { + proof, + account_infos: Some(account_infos), + new_addresses: Some(new_addresses), + ..Default::default() + } + } + + pub fn invoke_light_system_program(self, cpi_accounts: CpiAccounts) -> Result<()> { + light_system_progam_instruction_invoke_cpi(self, &cpi_accounts) + } +} + +pub fn light_system_progam_instruction_invoke_cpi( + cpi_inputs: CpiInputs, + cpi_accounts: &CpiAccounts, +) -> Result<()> { + let owner = *cpi_accounts.invoking_program().key(); + let (input_compressed_accounts_with_merkle_context, output_compressed_accounts) = + if let Some(account_infos) = cpi_inputs.account_infos.as_ref() { + let mut input_compressed_accounts_with_merkle_context = + Vec::with_capacity(account_infos.len()); + let mut output_compressed_accounts = Vec::with_capacity(account_infos.len()); + for account_info in account_infos.iter() { + if let Some(input_account) = + CompressedAccountInfoExt::input_compressed_account(account_info, owner)? 
+ { + input_compressed_accounts_with_merkle_context.push(input_account); + } + if let Some(output_account) = + CompressedAccountInfoExt::output_compressed_account(account_info, owner)? + { + output_compressed_accounts.push(output_account); + } + } + ( + input_compressed_accounts_with_merkle_context, + output_compressed_accounts, + ) + } else { + (vec![], vec![]) + }; + + let inputs = InstructionDataInvokeCpi { + proof: cpi_inputs.proof.0, + new_address_params: cpi_inputs.new_addresses.unwrap_or_default(), + relay_fee: None, + input_compressed_accounts_with_merkle_context, + output_compressed_accounts, + compress_or_decompress_lamports: cpi_inputs.compress_or_decompress_lamports, + is_compress: cpi_inputs.is_compress, + cpi_context: cpi_inputs.cpi_context, + }; + let inputs = inputs.try_to_vec().map_err(|_| LightSdkError::Borsh)?; + + let mut data = Vec::with_capacity(8 + 4 + inputs.len()); + data.extend_from_slice(&DISCRIMINATOR_INVOKE_CPI); + data.extend_from_slice(&(inputs.len() as u32).to_le_bytes()); + data.extend(inputs); + msg!("pre account_metas"); + + let account_metas: Vec = cpi_accounts.to_account_metas(); + msg!(format!("account_metas len: {}", account_metas.len()).as_str()); + + // Create instruction with owned data and immediately invoke it + use pinocchio::instruction::{Instruction, Seed, Signer}; + + let (_authority, bump) = find_cpi_signer_macro!(cpi_accounts.invoking_program().key()); + let bump_seed = [bump]; + let seed_array = [ + Seed::from(CPI_AUTHORITY_PDA_SEED), + Seed::from(bump_seed.as_slice()), + ]; + let signer = Signer::from(&seed_array); + + let instruction = Instruction { + program_id: &PROGRAM_ID_LIGHT_SYSTEM, + accounts: &account_metas, + data: &data, + }; + msg!("pre account infos"); + let account_infos = cpi_accounts.to_account_infos(); + msg!(format!("account_infos len: {}", account_infos.len()).as_str()); + + // Debug: print account order + msg!("Account metas:"); + for (i, meta) in account_metas.iter().enumerate() { + 
msg!(format!(" [{}] {:?}", i, meta.pubkey).as_str()); + } + msg!("Account infos:"); + for (i, info) in account_infos.iter().enumerate() { + msg!(format!(" [{}] {:?}", i, info.key()).as_str()); + } + + msg!("calling slice_invoke_signed"); + match slice_invoke_signed(&instruction, &account_infos, &[signer]) { + Ok(()) => { + msg!("slice_invoke_signed completed successfully"); + } + Err(e) => { + msg!(format!("slice_invoke_signed failed: {:?}", e).as_str()); + return Err(LightSdkError::ProgramError(e)); + } + } + Ok(()) +} diff --git a/sdk-libs/sdk-pinocchio/src/cpi/mod.rs b/sdk-libs/sdk-pinocchio/src/cpi/mod.rs new file mode 100644 index 0000000000..f077b4ab32 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/cpi/mod.rs @@ -0,0 +1,5 @@ +pub mod accounts; +pub mod invoke; + +pub use accounts::*; +pub use invoke::*; diff --git a/sdk-libs/sdk-pinocchio/src/error.rs b/sdk-libs/sdk-pinocchio/src/error.rs new file mode 100644 index 0000000000..8174250e55 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/error.rs @@ -0,0 +1,94 @@ +use light_hasher::HasherError; +use pinocchio::program_error::ProgramError; +use thiserror::Error; + +pub type Result = std::result::Result; + +#[derive(Debug, Error, PartialEq)] +pub enum LightSdkError { + #[error("Constraint violation")] + ConstraintViolation, + #[error("Invalid light-system-program ID")] + InvalidLightSystemProgram, + #[error("Expected accounts in the instruction")] + ExpectedAccounts, + #[error("Expected address Merkle context to be provided")] + ExpectedAddressTreeInfo, + #[error("Expected address root index to be provided")] + ExpectedAddressRootIndex, + #[error("Accounts with a specified input are expected to have data")] + ExpectedData, + #[error("Accounts with specified data are expected to have a discriminator")] + ExpectedDiscriminator, + #[error("Accounts with specified data are expected to have a hash")] + ExpectedHash, + #[error("Expected the `{0}` light account to be provided")] + ExpectedLightSystemAccount(String), + 
#[error("`mut` and `close` accounts are expected to have a Merkle context")] + ExpectedMerkleContext, + #[error("Expected root index to be provided")] + ExpectedRootIndex, + #[error("Cannot transfer lamports from an account without input")] + TransferFromNoInput, + #[error("Cannot transfer from an account without lamports")] + TransferFromNoLamports, + #[error("Account, from which a transfer was attempted, has insufficient amount of lamports")] + TransferFromInsufficientLamports, + #[error("Integer overflow resulting from too large resulting amount")] + TransferIntegerOverflow, + #[error("Borsh error.")] + Borsh, + #[error("Fewer accounts than number of system accounts.")] + FewerAccountsThanSystemAccounts, + #[error("InvalidCpiSignerAccount")] + InvalidCpiSignerAccount, + #[error("Missing meta field: {0}")] + MissingField(String), + #[error("Output state tree index is none. Use an CompressedAccountMeta type with output tree index to initialize or update accounts.")] + OutputStateTreeIndexIsNone, + #[error(transparent)] + Hasher(#[from] HasherError), + #[error("Program error: {0:?}")] + ProgramError(ProgramError), +} + +impl From for LightSdkError { + fn from(error: ProgramError) -> Self { + LightSdkError::ProgramError(error) + } +} + +impl From for u32 { + fn from(e: LightSdkError) -> Self { + match e { + LightSdkError::ConstraintViolation => 14001, + LightSdkError::InvalidLightSystemProgram => 14002, + LightSdkError::ExpectedAccounts => 14003, + LightSdkError::ExpectedAddressTreeInfo => 14004, + LightSdkError::ExpectedAddressRootIndex => 14005, + LightSdkError::ExpectedData => 14006, + LightSdkError::ExpectedDiscriminator => 14007, + LightSdkError::ExpectedHash => 14008, + LightSdkError::ExpectedLightSystemAccount(_) => 14009, + LightSdkError::ExpectedMerkleContext => 14010, + LightSdkError::ExpectedRootIndex => 14011, + LightSdkError::TransferFromNoInput => 14012, + LightSdkError::TransferFromNoLamports => 14013, + LightSdkError::TransferFromInsufficientLamports 
=> 14014, + LightSdkError::TransferIntegerOverflow => 14015, + LightSdkError::Borsh => 14016, + LightSdkError::FewerAccountsThanSystemAccounts => 14017, + LightSdkError::InvalidCpiSignerAccount => 14018, + LightSdkError::MissingField(_) => 14019, + LightSdkError::OutputStateTreeIndexIsNone => 14020, + LightSdkError::Hasher(e) => e.into(), + LightSdkError::ProgramError(e) => u32::try_from(u64::from(e)).unwrap(), + } + } +} + +impl From for ProgramError { + fn from(e: LightSdkError) -> Self { + ProgramError::Custom(e.into()) + } +} diff --git a/sdk-libs/sdk-pinocchio/src/instruction/account_meta.rs b/sdk-libs/sdk-pinocchio/src/instruction/account_meta.rs new file mode 100644 index 0000000000..df0456baad --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/instruction/account_meta.rs @@ -0,0 +1,186 @@ +use crate::{instruction::tree_info::PackedStateTreeInfo, BorshDeserialize, BorshSerialize}; + +/// CompressedAccountMeta (context, address, root_index, output_state_tree_index) +/// CompressedAccountMetaNoLamportsNoAddress (context, root_index, output_state_tree_index) +/// CompressedAccountMetaWithLamportsNoAddress (context, root_index, output_state_tree_index) +/// CompressedAccountMetaWithLamports (context, lamports, address, root_index, output_state_tree_index) +pub trait CompressedAccountMetaTrait { + fn get_tree_info(&self) -> &PackedStateTreeInfo; + fn get_lamports(&self) -> Option; + fn get_root_index(&self) -> Option; + fn get_address(&self) -> Option<[u8; 32]>; + fn get_output_state_tree_index(&self) -> Option; +} + +#[derive(Default, Debug, Clone, Copy, PartialEq, BorshSerialize, BorshDeserialize)] +pub struct CompressedAccountMetaNoLamportsNoAddress { + pub tree_info: PackedStateTreeInfo, + pub output_state_tree_index: u8, +} + +impl CompressedAccountMetaTrait for CompressedAccountMetaNoLamportsNoAddress { + fn get_tree_info(&self) -> &PackedStateTreeInfo { + &self.tree_info + } + + fn get_lamports(&self) -> Option { + None + } + + fn get_root_index(&self) -> 
Option { + if self.tree_info.prove_by_index { + None + } else { + Some(self.tree_info.root_index) + } + } + + fn get_address(&self) -> Option<[u8; 32]> { + None + } + + fn get_output_state_tree_index(&self) -> Option { + Some(self.output_state_tree_index) + } +} + +#[derive(Default, Debug, Clone, Copy, PartialEq, BorshSerialize, BorshDeserialize)] +pub struct CompressedAccountMetaNoAddress { + pub tree_info: PackedStateTreeInfo, + pub output_state_tree_index: u8, + pub lamports: u64, +} + +impl CompressedAccountMetaTrait for CompressedAccountMetaNoAddress { + fn get_tree_info(&self) -> &PackedStateTreeInfo { + &self.tree_info + } + + fn get_lamports(&self) -> Option { + Some(self.lamports) + } + + fn get_root_index(&self) -> Option { + if self.tree_info.prove_by_index { + None + } else { + Some(self.tree_info.root_index) + } + } + + fn get_address(&self) -> Option<[u8; 32]> { + None + } + + fn get_output_state_tree_index(&self) -> Option { + Some(self.output_state_tree_index) + } +} + +#[derive(Default, Debug, Clone, Copy, PartialEq, BorshSerialize, BorshDeserialize)] +pub struct CompressedAccountMeta { + /// Merkle tree context. + pub tree_info: PackedStateTreeInfo, + /// Address. + pub address: [u8; 32], + /// Output merkle tree index. + pub output_state_tree_index: u8, +} + +impl CompressedAccountMetaTrait for CompressedAccountMeta { + fn get_tree_info(&self) -> &PackedStateTreeInfo { + &self.tree_info + } + + fn get_lamports(&self) -> Option { + None + } + + fn get_root_index(&self) -> Option { + if self.tree_info.prove_by_index { + None + } else { + Some(self.tree_info.root_index) + } + } + + fn get_address(&self) -> Option<[u8; 32]> { + Some(self.address) + } + + fn get_output_state_tree_index(&self) -> Option { + Some(self.output_state_tree_index) + } +} + +#[derive(Default, Debug, Clone, Copy, PartialEq, BorshSerialize, BorshDeserialize)] +pub struct CompressedAccountMetaWithLamports { + /// Merkle tree context. 
+ pub tree_info: PackedStateTreeInfo, + /// Lamports. + pub lamports: u64, + /// Address. + pub address: [u8; 32], + /// Root index. + pub output_state_tree_index: u8, +} + +impl CompressedAccountMetaTrait for CompressedAccountMetaWithLamports { + fn get_tree_info(&self) -> &PackedStateTreeInfo { + &self.tree_info + } + + fn get_lamports(&self) -> Option { + Some(self.lamports) + } + + fn get_root_index(&self) -> Option { + if self.tree_info.prove_by_index { + None + } else { + Some(self.tree_info.root_index) + } + } + + fn get_address(&self) -> Option<[u8; 32]> { + Some(self.address) + } + + fn get_output_state_tree_index(&self) -> Option { + Some(self.output_state_tree_index) + } +} + +#[derive(Default, Debug, Clone, Copy, PartialEq, BorshSerialize, BorshDeserialize)] +pub struct CompressedAccountMetaClose { + /// State Merkle tree context. + pub tree_info: PackedStateTreeInfo, + /// Address. + pub address: [u8; 32], +} + +impl CompressedAccountMetaTrait for CompressedAccountMetaClose { + fn get_tree_info(&self) -> &PackedStateTreeInfo { + &self.tree_info + } + + fn get_lamports(&self) -> Option { + None + } + + fn get_root_index(&self) -> Option { + if self.tree_info.prove_by_index { + None + } else { + Some(self.tree_info.root_index) + } + } + + fn get_address(&self) -> Option<[u8; 32]> { + Some(self.address) + } + + fn get_output_state_tree_index(&self) -> Option { + None + } +} diff --git a/sdk-libs/sdk-pinocchio/src/instruction/mod.rs b/sdk-libs/sdk-pinocchio/src/instruction/mod.rs new file mode 100644 index 0000000000..de80320b17 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/instruction/mod.rs @@ -0,0 +1,2 @@ +pub mod account_meta; +pub mod tree_info; diff --git a/sdk-libs/sdk-pinocchio/src/instruction/tree_info.rs b/sdk-libs/sdk-pinocchio/src/instruction/tree_info.rs new file mode 100644 index 0000000000..bce12f2387 --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/instruction/tree_info.rs @@ -0,0 +1,70 @@ +// pub use 
crate::compressed_account::PackedMerkleContext; + +#[derive(Debug, Clone, Copy, BorshDeserialize, BorshSerialize, PartialEq, Default)] +pub struct MerkleContext { + pub merkle_tree_pubkey: [u8; 32], + pub queue_pubkey: [u8; 32], + pub leaf_index: u32, + pub tree_type: u8, // Simplified TreeType as u8 +} +use light_compressed_account::instruction_data::data::NewAddressParamsPacked; +use pinocchio::{account_info::AccountInfo, pubkey::Pubkey}; + +use crate::{BorshDeserialize, BorshSerialize}; + +#[derive(Debug, Clone, Copy, BorshDeserialize, BorshSerialize, PartialEq, Default)] +pub struct PackedStateTreeInfo { + pub root_index: u16, + pub prove_by_index: bool, + pub merkle_tree_pubkey_index: u8, + pub queue_pubkey_index: u8, + pub leaf_index: u32, +} + +#[derive(Debug, Clone, Copy, BorshDeserialize, BorshSerialize, PartialEq, Default)] +pub struct AddressTreeInfo { + pub address_merkle_tree_pubkey: Pubkey, + pub address_queue_pubkey: Pubkey, +} + +#[derive(Debug, Clone, Copy, BorshDeserialize, BorshSerialize, PartialEq, Default)] +pub struct PackedAddressTreeInfo { + pub address_merkle_tree_pubkey_index: u8, + pub address_queue_pubkey_index: u8, + pub root_index: u16, +} + +impl PackedAddressTreeInfo { + pub fn into_new_address_params_packed(self, seed: [u8; 32]) -> NewAddressParamsPacked { + NewAddressParamsPacked { + address_merkle_tree_account_index: self.address_merkle_tree_pubkey_index, + address_queue_account_index: self.address_queue_pubkey_index, + address_merkle_tree_root_index: self.root_index, + seed, + } + } +} + +pub fn unpack_address_tree_infos( + address_tree_infos: &[PackedAddressTreeInfo], + remaining_accounts: &[AccountInfo], +) -> Vec { + let mut result = Vec::with_capacity(address_tree_infos.len()); + for x in address_tree_infos { + let address_merkle_tree_pubkey = + *remaining_accounts[x.address_merkle_tree_pubkey_index as usize].key(); + let address_queue_pubkey = *remaining_accounts[x.address_queue_pubkey_index as usize].key(); + 
result.push(AddressTreeInfo { + address_merkle_tree_pubkey, + address_queue_pubkey, + }); + } + result +} + +pub fn unpack_address_tree_info( + address_tree_info: PackedAddressTreeInfo, + remaining_accounts: &[AccountInfo], +) -> AddressTreeInfo { + unpack_address_tree_infos(&[address_tree_info], remaining_accounts)[0] +} diff --git a/sdk-libs/sdk-pinocchio/src/lib.rs b/sdk-libs/sdk-pinocchio/src/lib.rs new file mode 100644 index 0000000000..81aaec787b --- /dev/null +++ b/sdk-libs/sdk-pinocchio/src/lib.rs @@ -0,0 +1,56 @@ +pub mod account; +pub mod address; +pub mod cpi; +pub mod error; +pub mod instruction; + +pub use account::LightAccount; +pub use borsh::{BorshDeserialize, BorshSerialize}; +pub use cpi::{CpiAccounts, CpiAccountsConfig, CpiInputs}; +pub use instruction::{ + account_meta::CompressedAccountMeta, + tree_info::{PackedAddressTreeInfo, PackedStateTreeInfo}, +}; +// Re-export discriminator functionality +pub use light_account_checks::discriminator::Discriminator as LightDiscriminator; +// Re-export derive macros +pub use light_compressed_account::{ + self, instruction_data::compressed_proof::ValidityProof, instruction_data::data::*, +}; +pub use light_hasher::{DataHasher as LightHasher, DataHasher, Poseidon}; +pub use light_sdk_macros::{LightDiscriminator, LightHasher}; +use pinocchio::pubkey::Pubkey; + +pub mod hash_to_field_size { + pub use light_hasher::hash_to_field_size::{ + hash_to_bn254_field_size_be, hashv_to_bn254_field_size_be, + hashv_to_bn254_field_size_be_const_array, HashToFieldSize, + }; +} + +// Constants +/// Seed of the CPI authority. +pub const CPI_AUTHORITY_PDA_SEED: &[u8] = b"cpi_authority"; + +/// ID of the account-compression program. 
+pub const PROGRAM_ID_ACCOUNT_COMPRESSION: Pubkey = [ + 55, 8, 217, 140, 65, 94, 42, 215, 32, 189, 184, 135, 142, 143, 219, 27, 224, 96, 152, 85, 129, + 220, 130, 145, 39, 245, 180, 186, 206, 148, 10, 237, +]; +pub const PROGRAM_ID_NOOP: Pubkey = [ + 132, 155, 207, 4, 208, 227, 48, 117, 105, 194, 163, 167, 98, 204, 61, 138, 137, 185, 222, 182, + 70, 182, 113, 154, 85, 91, 240, 94, 151, 221, 190, 139, +]; +/// ID of the light-system program. +pub const PROGRAM_ID_LIGHT_SYSTEM: Pubkey = [ + 6, 167, 85, 248, 33, 57, 5, 77, 68, 36, 177, 90, 240, 196, 48, 207, 47, 75, 127, 152, 121, 58, + 218, 18, 82, 212, 143, 54, 102, 198, 203, 206, +]; + +// Macro for finding CPI signer +#[macro_export] +macro_rules! find_cpi_signer_macro { + ($program_id:expr) => { + pinocchio::pubkey::find_program_address([CPI_AUTHORITY_PDA_SEED].as_slice(), $program_id) + }; +} diff --git a/sdk-libs/sdk/Cargo.toml b/sdk-libs/sdk/Cargo.toml index 04831098c0..d40492c813 100644 --- a/sdk-libs/sdk/Cargo.toml +++ b/sdk-libs/sdk/Cargo.toml @@ -15,22 +15,18 @@ solana = [ "solana-cpi", "solana-instruction", "solana-pubkey", + "solana-pubkey/borsh", + "solana-pubkey/curve25519", + "solana-pubkey/sha2", "solana-account-info", "solana-msg", "solana-program-error", "borsh", - "light-verifier/solana", "light-compressed-account/solana", - "light-hasher/solana", ] default = ["solana"] idl-build = ["anchor-lang/idl-build"] -anchor = [ - "anchor-lang", - "light-compressed-account/anchor", - "light-verifier/anchor", - "light-hasher/solana", -] +anchor = ["anchor-lang", "light-compressed-account/anchor"] v2 = [] [dependencies] @@ -51,9 +47,9 @@ thiserror = { workspace = true } light-sdk-macros = { workspace = true } light-macros = { workspace = true } light-compressed-account = { workspace = true } -light-verifier = { workspace = true } light-hasher = { workspace = true } light-account-checks = { workspace = true } [dev-dependencies] num-bigint = { workspace = true } +light-compressed-account = { workspace = true , 
features = ["new-unique"]} diff --git a/sdk-libs/sdk/src/account.rs b/sdk-libs/sdk/src/account.rs index 78fa544263..29da44c72c 100644 --- a/sdk-libs/sdk/src/account.rs +++ b/sdk-libs/sdk/src/account.rs @@ -3,9 +3,9 @@ use std::ops::{Deref, DerefMut}; use light_compressed_account::{ compressed_account::PackedMerkleContext, instruction_data::with_account_info::{CompressedAccountInfo, InAccountInfo, OutAccountInfo}, - pubkey::Pubkey, }; use light_hasher::{DataHasher, Poseidon}; +use solana_pubkey::Pubkey; use crate::{ error::LightSdkError, instruction::account_meta::CompressedAccountMetaTrait, AnchorDeserialize, diff --git a/sdk-libs/sdk/src/account_info.rs b/sdk-libs/sdk/src/account_info.rs index 5f54030bbf..6687cdffc8 100644 --- a/sdk-libs/sdk/src/account_info.rs +++ b/sdk-libs/sdk/src/account_info.rs @@ -193,7 +193,7 @@ impl AccountInfoTrait for CompressedAccountInfo { }); Ok(Some(PackedCompressedAccountWithMerkleContext { compressed_account: CompressedAccount { - owner, + owner: owner.to_bytes().into(), lamports: input.lamports, address: self.address, data, @@ -220,7 +220,7 @@ impl AccountInfoTrait for CompressedAccountInfo { }); Ok(Some(OutputCompressedAccountWithPackedContext { compressed_account: CompressedAccount { - owner, + owner: owner.to_bytes().into(), lamports: output.lamports, address: self.address, data, diff --git a/sdk-libs/sdk/src/address.rs b/sdk-libs/sdk/src/address.rs index 93e657da71..3adb5043f7 100644 --- a/sdk-libs/sdk/src/address.rs +++ b/sdk-libs/sdk/src/address.rs @@ -20,9 +20,9 @@ pub fn pack_new_addresses_params( .iter() .map(|x| { let address_queue_account_index = - remaining_accounts.insert_or_get(x.address_queue_pubkey); + remaining_accounts.insert_or_get(x.address_queue_pubkey.to_bytes().into()); let address_merkle_tree_account_index = - remaining_accounts.insert_or_get(x.address_merkle_tree_pubkey); + remaining_accounts.insert_or_get(x.address_merkle_tree_pubkey.to_bytes().into()); PackedNewAddressParams { seed: x.seed, 
address_queue_account_index, @@ -50,8 +50,8 @@ pub fn unpack_new_address_params( remaining_accounts[address_params.address_queue_account_index as usize].key; NewAddressParams { seed: address_params.seed, - address_queue_pubkey: *address_queue_pubkey, - address_merkle_tree_pubkey: *address_merkle_tree_pubkey, + address_queue_pubkey: address_queue_pubkey.to_bytes().into(), + address_merkle_tree_pubkey: address_merkle_tree_pubkey.to_bytes().into(), address_merkle_tree_root_index: address_params.address_merkle_tree_root_index, } } diff --git a/sdk-libs/sdk/src/instruction/tree_info.rs b/sdk-libs/sdk/src/instruction/tree_info.rs index b6ccab26b3..59fbbb378d 100644 --- a/sdk-libs/sdk/src/instruction/tree_info.rs +++ b/sdk-libs/sdk/src/instruction/tree_info.rs @@ -61,8 +61,9 @@ pub fn pack_merkle_context( prove_by_index, .. } = merkle_context; - let merkle_tree_pubkey_index = remaining_accounts.insert_or_get(*merkle_tree_pubkey); - let queue_pubkey_index = remaining_accounts.insert_or_get(*queue_pubkey); + let merkle_tree_pubkey_index = + remaining_accounts.insert_or_get(merkle_tree_pubkey.to_bytes().into()); + let queue_pubkey_index = remaining_accounts.insert_or_get(queue_pubkey.to_bytes().into()); PackedMerkleContext { merkle_tree_pubkey_index, @@ -136,6 +137,8 @@ pub fn unpack_address_tree_info( #[cfg(test)] mod test { + use light_compressed_account::Pubkey; + use super::*; #[test] @@ -168,7 +171,7 @@ mod test { #[test] fn test_pack_merkle_contexts() { let mut remaining_accounts = PackedAccounts::default(); - + use light_compressed_account::Pubkey; let merkle_contexts = &[ MerkleContext { merkle_tree_pubkey: Pubkey::new_unique(), @@ -223,6 +226,7 @@ mod test { #[test] fn test_pack_address_tree_info() { + use solana_pubkey::Pubkey; let mut remaining_accounts = PackedAccounts::default(); let address_tree_info = AddressTreeInfo { @@ -245,7 +249,7 @@ mod test { #[test] fn test_pack_address_tree_infos() { let mut remaining_accounts = PackedAccounts::default(); - + use 
solana_pubkey::Pubkey; let address_tree_infos = [ AddressTreeInfo { address_merkle_tree_pubkey: Pubkey::new_unique(), diff --git a/sdk-libs/sdk/src/lib.rs b/sdk-libs/sdk/src/lib.rs index ac893d4f37..3cd22cd8f4 100644 --- a/sdk-libs/sdk/src/lib.rs +++ b/sdk-libs/sdk/src/lib.rs @@ -16,53 +16,15 @@ use anchor_lang::{AnchorDeserialize, AnchorSerialize}; #[cfg(not(feature = "anchor"))] use borsh::{BorshDeserialize as AnchorDeserialize, BorshSerialize as AnchorSerialize}; pub use light_account_checks::{discriminator::Discriminator as LightDiscriminator, *}; -pub use light_compressed_account::{self, instruction_data::data::*}; +pub use light_compressed_account::{ + self, + instruction_data::{compressed_proof::ValidityProof, data::*}, +}; pub use light_hasher::*; pub use light_sdk_macros::*; -pub use light_verifier as verifier; -use light_verifier::CompressedProof; use solana_account_info::AccountInfo; use solana_cpi::invoke_signed; use solana_instruction::{AccountMeta, Instruction}; use solana_msg::msg; use solana_program_error::ProgramError; use solana_pubkey::{pubkey, Pubkey}; - -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, AnchorDeserialize, AnchorSerialize)] -pub struct ValidityProof(pub Option); - -impl ValidityProof { - pub fn new(proof: Option) -> Self { - Self(proof) - } -} - -impl From for ValidityProof { - fn from(proof: CompressedProof) -> Self { - Self(Some(proof)) - } -} - -impl From> for ValidityProof { - fn from(proof: Option) -> Self { - Self(proof) - } -} -impl From<&CompressedProof> for ValidityProof { - fn from(proof: &CompressedProof) -> Self { - Self(Some(*proof)) - } -} - -impl From<&Option> for ValidityProof { - fn from(proof: &Option) -> Self { - Self(*proof) - } -} - -#[allow(clippy::from_over_into)] -impl Into> for ValidityProof { - fn into(self) -> Option { - self.0 - } -}