From d2422350655db5decad052644f339ec2fcb2b321 Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Mon, 24 Nov 2025 00:11:24 +0000 Subject: [PATCH 01/19] Add client library guide and update landing page AI section - Add client-library/client-guide.mdx with TypeScript/Rust SDK overview - Update landing.mdx: reorder AI tools table, specify 128 byte proof size Co-Authored-By: Claude --- client-library/client-guide.mdx | 1486 +++++++++++++++++++++++++++++++ intro-pages/landing.mdx | 20 +- 2 files changed, 1495 insertions(+), 11 deletions(-) create mode 100644 client-library/client-guide.mdx diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx new file mode 100644 index 00000000..b4c9b9f0 --- /dev/null +++ b/client-library/client-guide.mdx @@ -0,0 +1,1486 @@ +--- +title: Client Guide +description: >- + Overview to Rust and Typescript client guides. Guides include step-by-step + implementation and full code examples. +--- + +ZK Compression provides Rust and Typescript clients to interact with compressed accounts and tokens on Solana. + + + + + + + + + + + + + + + + + + + + + + + + + + +
**TypeScript**[@lightprotocol/stateless.js](https://lightprotocol.github.io/light-protocol/stateless.js/index.html)Client SDK for Compressed Accounts
**TypeScript**[@lightprotocol/compressed-token](https://lightprotocol.github.io/light-protocol/compressed-token/index.html)Client SDK for Compressed Tokens
**Rust**[light-client](https://docs.rs/light-client)Client SDK for Compressed Accounts and Tokens
+ + + +## Setup + + + + + +Use the [API documentation](https://lightprotocol.github.io/light-protocol/) to look up specific function signatures, parameters, and return types. + + +### 1. Installation + + + + +```bash +npm install --save \ + @lightprotocol/stateless.js@0.22.1-alpha.1 \ + @lightprotocol/compressed-token@0.22.1-alpha.1 \ + @solana/web3.js +``` + + + + + +```bash +yarn add \ + @lightprotocol/stateless.js@0.22.1-alpha.1 \ + @lightprotocol/compressed-token@0.22.1-alpha.1 \ + @solana/web3.js +``` + + + + + +```bash +pnpm add \ + @lightprotocol/stateless.js@0.22.1-alpha.1 \ + @lightprotocol/compressed-token@0.22.1-alpha.1 \ + @solana/web3.js +``` + + + + +### 2. RPC Connection + +`Rpc` is a thin wrapper extending Solana's web3.js `Connection` class with compression-related endpoints. + + + + +```typescript +const rpc = createRpc('https://mainnet.helius-rpc.com/?api-key=YOUR_API_KEY'); +``` + + + + + +```typescript +const rpc = createRpc('https://devnet.helius-rpc.com/?api-key=YOUR_API_KEY'); +``` + + + + +1. Install the CLI + +```bash +npm install -g @lightprotocol/zk-compression-cli +``` + +2. Start a local Solana test validator, photon indexer, and prover server on default ports 8899, 8784, and 3001. + +```bash +light test-validator +``` + + + + + + +### 1. Dependencies + +```toml +[dependencies] +light-client = "0.16.0" +light-sdk = "0.16.0" +``` + +### 2. RPC Connection + +Connect to an RPC provider that supports ZK Compression, such as Helius and Triton. 
+ + + + +```rust +let config = LightClientConfig::new( + "https://api.mainnet-beta.solana.com".to_string(), + Some("https://mainnet.helius.xyz".to_string()), + Some("YOUR_API_KEY".to_string()) +); + +let mut client = LightClient::new(config).await?; + +client.payer = read_keypair_file("~/.config/solana/id.json")?; +``` + + + + + +```rust +let config = LightClientConfig::devnet( + Some("https://devnet.helius-rpc.com".to_string()), + Some("YOUR_API_KEY".to_string()) +); + +let mut client = LightClient::new(config).await?; + +client.payer = read_keypair_file("~/.config/solana/id.json")?; +``` + + + + + +```rust +let config = LightClientConfig::local(); + +let mut client = LightClient::new(config).await?; + +client.payer = read_keypair_file("~/.config/solana/id.json")?; +``` + +1. Install the CLI + +```bash +npm install -g @lightprotocol/zk-compression-cli +``` + +2. Start a single-node Solana cluster, an RPC node, and a prover node at ports 8899, 8784, and 3001. + +```bash +light test-validator +``` + + + + + + + + +## Address + + +You only need to derive an address when you create a compressed account. + + + + +Derive a persistent address as a unique identifier for your compressed account, similar to [program-derived addresses (PDAs)](https://solana.com/docs/core/pda). + +* Like PDAs, compressed account addresses don't belong to a private key; rather, they're derived from the program that owns them. +* The key difference to PDAs is that compressed accounts require an **address tree** parameter. +* An address tree is a Merkle tree that stores the compressed account addresses. + + +The protocol maintains Merkle trees. You don't need to initialize custom trees. Find the [pubkeys for Merkle trees here](https://www.zkcompression.com/resources/addresses-and-urls). 
+ + + + + + + +```typescript +const addressTree = getDefaultAddressTreeInfo(); +const seed = deriveAddressSeed( + [Buffer.from('my-seed')], + programId +); +const address = deriveAddress( + seed, + addressTree.tree +); +``` + +**1. Derive the seed** + +**2. Then, derive the address**: + +* Pass the derived 32-byte `seed` from the first step +* Specify `addressTree.tree` pubkey to ensure an address is unique to an address tree. Different trees produce different addresses from identical seeds. + + + + +```typescript +const addressTree = await rpc.getAddressTreeInfoV2(); +const seed = deriveAddressSeedV2( + [Buffer.from('my-seed')] +); + +const address = deriveAddressV2( + seed, + addressTree.tree, + programId +); +``` + +**1. Derive the seed** + +**2. Then, derive the address**: + +* Pass the derived 32-byte `seed` from the first step. +* Specify `addressTree.tree` pubkey to ensure an address is unique to an address tree. Different trees produce different addresses from identical seeds. +* Specify your *`programId` in this step.* + + + + + + + + +```rust +use light_sdk::address::v1::derive_address; + +let address_tree_info = rpc.get_address_tree_v1(); +let (address, _) = derive_address( + &[b"my-seed"], + &address_tree_info.tree, + &program_id, +); +``` + + + + + +```rust +use light_sdk::address::v2::derive_address; + +let address_tree_info = rpc.get_address_tree_v2(); +let (address, _) = derive_address( + &[b"my-seed"], + &address_tree_info.tree, + &program_id, +); +``` + + + + +**Pass these parameters**: + +* `&[b"my-seed"]`: Predefined inputs, such as strings, numbers or other account addresses. +* `&address_tree_info.tree`: Specify the tree pubkey to ensure an address is unique to this address tree. Different trees produce different addresses from identical seeds. +* `&program_id`: Specify your program ID. + + + + +**Use the same address** tree for all subsequent instructions **in client and program**. 
+ + + + +Use the address from the existing compressed account. + +Fetch the account with the RPC methods to get its address. + + + + +```typescript +const compressedAccount = await rpc.getCompressedAccount(address); +const accountAddress = compressedAccount.address; +``` + + + + + +```rust +let compressed_account = rpc.get_compressed_account(address).await?; +let account_address = compressed_account.address; +``` + + + + + + + + + +## Validity Proof + +Transactions with compressed accounts must include a validity proof: +* To **create** a compressed account, you prove the **address doesn't already exist** in the address tree. +* In **other instructions**, you **prove its account hash exists** in a state tree. +* You can **combine multiple addresses and hashes in one proof** to optimize compute cost and instruction data. + + +You fetch a validity proof from your RPC provider that supports ZK Compression (Helius, Triton, ...). + + + + + + + +```typescript +const proof = await rpc.getValidityProofV0( + [], + [{ + address: bn(address.toBytes()), + tree: addressTree.tree, + queue: addressTree.queue + }] +); +``` + +**1. Pass these parameters**: + +* **Specify the derived address**, `tree` and `queue` pubkeys from the address tree `TreeInfo`. +* When you create an account you don't reference a compressed account hash in the hash array (`[]`). + +For account creation, you prove the address does not exist yet in the address tree. + +**2. The RPC returns**: + +* `compressedProof` with the proof that the address does not exist in the address tree for your instruction data (Step 5) +* `rootIndices` array with root index from the validity proof for the address tree. + + + + +```typescript +const proof = await rpc.getValidityProofV0( + [{ + hash: compressedAccount.hash, + tree: compressedAccount.treeInfo.tree, + queue: compressedAccount.treeInfo.queue + }], + [] +); +``` + +**1. 
Pass these parameters**: + +Specify the **account hash**, `tree` and `queue` pubkeys from the compressed account's `TreeInfo`. + + +* You don't specify the address for update, close, reinitialize, and burn instructions. +* The proof **verifies the account hash exists in the state tree** for these instructions. +* The validity proof structure is identical. The difference is in your program's instruction handler. + + +**2. The RPC returns**: + +* `compressedProof` with the proof that the account hash exists in the state tree for your instruction data (Step 5) +* `rootIndices` and `leafIndices` arrays with proof metadata to pack accounts (Step 4). + + + + + + + + +```rust +let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree + }], + None, + ) + .await? + .value; +``` + + +**1. Pass these parameters**: +* **Specify the derived address** and `tree` pubkey from the address tree `TreeInfo`. The `queue` pubkey is only required in TypeScript. +* When you create an account you don't reference a compressed account hash in the hash array (`vec![]`). + +For account creation, you prove the address does not exist yet in the address tree. + + +**2. The RPC returns `ValidityProofWithContext`**: + +* `proof` with the proof that the address does not exist in the address tree for your instruction data (Step 5) +* `addresses` with the public key and metadata of the address tree to pack accounts (Step 4). + + + + +```rust +let rpc_result = rpc + .get_validity_proof( + vec![compressed_account.hash], + vec![], + None, + ) + .await? + .value; +``` + +**1. Pass these parameters**: + +Specify the **account hash**, `tree` and `queue` pubkeys from the compressed account's `TreeInfo`. + +* You don't specify the address for update, close, reinitialize, and burn instructions. +* The proof **verifies the account hash exists in the state tree** for these instructions. +* The validity proof structure is identical. 
The difference is in your program's instruction handler. + +**2. The RPC returns `ValidityProofWithContext`**: + +* `proof` with the proof that the **account hash exists in the state tree** for your instruction data (Step 5) +* `accounts` with the **public key and metadata of the state tree** to pack accounts (Step 4). + + + + + +### Optimize with Combined Proofs + +A single proof can contain: +* multiple addresses, +* multiple account hashes, or +* a combination of addresses and account hashes + + +**Advantages of combined proofs**: +* You only add **one 128 byte validity proof** to your instruction data **for multiple instructions**. +* This can **optimize** your **transaction's size** to stay inside the 1232 byte limit. +* **Compute unit consumption is reduced by at least 100k CU**, since combined proofs are verified in a single CPI by the Light System Program. + + +Depending on the **Merkle tree version** (V1 or V2) you are using, you can prove the following **in a single proof**: + + + +| | | +| ----------------------- | --------------------------------------------------- | +| Account Hash-only (bulk) | 1, 2, 3, 4, or 8 hashes | +| Address-only (bulk) | 1, 2, 4, or 8 addresses | +| Mixed (hash + address) | Any combination of
**1, 2, 3, 4, or 8** account hashes **and**
**1, 2, 4, or 8** new addresses | +
+ + + +| | | +| ----------------------- | --------------------------------------------------- | +| Account Hash-only (bulk) | 1 to 20 hashes | +| Address-only (bulk) | 1 to 32 addresses | +| Mixed (hash + address) | Any combination of
**1 to 4** account hashes **and**
**1 to 4** new addresses | +
+
+ + +View the [source code for the proof combinations here](https://github.com/Lightprotocol/light-protocol/tree/871215642b4b5b69d2bcd7eca22542346d0e2cfa/program-libs/verifier/src/verifying_keys). + + +### Example Create Address & Update Account in one Proof + +In this example, we generate one proof that proves that an account exists and that an address does not exist yet. +This updates an account and create the address for a new account. + + + + +```typescript +const proof = await rpc.getValidityProofV0( + [{ + hash: compressedAccount.hash, + tree: compressedAccount.treeInfo.tree, + queue: compressedAccount.treeInfo.queue + }], + [{ + address: bn(address.toBytes()), + tree: addressTree.tree, + queue: addressTree.queue + }] +); +``` + +**1. Pass these parameters**: + +* Specify one or more **account hashes**, `tree` and `queue` pubkeys from the compressed account's `TreeInfo`. +* Specify one or more **derived addresses** with their `tree` and `queue` pubkeys from the address tree `TreeInfo`. + +**2. The RPC returns**: + +* `compressedProof` with a single combined proof that verifies both the **account hash exists in the state tree** and the **address does not exist in the address tree** for your instruction data (Step 5) +* `rootIndices` and `leafIndices` arrays with proof metadata to pack accounts (Step 4). + + + + +```rust +let rpc_result = rpc + .get_validity_proof( + vec![compressed_account.hash], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree + }], + None, + ) + .await? + .value; +``` + +**1. Pass these parameters**: + +* Specify one or more **compressed account hashes**. +* Specify one or more **derived addresses** with their `tree` pubkeys from the address tree `TreeInfo`. The `queue` pubkey is only required in TypeScript. + +**2. 
The RPC returns `ValidityProofWithContext`**: + +* `proof` with a single combined proof that verifies both the **account hash exists in the state tree** and the **address does not exist in the address tree** for your instruction data (Step 5) +* `addresses` with the public key and metadata of the address tree to pack accounts (Step 4). +* `accounts` with the public key and metadata of the state tree to pack accounts (Step 4). + + + + +See the full [create-and-update program example for this proof combination with tests](https://github.com/Lightprotocol/program-examples/tree/main/create-and-update). + +
+ + +## Accounts + +Transactions with compressed accounts reference **custom accounts and `PackedAccounts`** in their instruction data in the accounts array: + +``` + PackedAccounts + ┌----------------------------------------------┐ + [custom accounts] [pre accounts] [system accounts] [tree accounts] + ↑ ↑ ↑ + Signers, Light System State trees, + fee payer accounts address trees, +``` + +The `PackedAccounts` helper allows you to **reference accounts by u8 indices instead of 32-byte pubkeys**. + + +Program-specific (custom) accounts can be added to `PackedAccounts` using e.g. `add_pre_accounts_signer` or `add_pre_accounts_meta`. +These are typically accounts that need to be at a specific, known position in the instruction's account list. + + +**Light System accounts** are 8 required accounts for proof verification and CPI calls to update state and address trees. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
#AccountDescription
1Light System ProgramVerifies validity proofs, performs compressed account ownership checks, and CPIs into the Account Compression Program to update tree accounts
2CPI SignerPDA to sign CPI calls from your program to Light System Program. Verified by Light System Program during CPI. Derived from your program ID
3Registered Program PDAAccess control for the Account Compression Program
4Noop ProgramLogs compressed account state to the Solana ledger. Only used in V1. Indexers parse transaction logs to reconstruct compressed account state
5Account Compression AuthoritySigns CPI calls from Light System Program to Account Compression Program
6Account Compression ProgramWrites to state and address tree accounts. The client and the Account Compression Program do not interact directly
7Invoking ProgramYour program's ID, used by Light System Program to derive the CPI Signer PDA, verify the CPI Signer matches your program ID, and set the owner of created compressed accounts
8System ProgramSolana System Program to transfer lamports
+ +
+ + + +**Merkle tree accounts** are state trees, address trees, and their associated queues to store compressed accounts and addresses. + +Depending on your instruction you must include indices for different tree and queue accounts. +* For other instructions than create, **use the state tree of the existing compressed account** as output state tree. +* The pubkey is automatically deduplicated when you pack accounts. + + +When creating or updating multiple accounts in a single transaction, use one output state tree. + + + +V2 is on Devnet and reduces compute unit consumption by up to 70%. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
InstructionAddress TreeState TreeNullifier QueueOutput State Tree
Create--
Update / Close / Reinit-
Burn--
+ +* **Address tree**: only used to derive and store a new address. +* **State tree**: used to reference the existing compressed account hash. Therefore not used by create. +* **Nullifier queue**: used to nullify the existing compressed account hash to prevent double spending. Therefore not used by create. +* **Output State tree**: used to store the new or updated compressed account hash. Burn does not produce output state. + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
InstructionAddress TreeState Tree (includes nullifier queue)Output Queue
Create-
Update / Close / Reinit-
Burn--
+ +* **Address tree**: only used to derive and store a new address. +* **State tree**: used to reference the existing compressed account hash. Therefore not used by create. V2 combines the state tree and nullifier queue into a single account. +* **Output State tree**: used to store the new or updated compressed account hash. Burn does not produce output state. +* **Output Queue**: used to store compressed account hashes. A forester node updates the Merkle tree asynchronously. + +
+
+ +
+ + +In your instruction, +1. Put your program-specific accounts first +2. Append Packed accounts to end of the vector (recommended due to variable length) + + + + + + + +```typescript +// 1. Initialize helper +const packedAccounts + = new PackedAccounts(); + +// 2. Add light system accounts +const systemAccountConfig + = SystemAccountMetaConfig.new(programId); +packedAccounts.addSystemAccounts(systemAccountConfig); + +// 3. Get indices for tree accounts +const addressMerkleTreePubkeyIndex + = packedAccounts.insertOrGet(addressTree); +const addressQueuePubkeyIndex + = packedAccounts.insertOrGet(addressQueue); + +const packedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + addressQueuePubkeyIndex, +}; + +// 4. Get index for output state tree +const stateTreeInfos = await rpc.getStateTreeInfos(); +const outputStateTree = selectStateTreeInfo(stateTreeInfos).tree; +const outputStateTreeIndex + = packedAccounts.insertOrGet(outputStateTree); + +// 5. Convert to Account Metas +const { remainingAccounts } + = packedAccounts.toAccountMetas(); +``` + + + +```typescript +// 1. Initialize helper +const packedAccounts + = new PackedAccounts(); + +// 2. Add system accounts +const systemAccountConfig + = SystemAccountMetaConfig.new(programId); +packedAccounts.addSystemAccounts(systemAccountConfig); + +// 3. Get indices for tree accounts +const merkleTreePubkeyIndex + = packedAccounts.insertOrGet(compressedAccount.treeInfo.tree); +const queuePubkeyIndex + = packedAccounts.insertOrGet(compressedAccount.treeInfo.queue); + +const packedInputAccounts = { + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: proofRpcResult.leafIndices[0], + rootIndex: proofRpcResult.rootIndices[0], +}; + +const outputStateTreeIndex + = packedAccounts.insertOrGet(outputStateTree); + +// 4. 
Convert to Account Metas +const { remainingAccounts } + = packedAccounts.toAccountMetas(); +``` + + + + +* Create uses address tree and address queue from the new address derivation +* Update/Close/Reinit/Burn use state tree and nullifier queue from the existing compressed account's TreeInfo +* Create derives a new address (no existing account) +* Update/Close/Reinit/Burn reference the existing compressed account + + + + + + + + +```rust +// 1. Initialize helper +let mut remaining_accounts = PackedAccounts::default(); + +// 2. Add system accounts +let config + = SystemAccountMetaConfig::new(program_id); + remaining_accounts.add_system_accounts(config)?; + +// 3. Get indices for tree accounts +let packed_accounts + = rpc_result.pack_tree_infos(&mut remaining_accounts); + +// 4. Get index for output state tree +let output_state_tree_info = rpc.get_random_state_tree_info()?; +let output_state_tree_index + = output_state_tree_info.pack_output_tree_index(&mut remaining_accounts)?; + +// 5. Convert to Account Metas +let (remaining_accounts_metas, _, _) + = remaining_accounts.to_account_metas(); +``` + + + + +```rust +// 1. Initialize helper +let mut remaining_accounts = PackedAccounts::default(); + +// 2. Add system accounts +let config + = SystemAccountMetaConfig::new(program_id); + remaining_accounts.add_system_accounts(config)?; + +// 3. Get indices for tree accounts +let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees // includes output_state_tree_index + .unwrap(); + +// 4. Convert to Account Metas +let (remaining_accounts_metas, _, _) + = remaining_accounts.to_account_metas(); +``` + + + + +* Create packs address tree for the new address, then adds output state tree separately +* Update/Close/Reinit/Burn pack state tree that includes output tree automatically + + + + + +
+ + +## Instruction Data + +Build your instruction data with the validity proof, tree account indices, and account data. + + + + + + +```typescript +const proof = { + 0: proofRpcResult.compressedProof, +}; + +const instructionData = { + proof, + addressTreeInfo: packedAddressTreeInfo, + outputStateTreeIndex: outputStateTreeIndex, + message, +}; +``` + +1. Include `compressedProof` from Step 3 to **prove the address does not exist** in the address tree +2. Specify **Merkle trees to store address and account hash** from Step 4 where you packed accounts. +3. Pass **initial account data** + + + + +```typescript +const proof = { + 0: proofRpcResult.compressedProof, +}; + +const instructionData = { + proof, + accountMeta: { + treeInfo: packedStateTreeInfo, + address: compressedAccount.address, + outputStateTreeIndex: outputStateTreeIndex + }, + currentMessage: currentAccount.message, + newMessage, +}; +``` + +1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +2. Specify the existing accounts address, its `packedStateTreeInfo` and the output state tree to store the updated compressed account hash. +3. Pass **current account data** and **new data** + +Use the state tree of the existing compressed account as output state tree. + + + + + +```typescript +const proof = { + 0: proofRpcResult.compressedProof, +}; + +const instructionData = { + proof, + accountMeta: { + treeInfo: packedStateTreeInfo, + address: compressedAccount.address, + outputStateTreeIndex: outputStateTreeIndex + }, + currentMessage: currentAccount.message, +}; +``` + +1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +2. Specify the existing accounts address, its `packedStateTreeInfo` and the output state tree to store the **hash with zero values** for the closed account. +3. Pass **current account data** + +Use the state tree of the existing compressed account as output state tree. 
+ + + + + +```typescript +const proof = { + 0: proofRpcResult.compressedProof, +}; + +const instructionData = { + proof, + accountMeta: { + treeInfo: packedStateTreeInfo, + address: compressedAccount.address, + outputStateTreeIndex: outputStateTreeIndex + }, +}; +``` + +1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +2. Specify the existing accounts address, its `packedStateTreeInfo` and the output state tree that will store the reinitialized account hash +3. Reinitialize creates an account with **default-initialized values** +* These values are `Pubkey` as all zeros, numbers as `0`, strings as empty. +* To set custom values, update the account in the same or a separate transaction. + +Use the state tree of the existing compressed account as output state tree. + + + + + +```typescript +const proof = { + 0: proofRpcResult.compressedProof, +}; + +const instructionData = { + proof, + accountMeta: { + treeInfo: packedStateTreeInfo, + address: compressedAccount.address + }, + currentMessage: currentAccount.message, +}; +``` + +1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +2. Specify the existing accounts address and its `packedStateTreeInfo`. You don't need to specify the output state tree, since burn permanently removes the account. +3. Pass **current account data** + + + + + + + + +```rust +let instruction_data = create::instruction::CreateAccount { + proof: rpc_result.proof, + address_tree_info: packed_accounts.address_trees[0], + output_state_tree_index: output_state_tree_index, + message, +} +.data(); +``` + +1. Include `proof` from Step 3 to prove the **address does not exist** in the address tree +2. Specify **address tree and output state tree** from Step 4 where you packed accounts +3. 
Pass **initial account data** + + + + +```rust +let instruction_data = update::instruction::UpdateAccount { + proof: rpc_result.proof, + current_account, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + new_message, +} +.data(); +``` + + +Use the state tree of the existing compressed account as output state tree. + + +1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +2. Specify the existing accounts address, its `packed_tree_infos` and the output state tree to store the updated compressed account hash +3. Pass **current account data** and **new data** + + + + + +```rust +let instruction_data = close::instruction::CloseAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + current_message, +} +.data(); +``` + + +Use the state tree of the existing compressed account as output state tree. + + +1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +2. Specify the existing accounts address, its `packed_tree_infos` and the output state tree to store the **hash with zero values** for the closed account +3. Pass **current account data** + + + + + +```rust +let instruction_data = reinit::instruction::ReinitAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, +} +.data(); +``` + + +Use the state tree of the existing compressed account as output state tree. + + +1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +2. 
Specify the existing accounts address, its `packed_tree_infos` and the output state tree that will store the reinitialized account hash +3. Reinitialize creates an account with **default-initialized values** +* These values are `Pubkey` as all zeros, numbers as `0`, strings as empty. +* To set custom values, update the account in the same or a separate transaction. + + + + +```rust +let instruction_data = burn::instruction::BurnAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMetaBurn { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + }, + current_message, +} +.data(); +``` + +1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +2. Specify the existing accounts address and its `packed_tree_infos`. You don't need to specify the output state tree, since burn permanently removes the account +3. Pass **current account data** + + + + + + +* When creating or updating multiple accounts in a single transaction, use one output state tree. +* Minimize the number of different trees per transaction to keep instruction data light. + + + + + +## Instruction + +Build the instruction with your `program_id`, `accounts`, and `data` from Step 5. + + +In `accounts`, +1. Put your program-specific accounts first +2. Append Packed accounts to end of the vector (recommended due to variable length) + + +```rust +let instruction = Instruction { + program_id: program_id, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: instruction_data, +}; +``` + + + + +## Send Transaction + + +
+ +# Full Code Examples + + + +```typescript expandable wrap +// create.ts +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { Create } from "../target/types/create"; +import idl from "../target/idl/create.json"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + defaultStaticAccountsStruct, + defaultTestStateTreeAccounts, + deriveAddress, + deriveAddressSeed, + LightSystemProgram, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, +} from "@lightprotocol/stateless.js"; +import * as assert from "assert"; + +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +const anchorWalletPath = path.join(os.homedir(), ".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("test-anchor", () => { + const program = anchor.workspace.Create as Program; + const coder = new anchor.BorshCoder(idl as anchor.Idl); + + it("create compressed account", async () => { + let signer = new web3.Keypair(); + let rpc = createRpc( + "http://127.0.0.1:8899", + "http://127.0.0.1:8784", + "http://127.0.0.1:3001", + { + commitment: "confirmed", + }, + ); + let lamports = web3.LAMPORTS_PER_SOL; + await rpc.requestAirdrop(signer.publicKey, lamports); + await sleep(2000); + + const outputStateTree = defaultTestStateTreeAccounts().merkleTree; + const addressTree = defaultTestStateTreeAccounts().addressTree; + const addressQueue = defaultTestStateTreeAccounts().addressQueue; + + const messageSeed = new TextEncoder().encode("message"); + const seed = deriveAddressSeed( + [messageSeed, signer.publicKey.toBytes()], + new web3.PublicKey(program.idl.address), + ); + const address = deriveAddress(seed, addressTree); + + // Create compressed account with message + const txId = await createCompressedAccount( + rpc, + addressTree, + addressQueue, + address, + program, + outputStateTree, + signer, + "Hello, compressed world!", + ); + 
console.log("Transaction ID:", txId); + + // Wait for indexer to process the transaction + const slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + let compressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + let myAccount = coder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + + console.log("Decoded data owner:", myAccount.owner.toBase58()); + console.log("Decoded data message:", myAccount.message); + + // Verify account data + assert.ok( + myAccount.owner.equals(signer.publicKey), + "Owner should match signer public key" + ); + assert.strictEqual( + myAccount.message, + "Hello, compressed world!", + "Message should match the created message" + ); + }); +}); + +async function createCompressedAccount( + rpc: Rpc, + addressTree: anchor.web3.PublicKey, + addressQueue: anchor.web3.PublicKey, + address: anchor.web3.PublicKey, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTree, + queue: addressQueue, + address: bn(address.toBytes()), + }, + ], + ); + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const addressMerkleTreePubkeyIndex = + remainingAccounts.insertOrGet(addressTree); + const addressQueuePubkeyIndex = remainingAccounts.insertOrGet(addressQueue); + const packedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + addressQueuePubkeyIndex, + }; + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .createAccount(proof, 
packedAddressTreeInfo, outputStateTreeIndex, message) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} +``` + + + +```rust expandable wrap +// test.rs +#![cfg(feature = "test-sbf")] + +use anchor_lang::AnchorDeserialize; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v1::derive_address, + instruction::{PackedAccounts, SystemAccountMetaConfig}, +}; +use create::MyCompressedAccount; +use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_create() { + let config = ProgramTestConfig::new(true, Some(vec![("create", create::ID)])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + + let (address, _) = derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_info.tree, + &create::ID, + ); + + create_compressed_account(&mut rpc, &payer, &address, "Hello, compressed world!".to_string()) + .await + .unwrap(); + + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + let data = &compressed_account.data.as_ref().unwrap().data; + let account = MyCompressedAccount::deserialize(&mut &data[..]).unwrap(); + assert_eq!(account.owner, payer.pubkey()); + assert_eq!(account.message, "Hello, compressed world!"); +} + +async fn create_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + address: &[u8; 32], + message: String, +) -> Result { + let config = 
SystemAccountMetaConfig::new(create::ID); + let mut remaining_accounts = PackedAccounts::default(); + remaining_accounts.add_system_accounts(config)?; + + let address_tree_info = rpc.get_address_tree_v1(); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + let packed_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + + let output_state_tree_index = rpc + .get_random_state_tree_info()? + .pack_output_tree_index(&mut remaining_accounts)?; + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: create::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + create::instruction::CreateAccount { + proof: rpc_result.proof, + address_tree_info: packed_accounts.address_trees[0], + output_state_tree_index: output_state_tree_index, + message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` + + + +Find all [full code examples with Rust and Typescript tests here](https://github.com/Lightprotocol/program-examples/tree/add-basic-operations-examples/basic-operations/anchor) for the following instructions: +- **create** - Initialize a new compressed account +- **update** - Modify data of an existing compressed account +- **close** - Close a compressed account (it can be initialized again). +- **reinit** - Reinitialize a closed account +- **burn** - Permanently delete a compressed account (it cannot be initialized again). + + +For help with debugging, see the [Error Cheatsheet](https://www.zkcompression.com/resources/error-cheatsheet) and [AskDevin](https://deepwiki.com/Lightprotocol/light-protocol/3.1-javascripttypescript-sdks). 
+ + +# Next Steps + + + diff --git a/intro-pages/landing.mdx b/intro-pages/landing.mdx index 2d475977..d06b93e0 100644 --- a/intro-pages/landing.mdx +++ b/intro-pages/landing.mdx @@ -68,30 +68,28 @@ ZK Compression is a framework that reduces the storage cost of Solana accounts b * This hash allows transactions to use the account data inside Solana's virtual machine as if it were stored on-chain. - * The protocol uses small zero-knowledge proofs (validity proofs) to verify the integrity of the compressed accounts. + * The protocol uses 128 byte zero-knowledge proofs (validity proofs) to verify the integrity of the compressed accounts. * By default, this is all done under the hood. You can fetch validity proofs from RPC providers that support ZK Compression. ### Using AI to work with ZK Compression -Integrate ZK Compression in your existing AI workflow by following the steps below. - -| Tool | Description | Link | -|:---------------------|:------------------------------------------------------------------------------|:--------------------------------------| -| DeepWiki/AskDevin | Query the Light Protocol codebase and documentation in natural language | Ask DeepWiki | -| MCP | Connect AI tools to the Light Protocol repository via Model Context Protocol | [Setup Guide](https://www.zkcompression.com/references/ai-tools-guide#mcp) | -| Docs AI Search | Search documentation with AI in the search bar. | Available throughout the documentation | - -**AI powered navigation**: Use AI search to quickly find information, get code examples, and learn complex topics. Available throughout our documentation. 
+**Look up documentation, code examples and guides using the Docs' AI search.** - +Integrate ZK Compression in your development: +| Tool | Description | Link | +|:---------------------|:------------------------------------------------------------------------------|:--------------------------------------| +| MCP | Connect AI tools to the Light Protocol repository via Model Context Protocol | [Setup Guide](https://www.zkcompression.com/references/ai-tools-guide#mcp) | +| DeepWiki/AskDevin | Use AskDevin for advanced AI assistance with your development. | Ask DeepWiki | + + ### Resources Date: Mon, 24 Nov 2025 00:12:17 +0000 Subject: [PATCH 02/19] Update docs.json: add client guide to navigation - Add client-library/client-guide to Compressed PDAs section - Rename "Guides" to "Program Guides" for clarity Co-Authored-By: Claude --- docs.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs.json b/docs.json index 3b508785..488e9223 100644 --- a/docs.json +++ b/docs.json @@ -74,7 +74,7 @@ "pages": [ "compressed-pdas/create-a-program-with-compressed-pdas", { - "group": "Guides", + "group": "Program Guides", "pages": [ "compressed-pdas/guides", "compressed-pdas/guides/how-to-create-compressed-accounts", @@ -84,7 +84,8 @@ "compressed-pdas/guides/how-to-burn-compressed-accounts" ] }, - "compressed-pdas/program-examples" + "compressed-pdas/program-examples", + "client-library/client-guide" ] }, { From 1d7d1d5d703e7afb30e5fc7a5c2200e69f0e75dd Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Mon, 24 Nov 2025 21:35:45 +0000 Subject: [PATCH 03/19] Simplify client guide address section: clarify create-only usage - Update heading to 'Address (Create only)' to clarify scope - Remove redundant 'Update, Close, Reinit, Burn' tab with duplicate fetch examples - Streamline address derivation guidance focused on account creation --- client-library/client-guide.mdx | 29 +---------------------------- mintlify-docs/docs | 2 +- 2 files changed, 2 insertions(+), 29 
deletions(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index b4c9b9f0..604d64ae 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -194,7 +194,7 @@ light test-validator -## Address +## Address (Create only) You only need to derive an address when you create a compressed account. @@ -307,33 +307,6 @@ let (address, _) = derive_address( **Use the same address** tree for all subsequent instructions **in client and program**. - - - -Use the address from the existing compressed account. - -Fetch the account with the RPC methods to get its address. - - - - -```typescript -const compressedAccount = await rpc.getCompressedAccount(address); -const accountAddress = compressedAccount.address; -``` - - - - - -```rust -let compressed_account = rpc.get_compressed_account(address).await?; -let account_address = compressed_account.address; -``` - - - - diff --git a/mintlify-docs/docs b/mintlify-docs/docs index 89f3ae60..d872c7a0 160000 --- a/mintlify-docs/docs +++ b/mintlify-docs/docs @@ -1 +1 @@ -Subproject commit 89f3ae6014495a18f8ef8696ababacd867dc7ed1 +Subproject commit d872c7a0de0cfcc70c81292064b8968fb8809c2e From 717acb0bfe1e72e1a6bc3ac13cfec79cdee3c07d Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 17:00:11 +0000 Subject: [PATCH 04/19] Remove docs sync workflow trigger --- .context/trigger-docs-sync.yml | 21 --------------------- 1 file changed, 21 deletions(-) delete mode 100644 .context/trigger-docs-sync.yml diff --git a/.context/trigger-docs-sync.yml b/.context/trigger-docs-sync.yml deleted file mode 100644 index 2fcecddd..00000000 --- a/.context/trigger-docs-sync.yml +++ /dev/null @@ -1,21 +0,0 @@ -# Add this file to the program-examples repo at: -# .github/workflows/trigger-docs-sync.yml - -name: Trigger Docs Sync - -on: - push: - branches: - - main - -jobs: - trigger-sync: - runs-on: ubuntu-latest - steps: - - name: Trigger docs repo sync - run: | - curl -X POST \ - -H "Accept: 
application/vnd.github.v3+json" \ - -H "Authorization: token ${{ secrets.DOCS_REPO_TOKEN }}" \ - https://api.github.com/repos/Lightprotocol/docs/dispatches \ - -d '{"event_type":"sync-examples"}' From b822d6877175f6483213081d236762a76ddd6356 Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 17:03:28 +0000 Subject: [PATCH 05/19] Add MDX-converted program examples --- .context/program-examples-mdx/README.mdx | 110 +++ .../account-comparison/Anchor-toml.mdx | 26 + .../account-comparison/Cargo-toml.mdx | 21 + .../account-comparison/package-json.mdx | 27 + .../account-comparison/Cargo-toml.mdx | 50 ++ .../account-comparison/Xargo-toml.mdx | 9 + .../account-comparison/src/lib-rs.mdx | 195 ++++++ .../tests/test_compressed_account-rs.mdx | 207 ++++++ .../tests/test_solana_account-rs.mdx | 98 +++ .../account-comparison/tsconfig-json.mdx | 17 + .../basic-operations/anchor/README.mdx | 89 +++ .../anchor/burn/Anchor-toml.mdx | 25 + .../anchor/burn/Cargo-toml.mdx | 22 + .../anchor/burn/package-json.mdx | 30 + .../anchor/burn/programs/burn/Cargo-toml.mdx | 45 ++ .../anchor/burn/programs/burn/Xargo-toml.mdx | 9 + .../anchor/burn/programs/burn/src/lib-rs.mdx | 121 ++++ .../burn/programs/burn/tests/test-rs.mdx | 170 +++++ .../anchor/burn/tests/burn-ts.mdx | 237 +++++++ .../anchor/burn/tsconfig-json.mdx | 24 + .../anchor/close/Anchor-toml.mdx | 25 + .../anchor/close/Cargo-toml.mdx | 22 + .../anchor/close/package-json.mdx | 30 + .../close/programs/close/Cargo-toml.mdx | 45 ++ .../close/programs/close/Xargo-toml.mdx | 9 + .../close/programs/close/src/lib-rs.mdx | 121 ++++ .../close/programs/close/tests/test-rs.mdx | 167 +++++ .../anchor/close/tests/close-ts.mdx | 245 +++++++ .../anchor/close/tsconfig-json.mdx | 24 + .../anchor/create/Anchor-toml.mdx | 26 + .../anchor/create/Cargo-toml.mdx | 22 + .../anchor/create/package-json.mdx | 30 + .../create/programs/create/Cargo-toml.mdx | 46 ++ .../create/programs/create/Xargo-toml.mdx | 9 + 
.../create/programs/create/src/lib-rs.mdx | 91 +++ .../create/programs/create/tests/test-rs.mdx | 106 +++ .../anchor/create/tests/create-ts.mdx | 159 +++++ .../anchor/create/tsconfig-json.mdx | 24 + .../basic-operations/anchor/package-json.mdx | 19 + .../anchor/reinit/Anchor-toml.mdx | 25 + .../anchor/reinit/Cargo-toml.mdx | 22 + .../anchor/reinit/package-json.mdx | 30 + .../reinit/programs/reinit/Cargo-toml.mdx | 45 ++ .../reinit/programs/reinit/Xargo-toml.mdx | 9 + .../reinit/programs/reinit/src/lib-rs.mdx | 147 ++++ .../reinit/programs/reinit/tests/test-rs.mdx | 231 +++++++ .../anchor/reinit/tests/reinit-ts.mdx | 330 +++++++++ .../anchor/reinit/tsconfig-json.mdx | 24 + .../anchor/update/Anchor-toml.mdx | 25 + .../anchor/update/Cargo-toml.mdx | 22 + .../anchor/update/package-json.mdx | 30 + .../update/programs/update/Cargo-toml.mdx | 45 ++ .../update/programs/update/Xargo-toml.mdx | 9 + .../update/programs/update/src/lib-rs.mdx | 124 ++++ .../update/programs/update/tests/test-rs.mdx | 176 +++++ .../anchor/update/tests/update-ts.mdx | 250 +++++++ .../anchor/update/tsconfig-json.mdx | 24 + .../basic-operations/native/Cargo-toml.mdx | 26 + .../basic-operations/native/README.mdx | 89 +++ .../basic-operations/native/package-json.mdx | 31 + .../native/programs/burn/Cargo-toml.mdx | 47 ++ .../native/programs/burn/src/lib-rs.mdx | 141 ++++ .../programs/burn/src/test_helpers-rs.mdx | 68 ++ .../native/programs/burn/tests/test-rs.mdx | 139 ++++ .../native/programs/close/Cargo-toml.mdx | 47 ++ .../native/programs/close/src/lib-rs.mdx | 144 ++++ .../programs/close/src/test_helpers-rs.mdx | 68 ++ .../native/programs/close/tests/test-rs.mdx | 130 ++++ .../native/programs/create/Cargo-toml.mdx | 45 ++ .../native/programs/create/Xargo-toml.mdx | 9 + .../native/programs/create/src/lib-rs.mdx | 130 ++++ .../programs/create/src/test_helpers-rs.mdx | 68 ++ .../native/programs/create/tests/test-rs.mdx | 116 ++++ .../native/programs/reinit/Cargo-toml.mdx | 47 ++ 
.../native/programs/reinit/src/lib-rs.mdx | 174 +++++ .../programs/reinit/src/test_helpers-rs.mdx | 124 ++++ .../native/programs/reinit/tests/test-rs.mdx | 148 ++++ .../native/programs/update/Cargo-toml.mdx | 47 ++ .../native/programs/update/Xargo-toml.mdx | 9 + .../native/programs/update/src/lib-rs.mdx | 173 +++++ .../programs/update/src/test_helpers-rs.mdx | 68 ++ .../native/programs/update/tests/test-rs.mdx | 139 ++++ .../basic-operations/native/tsconfig-json.mdx | 20 + .../counter/anchor/Anchor-toml.mdx | 25 + .../counter/anchor/Cargo-toml.mdx | 19 + .../counter/anchor/README.mdx | 73 ++ .../counter/anchor/package-json.mdx | 30 + .../anchor/programs/counter/Cargo-toml.mdx | 43 ++ .../anchor/programs/counter/Xargo-toml.mdx | 9 + .../anchor/programs/counter/src/lib-rs.mdx | 228 +++++++ .../anchor/programs/counter/tests/test-rs.mdx | 422 ++++++++++++ .../counter/anchor/tests/test-ts.mdx | 318 +++++++++ .../counter/anchor/tsconfig-json.mdx | 18 + .../counter/native/Cargo-toml.mdx | 48 ++ .../counter/native/Xargo-toml.mdx | 9 + .../counter/native/src/lib-rs.mdx | 306 +++++++++ .../counter/native/tests/test-rs.mdx | 384 +++++++++++ .../counter/pinocchio/Cargo-toml.mdx | 51 ++ .../counter/pinocchio/Xargo-toml.mdx | 9 + .../counter/pinocchio/src/lib-rs.mdx | 340 ++++++++++ .../counter/pinocchio/tests/test-rs.mdx | 375 ++++++++++ .../create-and-update/Anchor-toml.mdx | 26 + .../create-and-update/Cargo-toml.mdx | 22 + .../create-and-update/README.mdx | 51 ++ .../create-and-update/package-json.mdx | 30 + .../programs/create-and-update/Cargo-toml.mdx | 48 ++ .../programs/create-and-update/Xargo-toml.mdx | 9 + .../programs/create-and-update/src/lib-rs.mdx | 300 ++++++++ .../create-and-update/tests/test-rs.mdx | 396 +++++++++++ .../tests/test_create_two_accounts-rs.mdx | 159 +++++ .../tests/create_and_update-ts.mdx | 289 ++++++++ .../create-and-update/tsconfig-json.mdx | 24 + .../read-only/Cargo-toml.mdx | 33 + .../program-examples-mdx/read-only/README.mdx | 40 ++ 
.../read-only/Xargo-toml.mdx | 9 + .../read-only/src/lib-rs.mdx | 131 ++++ .../read-only/tests/test-rs.mdx | 180 +++++ .../program-examples-mdx/zk-id/Cargo-toml.mdx | 46 ++ .../program-examples-mdx/zk-id/README.mdx | 107 +++ .../program-examples-mdx/zk-id/Xargo-toml.mdx | 9 + .../program-examples-mdx/zk-id/build-rs.mdx | 39 ++ .../zk-id/circuits/README.mdx | 64 ++ .../zk-id/package-json.mdx | 35 + .../program-examples-mdx/zk-id/src/lib-rs.mdx | 348 ++++++++++ .../zk-id/src/verifying_key-rs.mdx | 32 + .../zk-id/tests/circuit-rs.mdx | 464 +++++++++++++ .../zk-id/tests/test-rs.mdx | 639 ++++++++++++++++++ 127 files changed, 12804 insertions(+) create mode 100644 .context/program-examples-mdx/README.mdx create mode 100644 .context/program-examples-mdx/account-comparison/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/account-comparison/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/account-comparison/package-json.mdx create mode 100644 .context/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx create mode 100644 .context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx create mode 100644 .context/program-examples-mdx/account-comparison/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/README.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx create mode 100644 
.context/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/package-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/package-json.mdx create mode 100644 
.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/package-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/package-json.mdx create mode 100644 
.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx create mode 100644 .context/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/README.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/package-json.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx create mode 100644 
.context/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/basic-operations/native/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/README.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/package-json.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx create mode 100644 
.context/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/tests/test-ts.mdx create mode 100644 .context/program-examples-mdx/counter/anchor/tsconfig-json.mdx create mode 100644 .context/program-examples-mdx/counter/native/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/counter/native/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/counter/native/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/counter/native/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/create-and-update/Anchor-toml.mdx create mode 100644 .context/program-examples-mdx/create-and-update/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/create-and-update/README.mdx create mode 100644 .context/program-examples-mdx/create-and-update/package-json.mdx create mode 100644 .context/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx create mode 100644 .context/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx create mode 100644 .context/program-examples-mdx/create-and-update/tsconfig-json.mdx create mode 100644 
.context/program-examples-mdx/read-only/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/read-only/README.mdx create mode 100644 .context/program-examples-mdx/read-only/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/read-only/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/read-only/tests/test-rs.mdx create mode 100644 .context/program-examples-mdx/zk-id/Cargo-toml.mdx create mode 100644 .context/program-examples-mdx/zk-id/README.mdx create mode 100644 .context/program-examples-mdx/zk-id/Xargo-toml.mdx create mode 100644 .context/program-examples-mdx/zk-id/build-rs.mdx create mode 100644 .context/program-examples-mdx/zk-id/circuits/README.mdx create mode 100644 .context/program-examples-mdx/zk-id/package-json.mdx create mode 100644 .context/program-examples-mdx/zk-id/src/lib-rs.mdx create mode 100644 .context/program-examples-mdx/zk-id/src/verifying_key-rs.mdx create mode 100644 .context/program-examples-mdx/zk-id/tests/circuit-rs.mdx create mode 100644 .context/program-examples-mdx/zk-id/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/README.mdx b/.context/program-examples-mdx/README.mdx new file mode 100644 index 00000000..dc01604c --- /dev/null +++ b/.context/program-examples-mdx/README.mdx @@ -0,0 +1,110 @@ +--- +title: "README.md" +description: "https://github.com/Lightprotocol/program-examples/blob/main/README.md" +--- + +```markdown +# Compressed Accounts Program Examples + +[![Ask DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/Lightprotocol/program-examples) to query the program examples in natural language and for help with debugging. + +## Examples + +### Basic Operations + +- **[basic-operations/anchor](./basic-operations/anchor/)** - Anchor program with Rust and TypeScript tests +- **[basic-operations/native-rust](./basic-operations/native-rust/)** - Native Solana program with light-sdk and Rust tests. 
+ +Basic Operations include: +- **create** - Initialize a new compressed account. +- **update** - Modify data in an existing compressed account. +- **close** - Clear account data and preserve its address. +- **reinit** - Reinitialize a closed account with the same address. +- **burn** - Permanently delete a compressed account. + +### Counter Program + +Full compressed account lifecycle (create, increment, decrement, reset, close): + +- **[counter/anchor](./counter/anchor/)** - Anchor program with Rust and TypeScript tests +- **[counter/native](./counter/native/)** - Native Solana program with light-sdk and Rust tests. +- **[counter/pinocchio](./counter/pinocchio/)** - Pinocchio program with light-sdk-pinocchio and Rust tests. + + +### Create-and-update Program + +- **[create-and-update](./create-and-update/)** - Create a new compressed account and update an existing compressed account with a single validity proof in one instruction. + +### Create-and-read Program + +- **[read-only](./read-only)** - Create a new compressed account and read it onchain. + + +### Compare uncompressed vs compressed accounts Program + +- **[account-comparison](./account-comparison/)** - Compare compressed vs regular Solana accounts. + +### zk-id Program + +- **[zk-id](./zk-id)** - A minimal zk id Solana program that uses zero-knowledge proofs for identity verification with compressed accounts. + + +## Light Protocol dependencies + +### Rust Crates + +- `light-sdk` - Core SDK for compressed accounts in native and anchor programs +- `light-sdk-pinocchio` Core SDK for compressed accounts in pinocchio programs +- `light-hasher` - Hashing utilities for ZK compression +- `light-client` - RPC client and indexer for interacting with compressed accounts +- `light-program-test` - Testing utilities for compressed programs. 
+ +### TypeScript/JavaScript Packages + +- `@lightprotocol/stateless.js@0.22.1-alpha.1` - Client library for interacting with compressed accounts +- `@lightprotocol/zk-compression-cli@0.27.1-alpha.2` - Command-line tools for ZK compression development + +## Prerequisites + +Required versions: + +- **Rust**: 1.90.0 or later +- **Solana CLI**: 2.3.11 +- **Anchor CLI**: 0.31.1 +- **Zk compression CLI**: 0.27.1-alpha.2 or later +- **Node.js**: 23.5.0 or later + +Install the Light CLI: + +```bash +$ npm -g i @lightprotocol/zk-compression-cli@0.27.1-alpha.2 +``` + +Install Solana CLI: + +```bash +sh -c "$(curl -sSfL https://release.solana.com/v2.3.11/install)" +``` + +Install Anchor CLI: + +```bash +cargo install --git https://github.com/coral-xyz/anchor avm --force +avm install latest +avm use 0.31.1 +``` + +## Getting Started with your own Program + +1. install the light cli + +```bash +$ npm -g i @lightprotocol/zk-compression-cli@0.27.1-alpha.2 +``` + +2. instantiate a template Solana program with compressed accounts + +```bash +$ light init +``` +``` diff --git a/.context/program-examples-mdx/account-comparison/Anchor-toml.mdx b/.context/program-examples-mdx/account-comparison/Anchor-toml.mdx new file mode 100644 index 00000000..5c69cb3c --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/Anchor-toml.mdx @@ -0,0 +1,26 @@ +--- +title: "account-comparison/Anchor.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/Anchor.toml" +--- + +```toml +[toolchain] +package_manager = "yarn" + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +account_comparison = "FYX4GmKJYzSiycc7XZKf12NGXNE9siSx1cJubYJniHcv" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/account-comparison/Cargo-toml.mdx 
b/.context/program-examples-mdx/account-comparison/Cargo-toml.mdx new file mode 100644 index 00000000..db0bb132 --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/Cargo-toml.mdx @@ -0,0 +1,21 @@ +--- +title: "account-comparison/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/*" +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/account-comparison/package-json.mdx b/.context/program-examples-mdx/account-comparison/package-json.mdx new file mode 100644 index 00000000..6b4cfeaf --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/package-json.mdx @@ -0,0 +1,27 @@ +--- +title: "account-comparison/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check" + }, + "dependencies": { + "@coral-xyz/anchor": "^0.31.1" + }, + "devDependencies": { + "chai": "^4.3.4", + "mocha": "^9.0.3", + "ts-mocha": "^10.0.0", + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "typescript": "^5.7.3", + "prettier": "^2.6.2" + } +} +``` diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx b/.context/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx new file mode 100644 index 00000000..3c347093 --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx @@ -0,0 +1,50 @@ +--- +title: "account-comparison/programs/account-comparison/Cargo.toml" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/programs/account-comparison/Cargo.toml" +--- + +```toml +[package] +name = "account-comparison" +version = "0.1.0" +description = "Created with Anchor" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "account_comparison" + +[features] +default = [] +cpi = ["no-entrypoint"] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +idl-build = ["anchor-lang/idl-build"] + + +[dependencies] +anchor-lang = "0.31.1" +light-hasher = { version = "5.0.0", features = ["solana"] } +light-sdk = { version = "0.16.0" , features = ["anchor"] } +light-sdk-types = { version = "0.16.0" , features = ["anchor"] } + +[dev-dependencies] +light-client = "0.16.0" +litesvm = "0.7.1" +solana-keypair = "2.2" +solana-message = "2.2" +solana-pubkey = { version = "2.2", features = ["curve25519", "sha2"] } +solana-signer = "2.2" +solana-transaction = "2.2" +light-program-test = "0.16.0" +tokio = "1.43.0" +solana-sdk = "2.2" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx b/.context/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx new file mode 100644 index 00000000..260a3476 --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "account-comparison/programs/account-comparison/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/programs/account-comparison/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx 
b/.context/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx new file mode 100644 index 00000000..bbfbe84e --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx @@ -0,0 +1,195 @@ +--- +title: "account-comparison/programs/account-comparison/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/programs/account-comparison/src/lib.rs" +--- + +```rust +#![allow(deprecated)] + +use anchor_lang::prelude::*; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{v1::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, LightHasher, +}; + +#[error_code] +pub enum CustomError { + #[msg("No authority to perform this action")] + Unauthorized, +} + +declare_id!("FYX4GmKJYzSiycc7XZKf12NGXNE9siSx1cJubYJniHcv"); + +const CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("FYX4GmKJYzSiycc7XZKf12NGXNE9siSx1cJubYJniHcv"); + +#[program] +pub mod account_comparison { + use light_sdk::cpi::{ + v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + use light_sdk::error::LightSdkError; + + use super::*; + + pub fn create_account(ctx: Context, name: String) -> Result<()> { + let account = &mut ctx.accounts.account; + account.data = [1; 128]; + account.name = name; + account.user = *ctx.accounts.user.key; + + Ok(()) + } + + pub fn update_data(ctx: Context, data: [u8; 128]) -> Result<()> { + let account = &mut ctx.accounts.account; + account.data = data; + Ok(()) + } + + pub fn create_compressed_account<'info>( + ctx: Context<'_, '_, '_, 'info, CreateCompressedAccount<'info>>, + name: String, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_tree_index: u8, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.user.as_ref(), + 
ctx.remaining_accounts, + CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[b"account", ctx.accounts.user.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|err| ProgramError::from(LightSdkError::from(err)))?, + &crate::ID, + ); + + // LightAccount::new_init will create an account with empty output state (no input state). + // Modifying the account will modify the output state that when converted to_account_info() + // is hashed with poseidon hashes, serialized with borsh + // and created with invoke_light_system_program by invoking the light-system-program. + // The hashing scheme is the account structure derived with LightHasher. + let mut compressed_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_tree_index, + ); + + compressed_account.user = ctx.accounts.user.key(); + compressed_account.name = name; + compressed_account.data = [1u8; 128]; + + let new_address_params = address_tree_info.into_new_address_params_packed(address_seed); + + LightSystemProgramCpi::new_cpi(CPI_SIGNER, proof) + .with_light_account(compressed_account)? 
+ .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + pub fn update_compressed_account<'info>( + ctx: Context<'_, '_, '_, 'info, UpdateCompressedAccount<'info>>, + new_data: [u8; 128], + existing_data: [u8; 128], + name: String, + proof: ValidityProof, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + let mut compressed_account = LightAccount::::new_mut( + &crate::ID, + &account_meta, + CompressedAccountData { + user: ctx.accounts.user.key(), + data: existing_data, + name, + }, + )?; + + if compressed_account.user != ctx.accounts.user.key() { + return err!(CustomError::Unauthorized); + } + + compressed_account.data = new_data; + + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.user.as_ref(), + ctx.remaining_accounts, + CPI_SIGNER, + ); + + LightSystemProgramCpi::new_cpi(CPI_SIGNER, proof) + .with_light_account(compressed_account)? + .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct CreateAccount<'info> { + #[account(mut)] + pub user: Signer<'info>, + #[account(init, payer = user, space = 8 + 32 + 128 + 64, seeds = [b"account", user.key().as_ref()], bump)] + pub account: Account<'info, AccountData>, + pub system_program: Program<'info, System>, +} + +/// [0..8, 8..40,40..168,168..232] +#[account] +#[derive(Debug)] +pub struct AccountData { + pub user: Pubkey, + pub name: String, + pub data: [u8; 128], +} + +#[derive(Accounts)] +pub struct UpdateData<'info> { + #[account(mut, has_one = user)] + pub account: Account<'info, AccountData>, + #[account(mut)] + pub user: Signer<'info>, +} + +#[derive(Accounts)] +pub struct CreateCompressedAccount<'info> { + #[account(mut)] + pub user: Signer<'info>, +} + +#[derive(Accounts)] +pub struct UpdateCompressedAccount<'info> { + #[account(mut)] + pub user: Signer<'info>, +} + +#[derive(Clone, Debug, AnchorDeserialize, AnchorSerialize, LightDiscriminator, LightHasher)] +pub struct CompressedAccountData { + #[hash] + pub user: Pubkey, + 
#[hash] + pub name: String, + #[hash] + pub data: [u8; 128], +} + +impl Default for CompressedAccountData { + fn default() -> Self { + Self { + user: Pubkey::default(), + name: String::default(), + data: [0u8; 128], + } + } +} +``` diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx b/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx new file mode 100644 index 00000000..a911d3e5 --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx @@ -0,0 +1,207 @@ +--- +title: "account-comparison/programs/account-comparison/tests/test_compressed_account.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/programs/account-comparison/tests/test_compressed_account.rs" +--- + +```rust +use account_comparison::CompressedAccountData; +use anchor_lang::{AnchorDeserialize, InstructionData, ToAccountMetas}; +use light_client::indexer::{CompressedAccount, TreeInfo}; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v1::derive_address, + instruction::{account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig}, +}; +use solana_sdk::{ + instruction::Instruction, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_create_compressed_account() { + let name = "Heinrich".to_string(); + + let config = ProgramTestConfig::new( + true, + Some(vec![("account_comparison", account_comparison::ID)]), + ); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let user = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + + let (address, _) = derive_address( + &[b"account", user.pubkey().as_ref()], + &address_tree_info.tree, + &account_comparison::ID, 
+ ); + + // Create the counter. + create_compressed_account(&mut rpc, &user, &address, address_tree_info, name.clone()) + .await + .unwrap(); + + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + let data_account = CompressedAccountData::deserialize( + &mut compressed_account.data.as_ref().unwrap().data.as_slice(), + ) + .unwrap(); + assert_eq!(data_account.user, user.pubkey()); + assert_eq!(data_account.name, name); + assert_eq!(data_account.data, [1u8; 128]); + + update_compressed_account(&mut rpc, &user, &compressed_account, [2u8; 128]) + .await + .unwrap(); + + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + let data_account = CompressedAccountData::deserialize( + &mut compressed_account.data.as_ref().unwrap().data.as_slice(), + ) + .unwrap(); + assert_eq!(data_account.user, user.pubkey()); + assert_eq!(data_account.name, name); + assert_eq!(data_account.data, [2u8; 128]); +} + +async fn create_compressed_account( + rpc: &mut R, + user: &Keypair, + address: &[u8; 32], + address_tree_info: TreeInfo, + name: String, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(account_comparison::ID); + remaining_accounts.add_system_accounts(config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + tree: address_tree_info.tree, + address: *address, + }], + None, + ) + .await? + .value; + + let output_tree_index = rpc + .get_random_state_tree_info()? 
+ .pack_output_tree_index(&mut remaining_accounts)?; + let address_tree_info = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .address_trees[0]; + + let instruction_data = account_comparison::instruction::CreateCompressedAccount { + proof: rpc_result.proof, + address_tree_info, + output_tree_index, + name, + }; + + let accounts = account_comparison::accounts::CreateCompressedAccount { + user: user.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: account_comparison::ID, + accounts: [ + accounts.to_account_metas(Some(true)), + remaining_accounts_metas, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &user.pubkey(), &[user]) + .await +} + +async fn update_compressed_account( + rpc: &mut R, + user: &Keypair, + compressed_account: &CompressedAccount, + new_data: [u8; 128], +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(account_comparison::ID); + remaining_accounts.add_system_accounts(config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_infos = rpc_result.pack_tree_infos(&mut remaining_accounts); + + let compressed_account_data = CompressedAccountData::deserialize( + &mut compressed_account.data.as_ref().unwrap().data.as_slice(), + ) + .unwrap(); + + let account_meta = CompressedAccountMeta { + tree_info: packed_tree_infos + .state_trees + .as_ref() + .unwrap() + .packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_infos + .state_trees + .as_ref() + .unwrap() + .output_tree_index, + }; + + let instruction_data = account_comparison::instruction::UpdateCompressedAccount { + proof: rpc_result.proof, + new_data, + existing_data: compressed_account_data.data, + name: compressed_account_data.name, + account_meta, + }; + + let accounts = account_comparison::accounts::UpdateCompressedAccount { + user: user.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: account_comparison::ID, + accounts: [ + accounts.to_account_metas(Some(true)), + remaining_accounts_metas, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &user.pubkey(), &[user]) + .await +} +``` diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx b/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx new file mode 100644 index 00000000..f6c226ce --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx @@ -0,0 +1,98 @@ +--- +title: "account-comparison/programs/account-comparison/tests/test_solana_account.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/programs/account-comparison/tests/test_solana_account.rs" +--- + +```rust +use account_comparison::AccountData; +use 
anchor_lang::solana_program::instruction::Instruction; +use anchor_lang::{AnchorDeserialize, InstructionData, ToAccountMetas}; +use litesvm::LiteSVM; +use solana_keypair::Keypair; +use solana_message::Message; +use solana_pubkey::Pubkey; +use solana_signer::Signer; +use solana_transaction::Transaction; + +#[test] +fn test_solana_account() { + let user = Keypair::new(); + + let mut svm = LiteSVM::new(); + svm.add_program_from_file( + account_comparison::ID, + "../../target/deploy/account_comparison.so", + ) + .unwrap(); + svm.airdrop(&user.pubkey(), 1_000_000_000_000).unwrap(); + + let account_pda = Pubkey::find_program_address( + &[b"account", user.pubkey().as_ref()], + &account_comparison::id(), + ) + .0; + + create_solana_account(&mut svm, &user, &account_pda); + + let account = svm.get_account(&account_pda).unwrap(); + let data_account = AccountData::deserialize(&mut &account.data[8..]).unwrap(); + println!("data_account {data_account:?}",); + assert_eq!(data_account.name, "Heinrich".to_string()); + assert_eq!(data_account.data, [1u8; 128]); + + update_solana_account(&mut svm, &user, &account_pda, [2u8; 128]); + + let account = svm.get_account(&account_pda).unwrap(); + let data_account = AccountData::deserialize(&mut &account.data[8..]).unwrap(); + println!("data_account {data_account:?}",); + assert_eq!(data_account.name, "Heinrich".to_string()); + assert_eq!(data_account.data, [2u8; 128]); +} + +fn create_solana_account(svm: &mut LiteSVM, user: &Keypair, account_pda: &Pubkey) { + let instruction = account_comparison::instruction::CreateAccount { + name: "Heinrich".to_string(), + }; + let accounts = account_comparison::accounts::CreateAccount { + user: user.pubkey(), + account: *account_pda, + system_program: Pubkey::default(), + }; + + let instruction = Instruction { + program_id: account_comparison::id(), + accounts: accounts.to_account_metas(Some(true)), + data: instruction.data(), + }; + + let tx = Transaction::new( + &[&user], + 
Message::new(&[instruction], Some(&user.pubkey())), + svm.latest_blockhash(), + ); + let tx_res = svm.send_transaction(tx).unwrap(); + println!("{}", tx_res.pretty_logs()); +} + +fn update_solana_account(svm: &mut LiteSVM, user: &Keypair, account_pda: &Pubkey, data: [u8; 128]) { + let instruction_data = account_comparison::instruction::UpdateData { data }; + let accounts = account_comparison::accounts::UpdateData { + user: user.pubkey(), + account: *account_pda, + }; + + let instruction = Instruction { + program_id: account_comparison::id(), + accounts: accounts.to_account_metas(Some(true)), + data: instruction_data.data(), + }; + + let tx = Transaction::new( + &[&user], + Message::new(&[instruction], Some(&user.pubkey())), + svm.latest_blockhash(), + ); + let tx_res = svm.send_transaction(tx).unwrap(); + println!("{}", tx_res.pretty_logs()); +} +``` diff --git a/.context/program-examples-mdx/account-comparison/tsconfig-json.mdx b/.context/program-examples-mdx/account-comparison/tsconfig-json.mdx new file mode 100644 index 00000000..62500894 --- /dev/null +++ b/.context/program-examples-mdx/account-comparison/tsconfig-json.mdx @@ -0,0 +1,17 @@ +--- +title: "account-comparison/tsconfig.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/account-comparison/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true + } +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/README.mdx b/.context/program-examples-mdx/basic-operations/anchor/README.mdx new file mode 100644 index 00000000..c7893efd --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/README.mdx @@ -0,0 +1,89 @@ +--- +title: "basic-operations/anchor/README.md" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/README.md" +--- + 
+
+```markdown
+# Basic Operations - Anchor Programs
+
+Standalone Anchor programs for compressed accounts.
+
+## Structure
+
+Each operation is an independent Anchor project:
+
+- **create** - Initialize a new compressed account
+- **update** - Modify data in an existing compressed account
+- **close** - Clear account data while preserving address
+- **reinit** - Reinitialize a closed account
+- **burn** - Permanently delete a compressed account
+
+Each project contains its own workspace, program, and tests.
+
+## Build
+
+Navigate to a specific project directory and build:
+
+```bash
+cd create/ # or update/, close/, reinit/, burn/
+anchor build
+```
+
+## Test
+
+### Requirements
+
+- light cli version 0.27.1-alpha.2+ (install via `npm install -g @lightprotocol/zk-compression-cli@0.27.1-alpha.2`)
+- solana cli version 2.1.16+
+- anchor version 0.31.1+
+- Node.js and npm
+
+### Running Tests
+
+#### Rust Tests
+
+```bash
+cd create/ # or update/, close/, reinit/, burn/
+cargo test-sbf
+```
+
+#### TypeScript Tests
+
+1. Build the program and sync the program ID:
+
+   ```bash
+   cd create/ # or update/, close/, reinit/, burn/
+   anchor build && anchor keys sync && anchor build
+   ```
+
+2. Start the test validator with the program deployed:
+
+   ```bash
+   light test-validator --sbf-program "<PROGRAM_ID>" ./target/deploy/<PROGRAM_NAME>.so
+   ```
+
+   NOTE: Replace `<PROGRAM_ID>` with the ID from `Anchor.toml` and `<PROGRAM_NAME>` with `create`, `update`, `close`, `reinit`, or `burn`.
+
+3. Install dependencies and run tests:
+
+   ```bash
+   npm install
+   anchor test --skip-local-validator --skip-build --skip-deploy
+   ```
+
+The TypeScript tests demonstrate client-side interaction with compressed accounts using `@lightprotocol/stateless.js`.
+
+`light test-validator` spawns the following background processes:
+
+1. solana test validator `http://127.0.0.1:8899`
+2. prover server `http://127.0.0.1:3001`
+3. photon indexer `http://127.0.0.1:8784`
+
+You can kill these background processes with `lsof -i:<port>` and `kill <pid>`.
+ +## Disclaimer + +This reference implementation is not audited. + +The Light Protocol programs are audited and deployed on Solana devnet and mainnet. +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx new file mode 100644 index 00000000..18ac3437 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx @@ -0,0 +1,25 @@ +--- +title: "basic-operations/anchor/burn/Anchor.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/Anchor.toml" +--- + +```toml +[toolchain] + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +burn = "BJhPWQnD31mdo6739Mac1gLuSsbbwTmpgjHsW6shf6WA" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx new file mode 100644 index 00000000..946081c7 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx @@ -0,0 +1,22 @@ +--- +title: "basic-operations/anchor/burn/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/burn", +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 + +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx new file mode 100644 index 00000000..c25c5263 --- /dev/null +++ 
b/.context/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx @@ -0,0 +1,30 @@ +--- +title: "basic-operations/anchor/burn/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + "test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + "@coral-xyz/anchor": "0.31.1", + "@lightprotocol/stateless.js": "0.22.1-alpha.1", + "dotenv": "^16.5.0" + }, + "devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^5.0.0" + } +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx new file mode 100644 index 00000000..939b3aa2 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx @@ -0,0 +1,45 @@ +--- +title: "basic-operations/anchor/burn/programs/burn/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/programs/burn/Cargo.toml" +--- + +```toml +[package] +name = "burn" +version = "0.1.0" +description = "Created with Anchor" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "burn" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +default = ["idl-build"] +test-sbf = [] +idl-build = ["anchor-lang/idl-build", "light-sdk/idl-build"] + +[dependencies] +anchor-lang = "0.31.1" +light-sdk = { version = "0.16.0", features = ["anchor"] } +light-sdk-types = { version = "0.16.0", features = ["anchor"] } + +[target.'cfg(not(target_os = 
"solana"))'.dependencies] +solana-sdk = "2.2" + +[dev-dependencies] +light-client = { version = "0.16.0" } +light-program-test = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx new file mode 100644 index 00000000..f3010760 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "basic-operations/anchor/burn/programs/burn/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/programs/burn/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx new file mode 100644 index 00000000..50f68ffe --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx @@ -0,0 +1,121 @@ +--- +title: "basic-operations/anchor/burn/programs/burn/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/programs/burn/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{v1::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{account_meta::CompressedAccountMetaBurn, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; + 
+declare_id!("BJhPWQnD31mdo6739Mac1gLuSsbbwTmpgjHsW6shf6WA"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("BJhPWQnD31mdo6739Mac1gLuSsbbwTmpgjHsW6shf6WA"); + +#[program] +pub mod burn { + + use super::*; + use light_sdk::cpi::{ + v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + + /// Setup: Creates a compressed account + pub fn create_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[b"message", ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let mut my_compressed_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + my_compressed_account.owner = ctx.accounts.signer.key(); + my_compressed_account.message = message.clone(); + + msg!( + "Created compressed account with message: {}", + my_compressed_account.message + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? 
+ .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Burns a compressed account permanently + pub fn burn_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + account_meta: CompressedAccountMetaBurn, + current_message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let my_compressed_account = LightAccount::::new_burn( + &crate::ID, + &account_meta, + MyCompressedAccount { + owner: ctx.accounts.signer.key(), + message: current_message, + }, + )?; + + msg!("Burning compressed account permanently"); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? + .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +#[event] +#[derive(Clone, Debug, Default, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx new file mode 100644 index 00000000..a5c3f7f0 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx @@ -0,0 +1,170 @@ +--- +title: "basic-operations/anchor/burn/programs/burn/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/programs/burn/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use anchor_lang::AnchorDeserialize; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, 
RpcError, +}; +use light_sdk::instruction::{ + account_meta::CompressedAccountMetaBurn, PackedAccounts, SystemAccountMetaConfig, +}; +use burn::MyCompressedAccount; +use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_burn() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("burn", burn::ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + // Create account first + let address_tree_info = rpc.get_address_tree_v1(); + let (address, _) = light_sdk::address::v1::derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_info.tree, + &burn::ID, + ); + + create_compressed_account( + &mut rpc, + &payer, + &address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + let account = rpc.get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + let data = &account.data.as_ref().unwrap().data; + let message_account = MyCompressedAccount::deserialize(&mut &data[..]).unwrap(); + assert_eq!(message_account.owner, payer.pubkey()); + assert_eq!(message_account.message, "Hello, compressed world!"); + + // Burn the account + burn_compressed_account(&mut rpc, &payer, account, "Hello, compressed world!".to_string()) + .await + .unwrap(); + + // Verify account is burned (should not exist) + let result = rpc.get_compressed_account(address, None).await; + assert!(result.unwrap().value.is_none(), "Account should be burned and not exist"); +} + +async fn burn_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + compressed_account: CompressedAccount, + current_message: String, +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(burn::ID); + remaining_accounts.add_system_accounts(config)?; + let hash = compressed_account.hash; + + let rpc_result = rpc + 
.get_validity_proof(vec![hash], vec![], None) + .await? + .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: burn::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + burn::instruction::BurnAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMetaBurn { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + }, + current_message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn create_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + address: &[u8; 32], + message: String, +) -> Result { + let config = SystemAccountMetaConfig::new(burn::ID); + let mut remaining_accounts = PackedAccounts::default(); + remaining_accounts.add_system_accounts(config)?; + + let address_tree_info = rpc.get_address_tree_v1(); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![light_program_test::AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + let packed_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + + let output_state_tree_index = rpc + .get_random_state_tree_info()? 
+ .pack_output_tree_index(&mut remaining_accounts)?; + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: burn::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + burn::instruction::CreateAccount { + proof: rpc_result.proof, + address_tree_info: packed_accounts.address_trees[0], + output_state_tree_index: output_state_tree_index, + message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx b/.context/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx new file mode 100644 index 00000000..caaad06f --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx @@ -0,0 +1,237 @@ +--- +title: "basic-operations/anchor/burn/tests/burn.ts" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/tests/burn.ts" +--- + +```typescript +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { Burn } from "../target/types/burn"; +import burnIdl from "../target/idl/burn.json"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + defaultStaticAccountsStruct, + defaultTestStateTreeAccounts, + deriveAddress, + deriveAddressSeed, + LightSystemProgram, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, +} from "@lightprotocol/stateless.js"; +import * as assert from "assert"; + +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +const anchorWalletPath = path.join(os.homedir(), ".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("test-anchor-burn", () => { + const burnProgram = anchor.workspace.Burn as 
Program<Burn>; + const burnCoder = new anchor.BorshCoder(burnIdl as anchor.Idl); + + it("burn compressed account", async () => { + let signer = new web3.Keypair(); + let rpc = createRpc( + "http://127.0.0.1:8899", + "http://127.0.0.1:8784", + "http://127.0.0.1:3001", + { + commitment: "confirmed", + }, + ); + let lamports = web3.LAMPORTS_PER_SOL; + await rpc.requestAirdrop(signer.publicKey, lamports); + await sleep(2000); + + const outputStateTree = defaultTestStateTreeAccounts().merkleTree; + const addressTree = defaultTestStateTreeAccounts().addressTree; + const addressQueue = defaultTestStateTreeAccounts().addressQueue; + + const messageSeed = new TextEncoder().encode("message"); + const seed = deriveAddressSeed( + [messageSeed, signer.publicKey.toBytes()], + new web3.PublicKey(burnProgram.idl.address), + ); + const address = deriveAddress(seed, addressTree); + + // Step 1: Create compressed account with initial message + const createTxId = await createCompressedAccount( + rpc, + addressTree, + addressQueue, + address, + burnProgram, + outputStateTree, + signer, + "Hello, compressed world!", + ); + console.log("Create Transaction ID:", createTxId); + + // Wait for indexer to process the create transaction + let slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + // Step 2: Get the created account and verify + let compressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + let myAccount = burnCoder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + assert.strictEqual(myAccount.message, "Hello, compressed world!"); + assert.ok(myAccount.owner.equals(signer.publicKey), "Owner should match signer public key"); + console.log("Created message:", myAccount.message); + + // Step 3: Burn the account permanently + const burnTxId = await burnCompressedAccount( + rpc, + compressedAccount, + burnProgram, + signer, + "Hello, compressed world!", + ); + console.log("Burn Transaction ID:", burnTxId); + + // Wait for 
indexer to process the burn transaction + slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + // Step 4: Verify the account is burned (does not exist) + try { + await rpc.getCompressedAccount(bn(address.toBytes())); + assert.fail("Expected account to not exist after burning"); + } catch (error: any) { + // Account should not exist after burn + console.log("Verified account was burned"); + } + }); +}); + +async function createCompressedAccount( + rpc: Rpc, + addressTree: anchor.web3.PublicKey, + addressQueue: anchor.web3.PublicKey, + address: anchor.web3.PublicKey, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTree, + queue: addressQueue, + address: bn(address.toBytes()), + }, + ], + ); + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const addressMerkleTreePubkeyIndex = + remainingAccounts.insertOrGet(addressTree); + const addressQueuePubkeyIndex = remainingAccounts.insertOrGet(addressQueue); + const packedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + addressQueuePubkeyIndex, + }; + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .createAccount(proof, packedAddressTreeInfo, outputStateTreeIndex, message) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + 
tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} + +async function burnCompressedAccount( + rpc: Rpc, + compressedAccount: CompressedAccountWithMerkleContext, + program: anchor.Program, + signer: anchor.web3.Keypair, + currentMessage: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: compressedAccount.hash, + tree: compressedAccount.treeInfo.tree, + queue: compressedAccount.treeInfo.queue, + }, + ], + [], + ); + + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const merkleTreePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.tree, + ); + const queuePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.queue, + ); + + // CompressedAccountMetaBurn does not have output_state_tree_index + const compressedAccountMeta = { + treeInfo: { + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: compressedAccount.leafIndex, + proveByIndex: false, + rootIndex: proofRpcResult.rootIndices[0], + }, + address: compressedAccount.address, + }; + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .burnAccount(proof, compressedAccountMeta, currentMessage) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx 
b/.context/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx new file mode 100644 index 00000000..dbd4fa7b --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx @@ -0,0 +1,24 @@ +--- +title: "basic-operations/anchor/burn/tsconfig.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/burn/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "noEmit": true, + "noUnusedLocals": false, + "noUnusedParameters": false + }, + "include": ["tests/**/*", "migrations/**/*"], + "exclude": ["node_modules", "target"] +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx new file mode 100644 index 00000000..489d4344 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx @@ -0,0 +1,25 @@ +--- +title: "basic-operations/anchor/close/Anchor.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/Anchor.toml" +--- + +```toml +[toolchain] + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +close = "DzQ3za3DVCpXkXhmZVSrNchwbbSsJXmi9MBc8v5tvZuQ" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx new file mode 100644 index 00000000..a192615d --- /dev/null +++ 
b/.context/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx @@ -0,0 +1,22 @@ +--- +title: "basic-operations/anchor/close/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/close", +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 + +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/package-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/package-json.mdx new file mode 100644 index 00000000..484909b0 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/package-json.mdx @@ -0,0 +1,30 @@ +--- +title: "basic-operations/anchor/close/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + "test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + "@coral-xyz/anchor": "0.31.1", + "@lightprotocol/stateless.js": "0.22.1-alpha.1", + "dotenv": "^16.5.0" + }, + "devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^5.0.0" + } +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx new file mode 100644 index 00000000..f93773de --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx @@ -0,0 +1,45 @@ 
+--- +title: "basic-operations/anchor/close/programs/close/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/programs/close/Cargo.toml" +--- + +```toml +[package] +name = "close" +version = "0.1.0" +description = "Created with Anchor" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "close" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +default = ["idl-build"] +test-sbf = [] +idl-build = ["anchor-lang/idl-build", "light-sdk/idl-build"] + +[dependencies] +anchor-lang = "0.31.1" +light-sdk = { version = "0.16.0", features = ["anchor"] } +light-sdk-types = { version = "0.16.0", features = ["anchor"] } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-sdk = "2.2" + +[dev-dependencies] +light-client = { version = "0.16.0" } +light-program-test = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx new file mode 100644 index 00000000..a1e22549 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "basic-operations/anchor/close/programs/close/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/programs/close/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx new file mode 100644 index 
00000000..af535a67 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx @@ -0,0 +1,121 @@ +--- +title: "basic-operations/anchor/close/programs/close/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/programs/close/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{v1::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; + +declare_id!("DzQ3za3DVCpXkXhmZVSrNchwbbSsJXmi9MBc8v5tvZuQ"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("DzQ3za3DVCpXkXhmZVSrNchwbbSsJXmi9MBc8v5tvZuQ"); + +#[program] +pub mod close { + + use super::*; + use light_sdk::cpi::{ + v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + + /// Setup: Create a compressed account + pub fn create_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[b"message", ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let mut my_compressed_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + my_compressed_account.owner = ctx.accounts.signer.key(); + my_compressed_account.message = message.clone(); + + msg!( + "Created compressed 
account with message: {}", + my_compressed_account.message + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? + .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Close compressed account + pub fn close_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + account_meta: CompressedAccountMeta, + current_message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_close( + &crate::ID, + &account_meta, + MyCompressedAccount { + owner: ctx.accounts.signer.key(), + message: current_message, + }, + )?; + + msg!("Close compressed account."); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)?
+ .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +#[event] +#[derive(Clone, Debug, Default, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx new file mode 100644 index 00000000..febeaa99 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx @@ -0,0 +1,167 @@ +--- +title: "basic-operations/anchor/close/programs/close/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/programs/close/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_close() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("close", close::ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let (address, _) = light_sdk::address::v1::derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_info.tree, + &close::ID, + ); + + create_compressed_account( + &mut rpc, + &payer, + &address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + let account = rpc.get_compressed_account(address, None) + .await + .unwrap() + .value 
+ .unwrap(); + close_compressed_account(&mut rpc, &payer, account, "Hello, compressed world!".to_string()) + .await + .unwrap(); + + let closed = rpc.get_compressed_account(address, None).await.unwrap().value.unwrap(); + assert_eq!(closed.address.unwrap(), address); + assert_eq!(closed.owner, close::ID); + + let data = closed.data.unwrap(); + assert_eq!(data.discriminator, [0u8; 8]); + assert!(data.data.is_empty()); + assert_eq!(data.data_hash, [0u8; 32]); +} + +async fn close_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + compressed_account: CompressedAccount, + message: String, +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(close::ID); + remaining_accounts.add_system_accounts(config)?; + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? + .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: close::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + close::instruction::CloseAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + current_message: message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn create_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + address: &[u8; 32], + message: String, +) -> Result { + let config = SystemAccountMetaConfig::new(close::ID); + let mut remaining_accounts = 
PackedAccounts::default(); + remaining_accounts.add_system_accounts(config)?; + + let address_tree_info = rpc.get_address_tree_v1(); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![light_program_test::AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + let packed_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + + let output_state_tree_index = rpc + .get_random_state_tree_info()? + .pack_output_tree_index(&mut remaining_accounts)?; + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: close::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + close::instruction::CreateAccount { + proof: rpc_result.proof, + address_tree_info: packed_accounts.address_trees[0], + output_state_tree_index: output_state_tree_index, + message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx new file mode 100644 index 00000000..b79e4d51 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx @@ -0,0 +1,245 @@ +--- +title: "basic-operations/anchor/close/tests/close.ts" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/tests/close.ts" +--- + +```typescript +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { Close } from "../target/types/close"; +import closeIdl from "../target/idl/close.json"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + defaultStaticAccountsStruct, + defaultTestStateTreeAccounts, + 
deriveAddress, + deriveAddressSeed, + LightSystemProgram, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, +} from "@lightprotocol/stateless.js"; +import * as assert from "assert"; + +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +const anchorWalletPath = path.join(os.homedir(), ".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("test-anchor-close", () => { + const closeProgram = anchor.workspace.Close as Program; + const closeCoder = new anchor.BorshCoder(closeIdl as anchor.Idl); + + it("close compressed account", async () => { + let signer = new web3.Keypair(); + let rpc = createRpc( + "http://127.0.0.1:8899", + "http://127.0.0.1:8784", + "http://127.0.0.1:3001", + { + commitment: "confirmed", + }, + ); + let lamports = web3.LAMPORTS_PER_SOL; + await rpc.requestAirdrop(signer.publicKey, lamports); + await sleep(2000); + + const outputStateTree = defaultTestStateTreeAccounts().merkleTree; + const addressTree = defaultTestStateTreeAccounts().addressTree; + const addressQueue = defaultTestStateTreeAccounts().addressQueue; + + const messageSeed = new TextEncoder().encode("message"); + const seed = deriveAddressSeed( + [messageSeed, signer.publicKey.toBytes()], + new web3.PublicKey(closeProgram.idl.address), + ); + const address = deriveAddress(seed, addressTree); + + const createTxId = await createCompressedAccount( + rpc, + addressTree, + addressQueue, + address, + closeProgram, + outputStateTree, + signer, + "Hello, compressed world!", + ); + console.log("Create Transaction ID:", createTxId); + + // Wait for indexer to process the create transaction + let slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + let compressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + let myAccount = closeCoder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + assert.strictEqual(myAccount.message, "Hello, compressed world!"); 
+ assert.ok(myAccount.owner.equals(signer.publicKey), "Owner should match signer public key"); + console.log("Created message:", myAccount.message); + + const closeTxId = await closeCompressedAccount( + rpc, + compressedAccount, + closeProgram, + outputStateTree, + signer, + "Hello, compressed world!", + ); + console.log("Close Transaction ID:", closeTxId); + + // Wait for indexer to process the close transaction + slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + // After closing, the account exists with zero data. + // Verify the account was closed by checking that data.data is empty. + const closedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + assert.ok( + closedAccount.data.data === null || + (Buffer.isBuffer(closedAccount.data.data) && closedAccount.data.data.length === 0), + "Closed account should have null or empty data.data" + ); + console.log("Verified account was closed (data.data is empty as expected)"); + }); +}); + +async function createCompressedAccount( + rpc: Rpc, + addressTree: anchor.web3.PublicKey, + addressQueue: anchor.web3.PublicKey, + address: anchor.web3.PublicKey, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTree, + queue: addressQueue, + address: bn(address.toBytes()), + }, + ], + ); + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const addressMerkleTreePubkeyIndex = + remainingAccounts.insertOrGet(addressTree); + const addressQueuePubkeyIndex = remainingAccounts.insertOrGet(addressQueue); + const packedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + addressQueuePubkeyIndex, + }; + const outputStateTreeIndex = + 
remainingAccounts.insertOrGet(outputStateTree); + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .createAccount(proof, packedAddressTreeInfo, outputStateTreeIndex, message) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} + +async function closeCompressedAccount( + rpc: Rpc, + compressedAccount: CompressedAccountWithMerkleContext, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: compressedAccount.hash, + tree: compressedAccount.treeInfo.tree, + queue: compressedAccount.treeInfo.queue, + }, + ], + [], + ); + + const merkleTreePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.tree, + ); + const queuePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.queue, + ); + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + + const coder = new anchor.BorshCoder(closeIdl as anchor.Idl); + const currentAccount = coder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + + const compressedAccountMeta = { + treeInfo: { + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: compressedAccount.leafIndex, + proveByIndex: false, + rootIndex: proofRpcResult.rootIndices[0], + }, + 
address: compressedAccount.address, + outputStateTreeIndex, + }; + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .closeAccount(proof, compressedAccountMeta, message) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx new file mode 100644 index 00000000..89679a59 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx @@ -0,0 +1,24 @@ +--- +title: "basic-operations/anchor/close/tsconfig.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/close/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "noEmit": true, + "noUnusedLocals": false, + "noUnusedParameters": false + }, + "include": ["tests/**/*", "migrations/**/*"], + "exclude": ["node_modules", "target"] +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx new file mode 100644 index 00000000..7f4dccec --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx @@ -0,0 +1,26 @@ +--- +title: 
"basic-operations/anchor/create/Anchor.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/Anchor.toml" +--- + +```toml +[toolchain] +anchor_version = "0.31.1" + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +create = "Hps5oaKdYWqjVZJnAxUE1uwbozwEgZZGCRA57p2wdqcS" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx new file mode 100644 index 00000000..af277840 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx @@ -0,0 +1,22 @@ +--- +title: "basic-operations/anchor/create/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/create", +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 + +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/package-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/package-json.mdx new file mode 100644 index 00000000..d58312ea --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/package-json.mdx @@ -0,0 +1,30 @@ +--- +title: "basic-operations/anchor/create/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + 
"test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + "@coral-xyz/anchor": "0.31.1", + "@lightprotocol/stateless.js": "0.22.1-alpha.1", + "dotenv": "^16.5.0" + }, + "devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^5.0.0" + } +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx new file mode 100644 index 00000000..fd51b5d2 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx @@ -0,0 +1,46 @@ +--- +title: "basic-operations/anchor/create/programs/create/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/programs/create/Cargo.toml" +--- + +```toml +[package] +name = "create" +version = "0.1.0" +description = "Created with Anchor" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "create" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +default = ["idl-build"] +test-sbf = [] +idl-build = ["anchor-lang/idl-build", "light-sdk/idl-build"] + +[dependencies] +anchor-lang = "0.31.1" +light-sdk = { version = "0.16.0", features = ["anchor"] } +light-hasher = { version = "3.1.0", features = ["solana"] } +light-sdk-types = { version = "0.16.0", features = ["anchor"] } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-sdk = "2.2" + +[dev-dependencies] +light-client = { version = "0.16.0" } +light-program-test = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git 
a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx new file mode 100644 index 00000000..dd152f9c --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "basic-operations/anchor/create/programs/create/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/programs/create/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx new file mode 100644 index 00000000..f538a966 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx @@ -0,0 +1,91 @@ +--- +title: "basic-operations/anchor/create/programs/create/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/programs/create/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{v1::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; + +declare_id!("Hps5oaKdYWqjVZJnAxUE1uwbozwEgZZGCRA57p2wdqcS"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("Hps5oaKdYWqjVZJnAxUE1uwbozwEgZZGCRA57p2wdqcS"); + +#[program] +pub mod create { + + use super::*; + use light_sdk::cpi::{ + v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + + /// Creates a new compressed account + pub fn 
create_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[b"message", ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let mut my_compressed_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + my_compressed_account.owner = ctx.accounts.signer.key(); + my_compressed_account.message = message.clone(); + + msg!( + "Created compressed account with message: {}", + my_compressed_account.message + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? + .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +// declared as event so that it is part of the idl. 
+#[event] +#[derive(Clone, Debug, Default, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx new file mode 100644 index 00000000..622267ed --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx @@ -0,0 +1,106 @@ +--- +title: "basic-operations/anchor/create/programs/create/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/programs/create/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use anchor_lang::AnchorDeserialize; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v1::derive_address, + instruction::{PackedAccounts, SystemAccountMetaConfig}, +}; +use create::MyCompressedAccount; +use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_create() { + let config = ProgramTestConfig::new(true, Some(vec![("create", create::ID)])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + + let (address, _) = derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_info.tree, + &create::ID, + ); + + create_compressed_account(&mut rpc, &payer, &address, "Hello, compressed world!".to_string()) + .await + .unwrap(); + + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + let data = &compressed_account.data.as_ref().unwrap().data; + let account = MyCompressedAccount::deserialize(&mut 
&data[..]).unwrap(); + assert_eq!(account.owner, payer.pubkey()); + assert_eq!(account.message, "Hello, compressed world!"); +} + +async fn create_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + address: &[u8; 32], + message: String, +) -> Result { + let config = SystemAccountMetaConfig::new(create::ID); + let mut remaining_accounts = PackedAccounts::default(); + remaining_accounts.add_system_accounts(config)?; + + let address_tree_info = rpc.get_address_tree_v1(); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + let packed_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + + let output_state_tree_index = rpc + .get_random_state_tree_info()? + .pack_output_tree_index(&mut remaining_accounts)?; + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: create::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + create::instruction::CreateAccount { + proof: rpc_result.proof, + address_tree_info: packed_accounts.address_trees[0], + output_state_tree_index: output_state_tree_index, + message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx new file mode 100644 index 00000000..2178629f --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx @@ -0,0 +1,159 @@ +--- +title: "basic-operations/anchor/create/tests/create.ts" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/tests/create.ts" +--- + 
+```typescript +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { Create } from "../target/types/create"; +import idl from "../target/idl/create.json"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + defaultStaticAccountsStruct, + defaultTestStateTreeAccounts, + deriveAddress, + deriveAddressSeed, + LightSystemProgram, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, +} from "@lightprotocol/stateless.js"; +import * as assert from "assert"; + +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +const anchorWalletPath = path.join(os.homedir(), ".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("test-anchor", () => { + const program = anchor.workspace.Create as Program; + const coder = new anchor.BorshCoder(idl as anchor.Idl); + + it("create compressed account", async () => { + let signer = new web3.Keypair(); + let rpc = createRpc( + "http://127.0.0.1:8899", + "http://127.0.0.1:8784", + "http://127.0.0.1:3001", + { + commitment: "confirmed", + }, + ); + let lamports = web3.LAMPORTS_PER_SOL; + await rpc.requestAirdrop(signer.publicKey, lamports); + await sleep(2000); + + const outputStateTree = defaultTestStateTreeAccounts().merkleTree; + const addressTree = defaultTestStateTreeAccounts().addressTree; + const addressQueue = defaultTestStateTreeAccounts().addressQueue; + + const messageSeed = new TextEncoder().encode("message"); + const seed = deriveAddressSeed( + [messageSeed, signer.publicKey.toBytes()], + new web3.PublicKey(program.idl.address), + ); + const address = deriveAddress(seed, addressTree); + + // Create compressed account with message + const txId = await createCompressedAccount( + rpc, + addressTree, + addressQueue, + address, + program, + outputStateTree, + signer, + "Hello, compressed world!", + ); + console.log("Transaction ID:", txId); + + // Wait for indexer to process the 
transaction + const slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + let compressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + let myAccount = coder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + + console.log("Decoded data owner:", myAccount.owner.toBase58()); + console.log("Decoded data message:", myAccount.message); + + // Verify account data + assert.ok( + myAccount.owner.equals(signer.publicKey), + "Owner should match signer public key" + ); + assert.strictEqual( + myAccount.message, + "Hello, compressed world!", + "Message should match the created message" + ); + }); +}); + +async function createCompressedAccount( + rpc: Rpc, + addressTree: anchor.web3.PublicKey, + addressQueue: anchor.web3.PublicKey, + address: anchor.web3.PublicKey, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTree, + queue: addressQueue, + address: bn(address.toBytes()), + }, + ], + ); + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const addressMerkleTreePubkeyIndex = + remainingAccounts.insertOrGet(addressTree); + const addressQueuePubkeyIndex = remainingAccounts.insertOrGet(addressQueue); + const packedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + addressQueuePubkeyIndex, + }; + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .createAccount(proof, packedAddressTreeInfo, outputStateTreeIndex, message) + .accounts({ + signer: 
signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx new file mode 100644 index 00000000..13b03d5c --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx @@ -0,0 +1,24 @@ +--- +title: "basic-operations/anchor/create/tsconfig.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/create/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "noEmit": true, + "noUnusedLocals": false, + "noUnusedParameters": false + }, + "include": ["tests/**/*", "migrations/**/*"], + "exclude": ["node_modules", "target"] +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/package-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/package-json.mdx new file mode 100644 index 00000000..8fb1bf23 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/package-json.mdx @@ -0,0 +1,19 @@ +--- +title: "basic-operations/anchor/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/package.json" +--- + +```json +{ + "name": "basic-operations-anchor", + "version": "1.0.0", + "private": true, + "workspaces": [ + "burn", + "create", + "update", + "close", + "reinit" + ] +} +``` diff --git 
a/.context/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx new file mode 100644 index 00000000..3b9f114d --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx @@ -0,0 +1,25 @@ +--- +title: "basic-operations/anchor/reinit/Anchor.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/Anchor.toml" +--- + +```toml +[toolchain] + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +reinit = "DeSUZ4to3qN7mQimoTgvEnBXiBWeTqMVcMz3ynLaWx1t" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx new file mode 100644 index 00000000..fd5d8066 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx @@ -0,0 +1,22 @@ +--- +title: "basic-operations/anchor/reinit/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/reinit", +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 + +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx new file mode 100644 index 00000000..83b69587 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx @@ -0,0 +1,30 @@ +--- +title: 
"basic-operations/anchor/reinit/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + "test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + "@coral-xyz/anchor": "0.31.1", + "@lightprotocol/stateless.js": "0.22.1-alpha.1", + "dotenv": "^16.5.0" + }, + "devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^5.0.0" + } +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx new file mode 100644 index 00000000..73714a59 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx @@ -0,0 +1,45 @@ +--- +title: "basic-operations/anchor/reinit/programs/reinit/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/programs/reinit/Cargo.toml" +--- + +```toml +[package] +name = "reinit" +version = "0.1.0" +description = "Created with Anchor" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "reinit" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +default = ["idl-build"] +test-sbf = [] +idl-build = ["anchor-lang/idl-build", "light-sdk/idl-build"] + +[dependencies] +anchor-lang = "0.31.1" +light-sdk = { version = "0.16.0", features = ["anchor"] } +light-sdk-types = { version = "0.16.0", features = ["anchor"] } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-sdk = "2.2" + +[dev-dependencies] +light-client 
= { version = "0.16.0" } +light-program-test = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx new file mode 100644 index 00000000..0f25c096 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "basic-operations/anchor/reinit/programs/reinit/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/programs/reinit/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx new file mode 100644 index 00000000..46bf363e --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx @@ -0,0 +1,147 @@ +--- +title: "basic-operations/anchor/reinit/programs/reinit/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/programs/reinit/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{v1::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; + +declare_id!("DeSUZ4to3qN7mQimoTgvEnBXiBWeTqMVcMz3ynLaWx1t"); + +pub const LIGHT_CPI_SIGNER: CpiSigner 
= + derive_light_cpi_signer!("DeSUZ4to3qN7mQimoTgvEnBXiBWeTqMVcMz3ynLaWx1t"); + +#[program] +pub mod reinit { + + use super::*; + use light_sdk::cpi::{ + v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + + /// Setup: Create a compressed account + pub fn create_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[b"message", ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let mut my_compressed_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + my_compressed_account.owner = ctx.accounts.signer.key(); + my_compressed_account.message = message.clone(); + + msg!( + "Created compressed account with message: {}", + my_compressed_account.message + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? 
+ .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Setup: Close compressed account + pub fn close_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + account_meta: CompressedAccountMeta, + current_message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let my_compressed_account = LightAccount::::new_close( + &crate::ID, + &account_meta, + MyCompressedAccount { + owner: ctx.accounts.signer.key(), + message: current_message, + }, + )?; + + msg!("Close compressed account."); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Reinitialize closed compressed account + pub fn reinit_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let my_compressed_account = LightAccount::::new_empty( + &crate::ID, + &account_meta, + )?; + + msg!("Reinitializing closed compressed account"); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? 
+ .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +#[event] +#[derive(Clone, Debug, Default, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx new file mode 100644 index 00000000..6ba57cad --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx @@ -0,0 +1,231 @@ +--- +title: "basic-operations/anchor/reinit/programs/reinit/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/programs/reinit/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use light_sdk::LightDiscriminator; +use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_reinit() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("reinit", reinit::ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let (address, _) = light_sdk::address::v1::derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_info.tree, + &reinit::ID, + ); + + create_compressed_account( + &mut rpc, + &payer, + &address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + let account = 
rpc.get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + close_compressed_account(&mut rpc, &payer, account, "Hello, compressed world!".to_string()) + .await + .unwrap(); + + let closed = rpc.get_compressed_account(address, None).await.unwrap().value.unwrap(); + assert_eq!(closed.address.as_ref().unwrap(), &address); + assert_eq!(closed.owner, reinit::ID); + + let data = closed.data.as_ref().unwrap(); + assert_eq!(data.discriminator, [0u8; 8]); + assert!(data.data.is_empty()); + assert_eq!(data.data_hash, [0u8; 32]); + + // Reinitialize the closed account + reinit_compressed_account(&mut rpc, &payer, closed) + .await + .unwrap(); + + // Verify reinitialized account has default values + let reinitialized = rpc.get_compressed_account(address, None).await.unwrap().value.unwrap(); + assert_eq!(reinitialized.address.as_ref().unwrap(), &address); + assert_eq!(reinitialized.owner, reinit::ID); + + let data = reinitialized.data.as_ref().unwrap(); + // Default MyCompressedAccount should have empty message and default pubkey + assert_eq!(data.discriminator, reinit::MyCompressedAccount::LIGHT_DISCRIMINATOR); + assert!(!data.data.is_empty()); // Has default-initialized data now +} + +async fn close_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + compressed_account: CompressedAccount, + message: String, +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(reinit::ID); + remaining_accounts.add_system_accounts(config)?; + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: reinit::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + reinit::instruction::CloseAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + current_message: message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn reinit_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + compressed_account: CompressedAccount, +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(reinit::ID); + remaining_accounts.add_system_accounts(config)?; + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: reinit::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + reinit::instruction::ReinitAccount { + proof: rpc_result.proof, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn create_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + address: &[u8; 32], + message: String, +) -> Result { + let config = SystemAccountMetaConfig::new(reinit::ID); + let mut remaining_accounts = PackedAccounts::default(); + remaining_accounts.add_system_accounts(config)?; + + let address_tree_info = rpc.get_address_tree_v1(); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![light_program_test::AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + let packed_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + + let output_state_tree_index = rpc + .get_random_state_tree_info()? 
+ .pack_output_tree_index(&mut remaining_accounts)?; + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: reinit::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + reinit::instruction::CreateAccount { + proof: rpc_result.proof, + address_tree_info: packed_accounts.address_trees[0], + output_state_tree_index: output_state_tree_index, + message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx new file mode 100644 index 00000000..6e3e24f0 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx @@ -0,0 +1,330 @@ +--- +title: "basic-operations/anchor/reinit/tests/reinit.ts" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/tests/reinit.ts" +--- + +```typescript +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { Reinit } from "../target/types/reinit"; +import reinitIdl from "../target/idl/reinit.json"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + defaultStaticAccountsStruct, + defaultTestStateTreeAccounts, + deriveAddress, + deriveAddressSeed, + LightSystemProgram, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, +} from "@lightprotocol/stateless.js"; +import * as assert from "assert"; + +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +const anchorWalletPath = path.join(os.homedir(), ".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("test-anchor-reinit", () => { + const 
reinitProgram = anchor.workspace.Reinit as Program; + const reinitCoder = new anchor.BorshCoder(reinitIdl as anchor.Idl); + + it("reinitialize compressed account", async () => { + let signer = new web3.Keypair(); + let rpc = createRpc( + "http://127.0.0.1:8899", + "http://127.0.0.1:8784", + "http://127.0.0.1:3001", + { + commitment: "confirmed", + }, + ); + let lamports = web3.LAMPORTS_PER_SOL; + await rpc.requestAirdrop(signer.publicKey, lamports); + await sleep(2000); + + const outputStateTree = defaultTestStateTreeAccounts().merkleTree; + const addressTree = defaultTestStateTreeAccounts().addressTree; + const addressQueue = defaultTestStateTreeAccounts().addressQueue; + + const messageSeed = new TextEncoder().encode("message"); + const seed = deriveAddressSeed( + [messageSeed, signer.publicKey.toBytes()], + new web3.PublicKey(reinitProgram.idl.address), + ); + const address = deriveAddress(seed, addressTree); + + const createTxId = await createCompressedAccount( + rpc, + addressTree, + addressQueue, + address, + reinitProgram, + outputStateTree, + signer, + "Hello, compressed world!", + ); + console.log("Create Transaction ID:", createTxId); + + // Wait for indexer to process the transaction + let slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + let compressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + let myAccount = reinitCoder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + assert.strictEqual(myAccount.message, "Hello, compressed world!"); + assert.ok(myAccount.owner.equals(signer.publicKey), "Owner should match signer public key"); + console.log("Created message:", myAccount.message); + + const closeTxId = await closeCompressedAccount( + rpc, + compressedAccount, + reinitProgram, + outputStateTree, + signer, + "Hello, compressed world!", + ); + console.log("Close Transaction ID:", closeTxId); + + // Wait for indexer to process the close transaction + slot = await rpc.getSlot(); + 
await rpc.confirmTransactionIndexed(slot); + + let closedCompressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + + // The getValidityProofV0 call will fetch the current closed account state. + const reinitTxId = await reinitCompressedAccount( + rpc, + closedCompressedAccount, + reinitProgram, + outputStateTree, + signer, + ); + console.log("Reinit Transaction ID:", reinitTxId); + + // Wait for indexer to process the reinit transaction + slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + // Verify the account was reinitialized with default values + let reinitializedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + let reinitMyAccount = reinitCoder.types.decode( + "MyCompressedAccount", + reinitializedAccount.data.data, + ); + assert.strictEqual(reinitMyAccount.message, "", "Message should be empty (default)"); + assert.ok( + reinitMyAccount.owner.equals(web3.PublicKey.default), + "Owner should be default PublicKey" + ); + console.log("Compressed account was reinitialized with default values"); + }); +}); + +async function createCompressedAccount( + rpc: Rpc, + addressTree: anchor.web3.PublicKey, + addressQueue: anchor.web3.PublicKey, + address: anchor.web3.PublicKey, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTree, + queue: addressQueue, + address: bn(address.toBytes()), + }, + ], + ); + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const addressMerkleTreePubkeyIndex = + remainingAccounts.insertOrGet(addressTree); + const addressQueuePubkeyIndex = remainingAccounts.insertOrGet(addressQueue); + const packedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + 
addressQueuePubkeyIndex, + }; + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .createAccount(proof, packedAddressTreeInfo, outputStateTreeIndex, message) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} + +async function closeCompressedAccount( + rpc: Rpc, + compressedAccount: CompressedAccountWithMerkleContext, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: compressedAccount.hash, + tree: compressedAccount.treeInfo.tree, + queue: compressedAccount.treeInfo.queue, + }, + ], + [], + ); + + const merkleTreePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.tree, + ); + const queuePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.queue, + ); + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + + const coder = new anchor.BorshCoder(reinitIdl as anchor.Idl); + const currentAccount = coder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + + const compressedAccountMeta = { + treeInfo: { + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: compressedAccount.leafIndex, + 
proveByIndex: false, + rootIndex: proofRpcResult.rootIndices[0], + }, + address: compressedAccount.address, + outputStateTreeIndex, + }; + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .closeAccount(proof, compressedAccountMeta, message) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} + +async function reinitCompressedAccount( + rpc: Rpc, + compressedAccount: CompressedAccountWithMerkleContext, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, +) { + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: compressedAccount.hash, + tree: compressedAccount.treeInfo.tree, + queue: compressedAccount.treeInfo.queue, + }, + ], + [], + ); + + const merkleTreePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.tree, + ); + const queuePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.queue, + ); + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + + const compressedAccountMeta = { + treeInfo: { + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: compressedAccount.leafIndex, + proveByIndex: false, + rootIndex: proofRpcResult.rootIndices[0], + }, + address: compressedAccount.address, + outputStateTreeIndex, + }; + + let proof = { + 0: proofRpcResult.compressedProof, + }; + 
const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .reinitAccount(proof, compressedAccountMeta) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx new file mode 100644 index 00000000..4484cf26 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx @@ -0,0 +1,24 @@ +--- +title: "basic-operations/anchor/reinit/tsconfig.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/reinit/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "noEmit": true, + "noUnusedLocals": false, + "noUnusedParameters": false + }, + "include": ["tests/**/*", "migrations/**/*"], + "exclude": ["node_modules", "target"] +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx new file mode 100644 index 00000000..eae8bd24 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx @@ -0,0 +1,25 @@ +--- +title: "basic-operations/anchor/update/Anchor.toml" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/Anchor.toml" +--- + +```toml +[toolchain] + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +update = "Cj3DxyqB7wJh511VKexsjKt7Hx1kvPvCBMrbLuL8grKc" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx new file mode 100644 index 00000000..2961e265 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx @@ -0,0 +1,22 @@ +--- +title: "basic-operations/anchor/update/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/update", +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 + +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/package-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/package-json.mdx new file mode 100644 index 00000000..b34c2655 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/package-json.mdx @@ -0,0 +1,30 @@ +--- +title: "basic-operations/anchor/update/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + "test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + 
"@coral-xyz/anchor": "0.31.1", + "@lightprotocol/stateless.js": "0.22.1-alpha.1", + "dotenv": "^16.5.0" + }, + "devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^5.0.0" + } +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx new file mode 100644 index 00000000..03a8165f --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx @@ -0,0 +1,45 @@ +--- +title: "basic-operations/anchor/update/programs/update/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/programs/update/Cargo.toml" +--- + +```toml +[package] +name = "update" +version = "0.1.0" +description = "Created with Anchor" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "update" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +default = ["idl-build"] +test-sbf = [] +idl-build = ["anchor-lang/idl-build", "light-sdk/idl-build"] + +[dependencies] +anchor-lang = "0.31.1" +light-sdk = { version = "0.16.0", features = ["anchor"] } +light-sdk-types = { version = "0.16.0", features = ["anchor"] } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-sdk = "2.2" + +[dev-dependencies] +light-client = { version = "0.16.0" } +light-program-test = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx 
b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx new file mode 100644 index 00000000..13694624 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "basic-operations/anchor/update/programs/update/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/programs/update/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx new file mode 100644 index 00000000..be758837 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx @@ -0,0 +1,124 @@ +--- +title: "basic-operations/anchor/update/programs/update/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/programs/update/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{v1::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; + +declare_id!("Cj3DxyqB7wJh511VKexsjKt7Hx1kvPvCBMrbLuL8grKc"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("Cj3DxyqB7wJh511VKexsjKt7Hx1kvPvCBMrbLuL8grKc"); + +#[program] +pub mod update { + + use super::*; + use light_sdk::cpi::{ + v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + + /// Setup: Creates a compressed account + pub fn create_account<'info>( + ctx: Context<'_, '_, '_, 'info, 
GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[b"message", ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let mut my_compressed_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + my_compressed_account.owner = ctx.accounts.signer.key(); + my_compressed_account.message = message.clone(); + + msg!( + "Created compressed account with message: {}", + my_compressed_account.message + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? + .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Updates an existing compressed account's message + pub fn update_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + current_account: MyCompressedAccount, + account_meta: CompressedAccountMeta, + new_message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let mut my_compressed_account = LightAccount::::new_mut( + &crate::ID, + &account_meta, + current_account, + )?; + + my_compressed_account.message = new_message.clone(); + + msg!( + "Updated compressed account message to: {}", + my_compressed_account.message + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(my_compressed_account)? 
+ .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +#[event] +#[derive(Clone, Debug, Default, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx new file mode 100644 index 00000000..a6e72d21 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx @@ -0,0 +1,176 @@ +--- +title: "basic-operations/anchor/update/programs/update/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/programs/update/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use anchor_lang::AnchorDeserialize; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use update::MyCompressedAccount; +use solana_sdk::{ + instruction::{AccountMeta, Instruction}, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_update() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("update", update::ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + // Create account first + let address_tree_info = rpc.get_address_tree_v1(); + let (address, _) = light_sdk::address::v1::derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_info.tree, + &update::ID, + ); + + create_compressed_account( + &mut rpc, + &payer, + &address, + "Hello, compressed world!".to_string(), + ) + 
.await + .unwrap(); + + let account = rpc.get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + update_compressed_account(&mut rpc, &payer, account, "Updated message!".to_string()) + .await + .unwrap(); + + let updated_account = rpc.get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + let data = &updated_account.data.as_ref().unwrap().data; + let updated = MyCompressedAccount::deserialize(&mut &data[..]).unwrap(); + assert_eq!(updated.owner, payer.pubkey()); + assert_eq!(updated.message, "Updated message!"); +} + +async fn update_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + compressed_account: CompressedAccount, + new_message: String, +) -> Result { + let mut remaining_accounts = PackedAccounts::default(); + + let config = SystemAccountMetaConfig::new(update::ID); + remaining_accounts.add_system_accounts(config)?; + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let current_account = MyCompressedAccount::deserialize( + &mut compressed_account.data.as_ref().unwrap().data.as_slice(), + )?; + + let instruction = Instruction { + program_id: update::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + update::instruction::UpdateAccount { + proof: rpc_result.proof, + current_account, + account_meta: CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }, + new_message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn create_compressed_account( + rpc: &mut LightProgramTest, + payer: &Keypair, + address: &[u8; 32], + message: String, +) -> Result { + let config = SystemAccountMetaConfig::new(update::ID); + let mut remaining_accounts = PackedAccounts::default(); + remaining_accounts.add_system_accounts(config)?; + + let address_tree_info = rpc.get_address_tree_v1(); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![light_program_test::AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + let packed_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + + let output_state_tree_index = rpc + .get_random_state_tree_info()? 
+ .pack_output_tree_index(&mut remaining_accounts)?; + + let (remaining_accounts, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: update::ID, + accounts: [ + vec![AccountMeta::new(payer.pubkey(), true)], + remaining_accounts, + ] + .concat(), + data: { + use anchor_lang::InstructionData; + update::instruction::CreateAccount { + proof: rpc_result.proof, + address_tree_info: packed_accounts.address_trees[0], + output_state_tree_index: output_state_tree_index, + message, + } + .data() + }, + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx new file mode 100644 index 00000000..a4d629b8 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx @@ -0,0 +1,250 @@ +--- +title: "basic-operations/anchor/update/tests/update.ts" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/tests/update.ts" +--- + +```typescript +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { Update } from "../target/types/update"; +import updateIdl from "../target/idl/update.json"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + defaultStaticAccountsStruct, + defaultTestStateTreeAccounts, + deriveAddress, + deriveAddressSeed, + LightSystemProgram, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, +} from "@lightprotocol/stateless.js"; +import * as assert from "assert"; + +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +const anchorWalletPath = path.join(os.homedir(), ".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("test-anchor-update", () => { + const 
updateProgram = anchor.workspace.Update as Program; + const updateCoder = new anchor.BorshCoder(updateIdl as anchor.Idl); + + it("update compressed account message", async () => { + let signer = new web3.Keypair(); + let rpc = createRpc( + "http://127.0.0.1:8899", + "http://127.0.0.1:8784", + "http://127.0.0.1:3001", + { + commitment: "confirmed", + }, + ); + let lamports = web3.LAMPORTS_PER_SOL; + await rpc.requestAirdrop(signer.publicKey, lamports); + await sleep(2000); + + const outputStateTree = defaultTestStateTreeAccounts().merkleTree; + const addressTree = defaultTestStateTreeAccounts().addressTree; + const addressQueue = defaultTestStateTreeAccounts().addressQueue; + + const messageSeed = new TextEncoder().encode("message"); + const seed = deriveAddressSeed( + [messageSeed, signer.publicKey.toBytes()], + new web3.PublicKey(updateProgram.idl.address), + ); + const address = deriveAddress(seed, addressTree); + + // Step 1: Create compressed account with initial message using update program's create_account + const createTxId = await createCompressedAccount( + rpc, + addressTree, + addressQueue, + address, + updateProgram, + outputStateTree, + signer, + "Hello, compressed world!", + ); + console.log("Create Transaction ID:", createTxId); + + // Wait for indexer to process the create transaction + let slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + // Step 2: Get the created account + let compressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + let myAccount = updateCoder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + assert.strictEqual(myAccount.message, "Hello, compressed world!"); + assert.ok(myAccount.owner.equals(signer.publicKey), "Owner should match signer public key"); + console.log("Created message:", myAccount.message); + + // Step 3: Update the account with new message + const updateTxId = await updateCompressedAccount( + rpc, + compressedAccount, + updateProgram, + 
outputStateTree, + signer, + "Hello again, compressed World!", + ); + console.log("Update Transaction ID:", updateTxId); + + // Wait for indexer to process the update transaction + slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + + // Step 4: Verify the update + compressedAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + myAccount = updateCoder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + console.log("Updated message:", myAccount.message); + + assert.ok(myAccount.owner.equals(signer.publicKey), "Owner should match signer public key"); + assert.strictEqual(myAccount.message, "Hello again, compressed World!", "Message should be updated"); + }); +}); + +async function createCompressedAccount( + rpc: Rpc, + addressTree: anchor.web3.PublicKey, + addressQueue: anchor.web3.PublicKey, + address: anchor.web3.PublicKey, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + message: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTree, + queue: addressQueue, + address: bn(address.toBytes()), + }, + ], + ); + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const addressMerkleTreePubkeyIndex = + remainingAccounts.insertOrGet(addressTree); + const addressQueuePubkeyIndex = remainingAccounts.insertOrGet(addressQueue); + const packedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + addressQueuePubkeyIndex, + }; + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .createAccount(proof, packedAddressTreeInfo, 
outputStateTreeIndex, message) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} + +async function updateCompressedAccount( + rpc: Rpc, + compressedAccount: CompressedAccountWithMerkleContext, + program: anchor.Program, + outputStateTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair, + newMessage: string, +) { + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: compressedAccount.hash, + tree: compressedAccount.treeInfo.tree, + queue: compressedAccount.treeInfo.queue, + }, + ], + [], + ); + + const systemAccountConfig = new SystemAccountMetaConfig(program.programId); + let remainingAccounts = new PackedAccounts(); + remainingAccounts.addSystemAccounts(systemAccountConfig); + + const merkleTreePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.tree, + ); + const queuePubkeyIndex = remainingAccounts.insertOrGet( + compressedAccount.treeInfo.queue, + ); + const outputStateTreeIndex = + remainingAccounts.insertOrGet(outputStateTree); + + // Deserialize current account using update program's coder + const coder = new anchor.BorshCoder(updateIdl as anchor.Idl); + const currentAccount = coder.types.decode( + "MyCompressedAccount", + compressedAccount.data.data, + ); + + const compressedAccountMeta = { + treeInfo: { + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: compressedAccount.leafIndex, + proveByIndex: false, + rootIndex: proofRpcResult.rootIndices[0], + }, + outputStateTreeIndex, + address: compressedAccount.address, + }; + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); 
+ let tx = await program.methods + .updateAccount(proof, currentAccount, compressedAccountMeta, newMessage) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + return sig; +} +``` diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx b/.context/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx new file mode 100644 index 00000000..5bda67ff --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx @@ -0,0 +1,24 @@ +--- +title: "basic-operations/anchor/update/tsconfig.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/anchor/update/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "noEmit": true, + "noUnusedLocals": false, + "noUnusedParameters": false + }, + "include": ["tests/**/*", "migrations/**/*"], + "exclude": ["node_modules", "target"] +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/Cargo-toml.mdx new file mode 100644 index 00000000..f2e149e3 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/Cargo-toml.mdx @@ -0,0 +1,26 @@ +--- +title: "basic-operations/native/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/create", + 
"programs/update", + "programs/close", + "programs/reinit", + "programs/burn", +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 + +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/basic-operations/native/README.mdx b/.context/program-examples-mdx/basic-operations/native/README.mdx new file mode 100644 index 00000000..1acdcdb7 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/README.mdx @@ -0,0 +1,89 @@ +--- +title: "basic-operations/native/README.md" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/README.md" +--- + +```markdown +# Basic Operations - Native Rust Programs + +Native Solana programs with basic compressed account operations. + +## Programs + +- **create** - Initialize a new compressed account +- **update** - Modify data in an existing compressed account +- **close** - Close a compressed account and reclaim rent +- **reinit** - Reinitialize a previously closed compressed account +- **burn** - Permanently destroy a compressed account (cannot be reinitialized) + +## Build + +Build all programs in the workspace: + +```bash +cargo build-sbf +``` + +The compiled `.so` files will be in `target/deploy/` + +## Test + +### Requirements + +- light cli version 0.24.0+ (install via `npm i -g @lightprotocol/zk-compression-cli`) +- solana cli version 2.1.16+ +- Node.js and npm + +### Running Tests + +#### Rust Tests + +```bash +cargo test-sbf +``` + +#### TypeScript Tests + +1. Build the programs: + + ```bash + cargo build-sbf + ``` + +2. 
Start the test validator with deployed programs: + + ```bash + light test-validator \ + --sbf-program "" ./target/deploy/create.so \ + --sbf-program "" ./target/deploy/update.so \ + --sbf-program "" ./target/deploy/close.so \ + --sbf-program "" ./target/deploy/reinit.so \ + --sbf-program "" ./target/deploy/burn.so + ``` + + NOTE: Replace program IDs with those defined in each program's `lib.rs` (`pub const ID`). + +3. Install dependencies and run tests: + + ```bash + npm install + + npm test + ``` + +The TypeScript tests demonstrate client-side interaction with compressed accounts using `@lightprotocol/stateless.js` and `@lightprotocol/zk-compression-cli`. + +`light test-validator` spawns the following background processes: + +1. solana test validator `http://127.0.0.1:8899` +2. prover server `http://127.0.0.1:3001` +3. photon indexer `http://127.0.0.1:8784` + +You can kill these background processes with `lsof -i:` and `kill `. + +## Disclaimer + +This reference implementation is not audited. + +The Light Protocol programs are audited and deployed on Solana devnet and mainnet. 
+``` diff --git a/.context/program-examples-mdx/basic-operations/native/package-json.mdx b/.context/program-examples-mdx/basic-operations/native/package-json.mdx new file mode 100644 index 00000000..d57ab05a --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/package-json.mdx @@ -0,0 +1,31 @@ +--- +title: "basic-operations/native/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + "test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + "@coral-xyz/anchor": "0.31.1", + "@coral-xyz/borsh": "0.31.1", + "@lightprotocol/stateless.js": "0.22.0", + "dotenv": "^16.5.0" + }, + "devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^5.0.0" + } +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx new file mode 100644 index 00000000..7b2f1b65 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx @@ -0,0 +1,47 @@ +--- +title: "basic-operations/native/programs/burn/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/burn/Cargo.toml" +--- + +```toml +[package] +name = "native-program-burn" +version = "0.1.0" +description = "Native Solana program for burning compressed accounts" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "native_program_burn" + +[features] +no-entrypoint = [] +test-sbf = ["test-helpers"] +test-helpers = ["dep:light-program-test", 
"dep:solana-sdk"] +default = [] + +[[test]] +name = "test" +required-features = ["test-helpers"] + +[dependencies] +light-sdk = { version = "0.16.0" } +light-sdk-types = { version = "0.16.0" } +light-hasher = { version = "3.1.0", features = ["solana"] } +light-macros = { version = "2.1.0", features = ["solana"] } +solana-program = "2.2" +borsh = "0.10.4" +light-program-test = { version = "0.16.0", optional = true } +solana-sdk = { version = "2.2", optional = true } + +[dev-dependencies] +light-client = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx new file mode 100644 index 00000000..3e64856a --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx @@ -0,0 +1,141 @@ +--- +title: "basic-operations/native/programs/burn/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/burn/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] + +#[cfg(any(test, feature = "test-helpers"))] +pub mod test_helpers; + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_macros::pubkey; +use light_sdk::{ + account::sha::LightAccount, + address::v1::derive_address, + cpi::{ + v1::{CpiAccounts, LightSystemProgramCpi}, + CpiSigner, InvokeLightSystemProgram, LightCpiInstruction, + }, + derive_light_cpi_signer, + error::LightSdkError, + instruction::{account_meta::CompressedAccountMetaBurn, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; +use solana_program::{ + account_info::AccountInfo, entrypoint, program_error::ProgramError, pubkey::Pubkey, +}; + +pub const ID: Pubkey = 
pubkey!("CFWrQ8za2yT1xH8yBjYvsDUCWnBH7vXtyVJwqoX5FcNg"); +pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("CFWrQ8za2yT1xH8yBjYvsDUCWnBH7vXtyVJwqoX5FcNg"); + +#[cfg(not(feature = "no-entrypoint"))] +entrypoint!(process_instruction); + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub enum InstructionType { + Create, + Burn, +} + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct CreateInstructionData { + pub proof: ValidityProof, + pub address_tree_info: PackedAddressTreeInfo, + pub output_state_tree_index: u8, + pub message: String, +} + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct BurnInstructionData { + pub proof: ValidityProof, + pub account_meta: CompressedAccountMetaBurn, + pub current_account: MyCompressedAccount, +} + +#[derive(Debug, Default, Clone, BorshSerialize, BorshDeserialize, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} + +pub fn process_instruction( + _program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), ProgramError> { + let (instruction_type, rest) = instruction_data + .split_first() + .ok_or(ProgramError::InvalidInstructionData)?; + + match InstructionType::try_from_slice(&[*instruction_type]) + .map_err(|_| ProgramError::InvalidInstructionData)? 
+ { + InstructionType::Create => create(accounts, rest)?, + InstructionType::Burn => burn(accounts, rest)?, + } + + Ok(()) +} + +fn create(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSdkError> { + let instruction_data = + CreateInstructionData::try_from_slice(instruction_data).map_err(|_| LightSdkError::Borsh)?; + + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + let (address, address_seed) = derive_address( + &[b"message", signer.key.as_ref()], + &instruction_data + .address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ProgramError::NotEnoughAccountKeys)?, + &ID, + ); + + let new_address_params = instruction_data + .address_tree_info + .into_new_address_params_packed(address_seed); + + let mut my_compressed_account = LightAccount::::new_init( + &ID, + Some(address), + instruction_data.output_state_tree_index, + ); + my_compressed_account.owner = *signer.key; + my_compressed_account.message = instruction_data.message; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? 
+ .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) +} + +fn burn(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSdkError> { + let instruction_data = + BurnInstructionData::try_from_slice(instruction_data).map_err(|_| LightSdkError::Borsh)?; + + let (signer, remaining_accounts) = accounts + .split_first() + .ok_or(ProgramError::InvalidAccountData)?; + + let cpi_accounts = CpiAccounts::new(signer, remaining_accounts, LIGHT_CPI_SIGNER); + + let my_compressed_account = LightAccount::::new_burn( + &ID, // Now the burn program owns the account since it created it + &instruction_data.account_meta, + instruction_data.current_account, + )?; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? + .invoke(cpi_accounts)?; + + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx new file mode 100644 index 00000000..071bb238 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx @@ -0,0 +1,68 @@ +--- +title: "basic-operations/native/programs/burn/src/test_helpers.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/burn/src/test_helpers.rs" +--- + +```rust +use borsh::BorshSerialize; +use light_program_test::{AddressWithTree, Indexer, LightProgramTest, Rpc, RpcError}; +use light_sdk::instruction::{PackedAccounts, SystemAccountMetaConfig}; +use crate::{CreateInstructionData, InstructionType, ID}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +pub async fn create_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], + message: 
String, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? + .value; + + let output_state_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (account_metas, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_state_tree_index, + message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Create as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx new file mode 100644 index 00000000..35f7c7bc --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx @@ -0,0 +1,139 @@ +--- +title: "basic-operations/native/programs/burn/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/burn/tests/test.rs" +--- + +```rust +use borsh::{BorshDeserialize, BorshSerialize}; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::address::v1::derive_address; 
+use light_sdk::instruction::{ + account_meta::CompressedAccountMetaBurn, PackedAccounts, SystemAccountMetaConfig, +}; +use native_program_burn::{BurnInstructionData, InstructionType, MyCompressedAccount, ID}; +use solana_sdk::{ + instruction::Instruction, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_burn() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("native_program_burn", ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let address_tree_pubkey = address_tree_info.tree; + + // Create compressed account + let (address, _) = derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_pubkey, + &ID, + ); + let merkle_tree_pubkey = rpc.get_random_state_tree_info().unwrap().tree; + + native_program_burn::test_helpers::create_compressed_account( + &payer, + &mut rpc, + &merkle_tree_pubkey, + address_tree_pubkey, + address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + // Get the created account + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + println!("compressed_account: {:?}", compressed_account); + assert_eq!(compressed_account.address.unwrap(), address); + + // Burn the account + burn_compressed_account(&payer, &mut rpc, &compressed_account) + .await + .unwrap(); + + // Verify account is burned (should be None) + let burned_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value; + assert!(burned_account.is_none()); +} + +pub async fn burn_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + 
accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + println!("Requesting proof for hash: {:?}", hash); + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? + .value; + + println!("Proof returned for hashes: {:?}", rpc_result.proof); + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let current_account = + MyCompressedAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + println!("Account owner from chain (program): {:?}", compressed_account.owner); + println!("Account data owner (user): {:?}", current_account.owner); + println!("Account message: {:?}", current_account.message); + println!("Account hash: {:?}", hash); + println!("Account data bytes: {:?}", &compressed_account.data.as_ref().unwrap().data); + + let meta = CompressedAccountMetaBurn { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = BurnInstructionData { + proof: rpc_result.proof, + account_meta: meta, + current_account, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Burn as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx new file mode 100644 index 00000000..c3c9a4cb --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx @@ -0,0 +1,47 @@ +--- +title: "basic-operations/native/programs/close/Cargo.toml" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/close/Cargo.toml" +--- + +```toml +[package] +name = "native-program-close" +version = "0.1.0" +description = "Native Solana program for closing compressed accounts" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "native_program_close" + +[features] +no-entrypoint = [] +test-sbf = ["test-helpers"] +test-helpers = ["dep:light-program-test", "dep:solana-sdk"] +default = [] + +[[test]] +name = "test" +required-features = ["test-helpers"] + +[dependencies] +light-sdk = { version = "0.16.0" } +light-sdk-types = { version = "0.16.0" } +light-hasher = { version = "3.1.0", features = ["solana"] } +light-macros = { version = "2.1.0", features = ["solana"] } +solana-program = "2.2" +borsh = "0.10.4" +light-program-test = { version = "0.16.0", optional = true } +solana-sdk = { version = "2.2", optional = true } + +[dev-dependencies] +light-client = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx new file mode 100644 index 00000000..f472ec69 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx @@ -0,0 +1,144 @@ +--- +title: "basic-operations/native/programs/close/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/close/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] + +#[cfg(any(test, feature = "test-helpers"))] +pub mod test_helpers; + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_macros::pubkey; +use light_sdk::{ + account::sha::LightAccount, + address::v1::derive_address, + cpi::{ + 
v1::{CpiAccounts, LightSystemProgramCpi}, + CpiSigner, InvokeLightSystemProgram, LightCpiInstruction, + }, + derive_light_cpi_signer, + error::LightSdkError, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; +use solana_program::{ + account_info::AccountInfo, entrypoint, program_error::ProgramError, pubkey::Pubkey, +}; + +pub const ID: Pubkey = pubkey!("NLusgr6vsEjYDvF6nDxpdrhMUxUC19s4XoyshSrGFVN"); +pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("NLusgr6vsEjYDvF6nDxpdrhMUxUC19s4XoyshSrGFVN"); + +#[cfg(not(feature = "no-entrypoint"))] +entrypoint!(process_instruction); + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub enum InstructionType { + Create, + Close, +} + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct CreateInstructionData { + pub proof: ValidityProof, + pub address_tree_info: PackedAddressTreeInfo, + pub output_state_tree_index: u8, + pub message: String, +} + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct CloseInstructionData { + pub proof: ValidityProof, + pub account_meta: CompressedAccountMeta, + pub current_message: String, +} + +#[derive(Debug, Default, Clone, BorshSerialize, BorshDeserialize, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} + +pub fn process_instruction( + _program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), ProgramError> { + let (instruction_type, rest) = instruction_data + .split_first() + .ok_or(ProgramError::InvalidInstructionData)?; + + match InstructionType::try_from_slice(&[*instruction_type]) + .map_err(|_| ProgramError::InvalidInstructionData)? 
+ { + InstructionType::Create => create(accounts, rest)?, + InstructionType::Close => close(accounts, rest)?, + } + + Ok(()) +} + +fn create(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSdkError> { + let instruction_data = + CreateInstructionData::try_from_slice(instruction_data).map_err(|_| LightSdkError::Borsh)?; + + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + let (address, address_seed) = derive_address( + &[b"message", signer.key.as_ref()], + &instruction_data + .address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ProgramError::NotEnoughAccountKeys)?, + &ID, + ); + + let new_address_params = instruction_data + .address_tree_info + .into_new_address_params_packed(address_seed); + + let mut my_compressed_account = LightAccount::::new_init( + &ID, + Some(address), + instruction_data.output_state_tree_index, + ); + my_compressed_account.owner = *signer.key; + my_compressed_account.message = instruction_data.message; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? 
+ .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) +} + +fn close(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSdkError> { + let instruction_data = + CloseInstructionData::try_from_slice(instruction_data).map_err(|_| LightSdkError::Borsh)?; + + let (signer, remaining_accounts) = accounts + .split_first() + .ok_or(ProgramError::InvalidAccountData)?; + + let cpi_accounts = CpiAccounts::new(signer, remaining_accounts, LIGHT_CPI_SIGNER); + + let my_compressed_account = LightAccount::::new_close( + &ID, + &instruction_data.account_meta, + MyCompressedAccount { + owner: *signer.key, + message: instruction_data.current_message, + }, + )?; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? + .invoke(cpi_accounts)?; + + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx new file mode 100644 index 00000000..ad726b96 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx @@ -0,0 +1,68 @@ +--- +title: "basic-operations/native/programs/close/src/test_helpers.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/close/src/test_helpers.rs" +--- + +```rust +use borsh::BorshSerialize; +use light_program_test::{AddressWithTree, Indexer, LightProgramTest, Rpc, RpcError}; +use light_sdk::instruction::{PackedAccounts, SystemAccountMetaConfig}; +use crate::{CreateInstructionData, InstructionType, ID}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +pub async fn create_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], + 
message: String, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? + .value; + + let output_state_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (account_metas, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_state_tree_index, + message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Create as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx new file mode 100644 index 00000000..4976f8e4 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx @@ -0,0 +1,130 @@ +--- +title: "basic-operations/native/programs/close/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/close/tests/test.rs" +--- + +```rust +use borsh::{BorshDeserialize, BorshSerialize}; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use 
light_sdk::address::v1::derive_address; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use native_program_close::{CloseInstructionData, InstructionType, MyCompressedAccount, ID}; +use solana_sdk::{ + instruction::Instruction, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_close() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("native_program_close", ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let address_tree_pubkey = address_tree_info.tree; + + // Create compressed account + let (address, _) = derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_pubkey, + &ID, + ); + let merkle_tree_pubkey = rpc.get_random_state_tree_info().unwrap().tree; + + native_program_close::test_helpers::create_compressed_account( + &payer, + &mut rpc, + &merkle_tree_pubkey, + address_tree_pubkey, + address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + // Get the created account + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(compressed_account.address.unwrap(), address); + + // Close the account + close_compressed_account(&payer, &mut rpc, &compressed_account) + .await + .unwrap(); + + // Verify account is closed (data should be default/empty) + let closed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(closed_account.data, Some(Default::default())); +} + +pub async fn close_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + 
accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? + .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let current_account = + MyCompressedAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = CloseInstructionData { + proof: rpc_result.proof, + account_meta: meta, + current_message: current_account.message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Close as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx new file mode 100644 index 00000000..4b198391 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx @@ -0,0 +1,45 @@ +--- +title: "basic-operations/native/programs/create/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/create/Cargo.toml" +--- + +```toml +[package] +name = "native-program-create" +version = "0.1.0" +description = "Native Solana program for creating compressed accounts" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = 
"native_program_create" + +[features] +no-entrypoint = [] +test-sbf = [] +test-helpers = ["dep:light-program-test", "dep:solana-sdk"] +default = [] + +[dependencies] +light-sdk = { version = "0.16.0" } +light-sdk-types = { version = "0.16.0" } +light-hasher = { version = "3.1.0", features = ["solana"] } +light-macros = { version = "2.1.0", features = ["solana"] } +solana-program = "2.2" +borsh = "0.10.4" +light-program-test = { version = "0.16.0", optional = true } +solana-sdk = { version = "2.2", optional = true } + +[dev-dependencies] +light-program-test = { version = "0.16.0" } +light-client = { version = "0.16.0" } +tokio = "1.36.0" +solana-sdk = "2.2" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx new file mode 100644 index 00000000..b50c2fb1 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "basic-operations/native/programs/create/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/create/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx new file mode 100644 index 00000000..1087805a --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx @@ -0,0 +1,130 @@ +--- +title: "basic-operations/native/programs/create/src/lib.rs" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/create/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] + +#[cfg(any(test, feature = "test-helpers"))] +pub mod test_helpers; + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_macros::pubkey; +use light_sdk::{ + account::sha::LightAccount, + address::v1::derive_address, + cpi::{ + v1::{CpiAccounts, LightSystemProgramCpi}, + CpiSigner, InvokeLightSystemProgram, LightCpiInstruction, + }, + derive_light_cpi_signer, + error::LightSdkError, + instruction::{PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; +use solana_program::{ + account_info::AccountInfo, entrypoint, program_error::ProgramError, pubkey::Pubkey, +}; + +pub const ID: Pubkey = pubkey!("rent4o4eAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPq"); +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("rent4o4eAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPq"); + +entrypoint!(process_instruction); + +#[repr(u8)] +#[derive(Debug)] +pub enum InstructionType { + Create = 0, +} + +impl TryFrom<u8> for InstructionType { + type Error = LightSdkError; + + fn try_from(value: u8) -> Result<Self, Self::Error> { + match value { + 0 => Ok(InstructionType::Create), + _ => panic!("Invalid instruction discriminator."), + } + } +} + +#[derive( + Debug, Default, Clone, BorshSerialize, BorshDeserialize, LightDiscriminator, +)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct CreateInstructionData { + pub proof: ValidityProof, + pub address_tree_info: PackedAddressTreeInfo, + pub output_state_tree_index: u8, + pub message: String, +} + +pub fn process_instruction( + program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), ProgramError> { + if program_id != &ID { + return Err(ProgramError::IncorrectProgramId); + } + if instruction_data.is_empty() { + return Err(ProgramError::InvalidInstructionData); + } + + let 
discriminator = InstructionType::try_from(instruction_data[0]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + + match discriminator { + InstructionType::Create => { + let instruction_data = + CreateInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + create(accounts, instruction_data) + } + } +} + +pub fn create( + accounts: &[AccountInfo], + instruction_data: CreateInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + let (address, address_seed) = derive_address( + &[b"message", signer.key.as_ref()], + &instruction_data + .address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ProgramError::NotEnoughAccountKeys)?, + &ID, + ); + + let new_address_params = instruction_data + .address_tree_info + .into_new_address_params_packed(address_seed); + + let mut my_compressed_account = LightAccount::<MyCompressedAccount>::new_init( + &ID, + Some(address), + instruction_data.output_state_tree_index, + ); + my_compressed_account.owner = *signer.key; + my_compressed_account.message = instruction_data.message; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? 
+ .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx new file mode 100644 index 00000000..03732774 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx @@ -0,0 +1,68 @@ +--- +title: "basic-operations/native/programs/create/src/test_helpers.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/create/src/test_helpers.rs" +--- + +```rust +use borsh::BorshSerialize; +use light_program_test::{AddressWithTree, Indexer, LightProgramTest, Rpc, RpcError}; +use light_sdk::instruction::{PackedAccounts, SystemAccountMetaConfig}; +use crate::{CreateInstructionData, InstructionType, ID}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +pub async fn create_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], + message: String, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? 
+ .value; + + let output_state_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (account_metas, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_state_tree_index, + message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Create as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx new file mode 100644 index 00000000..d56bbaee --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx @@ -0,0 +1,116 @@ +--- +title: "basic-operations/native/programs/create/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/create/tests/test.rs" +--- + +```rust +use borsh::{BorshDeserialize, BorshSerialize}; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::address::v1::derive_address; +use light_sdk::instruction::{PackedAccounts, SystemAccountMetaConfig}; +use native_program_create::{CreateInstructionData, InstructionType, MyCompressedAccount, ID}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_create() { + let config = ProgramTestConfig::new(true, Some(vec![("native_program_create", ID)])); + let mut rpc = 
LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let address_tree_pubkey = address_tree_info.tree; + + // Create compressed account + let (address, _) = derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_pubkey, + &ID, + ); + let merkle_tree_pubkey = rpc.get_random_state_tree_info().unwrap().tree; + + create_compressed_account( + &payer, + &mut rpc, + &merkle_tree_pubkey, + address_tree_pubkey, + address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + // Get the created account + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(compressed_account.address.unwrap(), address); + + // Deserialize and verify the account data + let my_account = + MyCompressedAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + assert_eq!(my_account.owner, payer.pubkey()); + assert_eq!(my_account.message, "Hello, compressed world!"); +} + +pub async fn create_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], + message: String, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? 
+ .value; + + let output_state_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (account_metas, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_state_tree_index, + message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Create as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx new file mode 100644 index 00000000..52744025 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx @@ -0,0 +1,47 @@ +--- +title: "basic-operations/native/programs/reinit/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/reinit/Cargo.toml" +--- + +```toml +[package] +name = "native-program-reinit" +version = "0.1.0" +description = "Native Solana program for reinitializing compressed accounts" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "native_program_reinit" + +[features] +no-entrypoint = [] +test-sbf = ["test-helpers"] +test-helpers = ["dep:light-program-test", "dep:light-client", "dep:solana-sdk"] +default = [] + +[[test]] +name = "test" +required-features = ["test-helpers"] + +[dependencies] +light-sdk = { version = "0.16.0" } +light-sdk-types = { version = "0.16.0" } +light-hasher = { version = "3.1.0", features = ["solana"] } +light-macros = { version = "2.1.0", features = ["solana"] } 
+solana-program = "2.2" +borsh = "0.10.4" +light-program-test = { version = "0.16.0", optional = true } +light-client = { version = "0.16.0", optional = true } +solana-sdk = { version = "2.2", optional = true } + +[dev-dependencies] +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx new file mode 100644 index 00000000..028cfa15 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx @@ -0,0 +1,174 @@ +--- +title: "basic-operations/native/programs/reinit/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/reinit/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] + +#[cfg(any(test, feature = "test-helpers"))] +pub mod test_helpers; + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_macros::pubkey; +use light_sdk::{ + account::sha::LightAccount, + address::v1::derive_address, + cpi::{ + v1::{CpiAccounts, LightSystemProgramCpi}, + CpiSigner, InvokeLightSystemProgram, LightCpiInstruction, + }, + derive_light_cpi_signer, + error::LightSdkError, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; +use solana_program::{ + account_info::AccountInfo, entrypoint, program_error::ProgramError, pubkey::Pubkey, +}; + +pub const ID: Pubkey = pubkey!("C9WiPUaQ5PRjEWg7vUmgekfuQtAgFZFhn12ytXEMDr8y"); +pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("C9WiPUaQ5PRjEWg7vUmgekfuQtAgFZFhn12ytXEMDr8y"); + +#[cfg(not(feature = "no-entrypoint"))] +entrypoint!(process_instruction); + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub enum InstructionType { + 
Create, + Close, + Reinit, +} + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct CreateInstructionData { + pub proof: ValidityProof, + pub address_tree_info: PackedAddressTreeInfo, + pub output_state_tree_index: u8, + pub message: String, +} + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct CloseInstructionData { + pub proof: ValidityProof, + pub account_meta: CompressedAccountMeta, + pub current_message: String, +} + +#[derive(Debug, BorshSerialize, BorshDeserialize)] +pub struct ReinitInstructionData { + pub proof: ValidityProof, + pub account_meta: CompressedAccountMeta, +} + +#[derive(Debug, Default, Clone, BorshSerialize, BorshDeserialize, LightDiscriminator)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} + +pub fn process_instruction( + _program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), ProgramError> { + let (instruction_type, rest) = instruction_data + .split_first() + .ok_or(ProgramError::InvalidInstructionData)?; + + match InstructionType::try_from_slice(&[*instruction_type]) + .map_err(|_| ProgramError::InvalidInstructionData)? 
+ { + InstructionType::Create => create(accounts, rest)?, + InstructionType::Close => close(accounts, rest)?, + InstructionType::Reinit => reinit(accounts, rest)?, + } + + Ok(()) +} + +fn create(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSdkError> { + let instruction_data = + CreateInstructionData::try_from_slice(instruction_data).map_err(|_| LightSdkError::Borsh)?; + + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + let (address, address_seed) = derive_address( + &[b"message", signer.key.as_ref()], + &instruction_data + .address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ProgramError::NotEnoughAccountKeys)?, + &ID, + ); + + let new_address_params = instruction_data + .address_tree_info + .into_new_address_params_packed(address_seed); + + let mut my_compressed_account = LightAccount::<MyCompressedAccount>::new_init( + &ID, + Some(address), + instruction_data.output_state_tree_index, + ); + my_compressed_account.owner = *signer.key; + my_compressed_account.message = instruction_data.message; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? 
+ .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) +} + +fn close(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSdkError> { + let instruction_data = + CloseInstructionData::try_from_slice(instruction_data).map_err(|_| LightSdkError::Borsh)?; + + let (signer, remaining_accounts) = accounts + .split_first() + .ok_or(ProgramError::InvalidAccountData)?; + + let cpi_accounts = CpiAccounts::new(signer, remaining_accounts, LIGHT_CPI_SIGNER); + + let my_compressed_account = LightAccount::<MyCompressedAccount>::new_close( + &ID, + &instruction_data.account_meta, + MyCompressedAccount { + owner: *signer.key, + message: instruction_data.current_message, + }, + )?; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? + .invoke(cpi_accounts)?; + + Ok(()) +} + +fn reinit(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSdkError> { + let instruction_data = + ReinitInstructionData::try_from_slice(instruction_data).map_err(|_| LightSdkError::Borsh)?; + + let (signer, remaining_accounts) = accounts + .split_first() + .ok_or(ProgramError::InvalidAccountData)?; + + let cpi_accounts = CpiAccounts::new(signer, remaining_accounts, LIGHT_CPI_SIGNER); + + let my_compressed_account = LightAccount::<MyCompressedAccount>::new_empty( + &ID, + &instruction_data.account_meta, + )?; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? 
+ .invoke(cpi_accounts)?; + + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx new file mode 100644 index 00000000..e2643352 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx @@ -0,0 +1,124 @@ +--- +title: "basic-operations/native/programs/reinit/src/test_helpers.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/reinit/src/test_helpers.rs" +--- + +```rust +use borsh::{BorshDeserialize, BorshSerialize}; +use light_client::indexer::CompressedAccount; +use light_program_test::{AddressWithTree, Indexer, LightProgramTest, Rpc, RpcError}; +use light_sdk::instruction::{account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig}; +use crate::{CloseInstructionData, CreateInstructionData, InstructionType, MyCompressedAccount, ID}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +pub async fn create_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], + message: String, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? 
+ .value; + + let output_state_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (account_metas, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_state_tree_index, + message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Create as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn close_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let current_account = + MyCompressedAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = CloseInstructionData { + proof: rpc_result.proof, + account_meta: meta, + current_message: current_account.message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Close as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx new file mode 100644 index 00000000..7d06d618 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx @@ -0,0 +1,148 @@ +--- +title: "basic-operations/native/programs/reinit/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/reinit/tests/test.rs" +--- + +```rust +use borsh::{BorshDeserialize, BorshSerialize}; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::address::v1::derive_address; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use native_program_reinit::{ReinitInstructionData, InstructionType, 
MyCompressedAccount, ID}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_reinit() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("native_program_reinit", ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let address_tree_pubkey = address_tree_info.tree; + + // Create compressed account + let (address, _) = derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_pubkey, + &ID, + ); + let merkle_tree_pubkey = rpc.get_random_state_tree_info().unwrap().tree; + + native_program_reinit::test_helpers::create_compressed_account( + &payer, + &mut rpc, + &merkle_tree_pubkey, + address_tree_pubkey, + address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + // Get the created account + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Close the account + native_program_reinit::test_helpers::close_compressed_account(&payer, &mut rpc, &compressed_account) + .await + .unwrap(); + + // Verify account is closed + let closed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(closed_account.data, Some(Default::default())); + + // Reinitialize the account + reinit_compressed_account(&payer, &mut rpc, &closed_account) + .await + .unwrap(); + + // Verify account is reinitialized with default MyCompressedAccount values + let reinit_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Deserialize and verify it's a default-initialized MyCompressedAccount + let deserialized_account = MyCompressedAccount::deserialize( + &mut reinit_account.data.as_ref().unwrap().data.as_slice() + ) + .unwrap(); + + // Check that the reinitialized account has 
default values + assert_eq!(deserialized_account.owner, Pubkey::default()); + assert_eq!(deserialized_account.message, String::default()); +} + +pub async fn reinit_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? + .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = ReinitInstructionData { + proof: rpc_result.proof, + account_meta: meta, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Reinit as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx new file mode 100644 index 00000000..0ca239d3 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx @@ -0,0 +1,47 @@ +--- +title: "basic-operations/native/programs/update/Cargo.toml" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/update/Cargo.toml" +--- + +```toml +[package] +name = "native-program-update" +version = "0.1.0" +description = "Native Solana program for updating compressed accounts" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "native_program_update" + +[features] +no-entrypoint = [] +test-sbf = ["test-helpers"] +test-helpers = ["dep:light-program-test", "dep:solana-sdk"] +default = [] + +[[test]] +name = "test" +required-features = ["test-helpers"] + +[dependencies] +light-sdk = { version = "0.16.0" } +light-sdk-types = { version = "0.16.0" } +light-hasher = { version = "3.1.0", features = ["solana"] } +light-macros = { version = "2.1.0", features = ["solana"] } +solana-program = "2.2" +borsh = "0.10.4" +light-program-test = { version = "0.16.0", optional = true } +solana-sdk = { version = "2.2", optional = true } + +[dev-dependencies] +light-client = { version = "0.16.0" } +tokio = "1.36.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx b/.context/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx new file mode 100644 index 00000000..6d202edd --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "basic-operations/native/programs/update/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/update/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx new file 
mode 100644 index 00000000..ffab287b --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx @@ -0,0 +1,173 @@ +--- +title: "basic-operations/native/programs/update/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/update/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] + +#[cfg(any(test, feature = "test-helpers"))] +pub mod test_helpers; + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_macros::pubkey; +use light_sdk::{ + account::sha::LightAccount, + address::v1::derive_address, + cpi::{ + v1::{CpiAccounts, LightSystemProgramCpi}, + CpiSigner, InvokeLightSystemProgram, LightCpiInstruction, + }, + derive_light_cpi_signer, + error::LightSdkError, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; +use solana_program::{ + account_info::AccountInfo, entrypoint, program_error::ProgramError, pubkey::Pubkey, +}; + +pub const ID: Pubkey = pubkey!("2m6LXA7E6kMSkK6QHq2WCznD6kvhDcVFqEKpETKAQxYe"); +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("2m6LXA7E6kMSkK6QHq2WCznD6kvhDcVFqEKpETKAQxYe"); + +entrypoint!(process_instruction); + +#[repr(u8)] +#[derive(Debug)] +pub enum InstructionType { + Create = 0, + Update = 1, +} + +impl TryFrom<u8> for InstructionType { + type Error = LightSdkError; + + fn try_from(value: u8) -> Result<Self, Self::Error> { + match value { + 0 => Ok(InstructionType::Create), + 1 => Ok(InstructionType::Update), + _ => panic!("Invalid instruction discriminator."), + } + } +} + +#[derive( + Debug, Default, Clone, BorshSerialize, BorshDeserialize, LightDiscriminator, +)] +pub struct MyCompressedAccount { + pub owner: Pubkey, + pub message: String, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct CreateInstructionData { + pub proof: ValidityProof, + pub address_tree_info: PackedAddressTreeInfo, + pub output_state_tree_index: u8, + pub message: 
String, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct UpdateInstructionData { + pub proof: ValidityProof, + pub account_meta: CompressedAccountMeta, + pub current_message: String, + pub new_message: String, +} + +pub fn process_instruction( + program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), ProgramError> { + if program_id != &ID { + return Err(ProgramError::IncorrectProgramId); + } + if instruction_data.is_empty() { + return Err(ProgramError::InvalidInstructionData); + } + + let discriminator = InstructionType::try_from(instruction_data[0]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + + match discriminator { + InstructionType::Create => { + let instruction_data = + CreateInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + create(accounts, instruction_data) + } + InstructionType::Update => { + let instruction_data = + UpdateInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + update(accounts, instruction_data) + } + } +} + +pub fn create( + accounts: &[AccountInfo], + instruction_data: CreateInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + let (address, address_seed) = derive_address( + &[b"message", signer.key.as_ref()], + &instruction_data + .address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ProgramError::NotEnoughAccountKeys)?, + &ID, + ); + + let new_address_params = instruction_data + .address_tree_info + .into_new_address_params_packed(address_seed); + + let mut my_compressed_account = LightAccount::<MyCompressedAccount>::new_init( + &ID, + Some(address), + instruction_data.output_state_tree_index, + ); + my_compressed_account.owner = *signer.key; + my_compressed_account.message = 
instruction_data.message; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? + .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) +} + +pub fn update( + accounts: &[AccountInfo], + instruction_data: UpdateInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + let mut my_compressed_account = LightAccount::<'_, MyCompressedAccount>::new_mut( + &ID, + &instruction_data.account_meta, + MyCompressedAccount { + owner: *signer.key, + message: instruction_data.current_message, + }, + )?; + + // Update the account data with new message + my_compressed_account.account.message = instruction_data.new_message; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof) + .with_light_account(my_compressed_account)? + .invoke(light_cpi_accounts)?; + + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx new file mode 100644 index 00000000..542128d2 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx @@ -0,0 +1,68 @@ +--- +title: "basic-operations/native/programs/update/src/test_helpers.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/update/src/test_helpers.rs" +--- + +```rust +use borsh::BorshSerialize; +use light_program_test::{AddressWithTree, Indexer, LightProgramTest, Rpc, RpcError}; +use light_sdk::instruction::{PackedAccounts, SystemAccountMetaConfig}; +use crate::{CreateInstructionData, InstructionType, ID}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +pub async fn 
create_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], + message: String, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? + .value; + + let output_state_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (account_metas, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_state_tree_index, + message, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Create as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx b/.context/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx new file mode 100644 index 00000000..4c87a5f1 --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx @@ -0,0 +1,139 @@ +--- +title: "basic-operations/native/programs/update/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/programs/update/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use borsh::{BorshDeserialize, 
BorshSerialize}; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::address::v1::derive_address; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use native_program_update::{InstructionType, MyCompressedAccount, UpdateInstructionData, ID}; +use solana_sdk::{ + instruction::Instruction, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_update() { + let config = ProgramTestConfig::new(true, Some(vec![ + ("native_program_update", ID), + ])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let address_tree_pubkey = address_tree_info.tree; + + // Create compressed account + let (address, _) = derive_address( + &[b"message", payer.pubkey().as_ref()], + &address_tree_pubkey, + &ID, + ); + let merkle_tree_pubkey = rpc.get_random_state_tree_info().unwrap().tree; + + native_program_update::test_helpers::create_compressed_account( + &payer, + &mut rpc, + &merkle_tree_pubkey, + address_tree_pubkey, + address, + "Hello, compressed world!".to_string(), + ) + .await + .unwrap(); + + // Get the created account + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(compressed_account.address.unwrap(), address); + + // Update the account + update_compressed_account(&payer, &mut rpc, &compressed_account) + .await + .unwrap(); + + // Get the updated account + let updated_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Deserialize and verify the updated account data + let my_account = + MyCompressedAccount::deserialize(&mut updated_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + assert_eq!(my_account.owner, payer.pubkey()); 
+ assert_eq!(my_account.message, "Updated message!"); +} + +pub async fn update_compressed_account( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? + .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let current_account = + MyCompressedAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = UpdateInstructionData { + proof: rpc_result.proof, + account_meta: meta, + current_message: current_account.message, + new_message: "Updated message!".to_string(), + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: ID, + accounts: account_metas, + data: [ + &[InstructionType::Update as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/basic-operations/native/tsconfig-json.mdx b/.context/program-examples-mdx/basic-operations/native/tsconfig-json.mdx new file mode 100644 index 00000000..2700999c --- /dev/null +++ b/.context/program-examples-mdx/basic-operations/native/tsconfig-json.mdx @@ -0,0 +1,20 @@ +--- +title: "basic-operations/native/tsconfig.json" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/basic-operations/native/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "noEmit": true + } +} +``` diff --git a/.context/program-examples-mdx/counter/anchor/Anchor-toml.mdx b/.context/program-examples-mdx/counter/anchor/Anchor-toml.mdx new file mode 100644 index 00000000..33ee5a2f --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/Anchor-toml.mdx @@ -0,0 +1,25 @@ +--- +title: "counter/anchor/Anchor.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/Anchor.toml" +--- + +```toml +[toolchain] + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +counter = "GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "Localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "NODE_OPTIONS='--no-deprecation' yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/counter/anchor/Cargo-toml.mdx b/.context/program-examples-mdx/counter/anchor/Cargo-toml.mdx new file mode 100644 index 00000000..a78396b2 --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/Cargo-toml.mdx @@ -0,0 +1,19 @@ +--- +title: "counter/anchor/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/Cargo.toml" +--- + +```toml +[workspace] +members = ["programs/*"] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/counter/anchor/README.mdx 
b/.context/program-examples-mdx/counter/anchor/README.mdx new file mode 100644 index 00000000..f2456d24 --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/README.mdx @@ -0,0 +1,73 @@ +--- +title: "counter/anchor/README.md" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/README.md" +--- + +```markdown +# Compressed Counter Program (Anchor) + +A counter program built with the Anchor framework. Includes instructions to create a zk-compressed PDA account, increment, decrement, reset the counter value, and close the account. + +## Build + +```bash +anchor build +``` + +## Test + +### Requirements + +- light cli version 0.24.0+ (install via `npm i -g @lightprotocol/zk-compression-cli`) +- solana cli version 2.1.16+ +- anchor version 0.31.1+ +- Node.js and npm + +### Running Tests + +#### Rust Tests + +```bash +cargo test-sbf +``` + +#### TypeScript Tests + +1. Build the program and sync the program ID: + + ```bash + anchor build && anchor keys sync && anchor build + ``` + +2. Start the test validator + + ```bash + light test-validator --sbf-program "GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX" ./target/deploy/counter.so + ``` + +NOTE: Replace the program ID above with the one generated in your `Anchor.toml` file. + +3. Install dependencies and run tests: + + ```bash + npm install + + anchor test --skip-local-validator --skip-build --skip-deploy + ``` + +The TypeScript tests demonstrate client-side interaction with compressed accounts using `@lightprotocol/stateless.js` and `@lightprotocol/zk-compression-cli`. + +`$ light test-validator` spawns the following background processes: + +1. solana test validator `http://127.0.0.1:8899` +2. prover server `http://127.0.0.1:3001` +3. photon indexer `http://127.0.0.1:8784` + +You can kill these background processes with `lsof -i:<port>` and `kill <pid>`. + +## Disclaimer + +This reference implementation is not audited. 
+ +The Light Protocol programs are audited and deployed on Solana devnet and mainnet. +``` diff --git a/.context/program-examples-mdx/counter/anchor/package-json.mdx b/.context/program-examples-mdx/counter/anchor/package-json.mdx new file mode 100644 index 00000000..7a0f55ae --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/package-json.mdx @@ -0,0 +1,30 @@ +--- +title: "counter/anchor/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + "test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + "@coral-xyz/anchor": "0.31.1", + "@lightprotocol/stateless.js": "0.22.1-alpha.1", + "dotenv": "^16.5.0" + }, + "devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^4.3.5" + } +} +``` diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx b/.context/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx new file mode 100644 index 00000000..77f636b7 --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx @@ -0,0 +1,43 @@ +--- +title: "counter/anchor/programs/counter/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/programs/counter/Cargo.toml" +--- + +```toml +[package] +name = "counter" +version = "0.0.11" +description = "Created with Anchor" +edition = "2021" +license = "Apache-2.0" + +[lib] +crate-type = ["cdylib", "lib"] +name = "counter" + +[features] +no-entrypoint = [] +no-idl = [] +cpi = ["no-entrypoint"] +default = ["idl-build"] +test-sbf = [] +idl-build = ["anchor-lang/idl-build", 
"light-sdk/idl-build"] + +[dependencies] +anchor-lang = "0.31.1" +light-hasher = { version = "5.0.0", features = ["solana"] } +light-sdk = { version = "0.16.0" , features = ["anchor"] } +light-sdk-types = { version = "0.16.0" , features = ["anchor"] } + +[dev-dependencies] +light-client = "0.16.0" +solana-keypair = "2.2" +solana-message = "2.2" +solana-pubkey = { version = "2.2", features = ["curve25519", "sha2"] } +solana-signer = "2.2" +solana-transaction = "2.2" + +light-program-test = "0.16.0" +tokio = "1.43.0" +solana-sdk = "2.2" +``` diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx b/.context/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx new file mode 100644 index 00000000..3645d8e6 --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "counter/anchor/programs/counter/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/programs/counter/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx b/.context/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx new file mode 100644 index 00000000..dbe1aae7 --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx @@ -0,0 +1,228 @@ +--- +title: "counter/anchor/programs/counter/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/programs/counter/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, Discriminator}; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{v1::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, 
ValidityProof}, + LightDiscriminator, LightHasher, +}; + +declare_id!("GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX"); + +#[program] +pub mod counter { + + use super::*; + use light_sdk::cpi::{ + v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + + pub fn create_counter<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + ) -> Result<()> { + // LightAccount::new_init will create an account with empty output state (no input state). + // Modifying the account will modify the output state that when converted to_account_info() + // is hashed with poseidon hashes, serialized with borsh + // and created with invoke_light_system_program by invoking the light-system-program. + // The hashing scheme is the account structure derived with LightHasher. + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[b"counter", ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let new_address_params = address_tree_info.into_new_address_params_packed(address_seed); + + let mut counter = LightAccount::<'_, CounterAccount>::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + counter.owner = ctx.accounts.signer.key(); + counter.value = 0; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(counter)? 
+ .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + pub fn increment_counter<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + counter_value: u64, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + // LightAccount::new_mut will create an account with input state and output state. + // The input state is hashed immediately when calling new_mut(). + // Modifying the account will modify the output state that when converted to_account_info() + // is hashed with poseidon hashes, serialized with borsh + // and created with invoke_light_system_program by invoking the light-system-program. + // The hashing scheme is the account structure derived with LightHasher. + let mut counter = LightAccount::<'_, CounterAccount>::new_mut( + &crate::ID, + &account_meta, + CounterAccount { + owner: ctx.accounts.signer.key(), + value: counter_value, + }, + )?; + + msg!("counter {}", counter.value); + + counter.value = counter.value.checked_add(1).ok_or(CustomError::Overflow)?; + + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(counter)? 
+ .invoke(light_cpi_accounts)?; + Ok(()) + } + + pub fn decrement_counter<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + counter_value: u64, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + let mut counter = LightAccount::<'_, CounterAccount>::new_mut( + &crate::ID, + &account_meta, + CounterAccount { + owner: ctx.accounts.signer.key(), + value: counter_value, + }, + )?; + + counter.value = counter.value.checked_sub(1).ok_or(CustomError::Underflow)?; + + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(counter)? + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + pub fn reset_counter<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + counter_value: u64, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + let mut counter = LightAccount::<'_, CounterAccount>::new_mut( + &crate::ID, + &account_meta, + CounterAccount { + owner: ctx.accounts.signer.key(), + value: counter_value, + }, + )?; + + counter.value = 0; + + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(counter)? + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + pub fn close_counter<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + counter_value: u64, + account_meta: CompressedAccountMeta, + ) -> Result<()> { + // LightAccount::new_close() will create an account with only input state and no output state. + // By providing no output state the account is closed after the instruction. + // The address of a closed account cannot be reused. 
+ let counter = LightAccount::<'_, CounterAccount>::new_close( + &crate::ID, + &account_meta, + CounterAccount { + owner: ctx.accounts.signer.key(), + value: counter_value, + }, + )?; + + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(counter)? + .invoke(light_cpi_accounts)?; + Ok(()) + } +} + +#[error_code] +pub enum CustomError { + #[msg("No authority to perform this action")] + Unauthorized, + #[msg("Counter overflow")] + Overflow, + #[msg("Counter underflow")] + Underflow, +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +// declared as event so that it is part of the idl. +#[event] +#[derive(Clone, Debug, Default, LightDiscriminator, LightHasher)] +pub struct CounterAccount { + #[hash] + pub owner: Pubkey, + pub value: u64, +} +``` diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx b/.context/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx new file mode 100644 index 00000000..42b0bdd5 --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx @@ -0,0 +1,422 @@ +--- +title: "counter/anchor/programs/counter/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/programs/counter/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use anchor_lang::{AnchorDeserialize, InstructionData, ToAccountMetas}; +use counter::CounterAccount; +use light_client::indexer::{CompressedAccount, TreeInfo}; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v1::derive_address, + instruction::{account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig}, +}; +use solana_sdk::{ + 
instruction::Instruction, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_counter() { + let config = ProgramTestConfig::new(true, Some(vec![("counter", counter::ID)])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + + let (address, _) = derive_address( + &[b"counter", payer.pubkey().as_ref()], + &address_tree_info.tree, + &counter::ID, + ); + + // Create the counter. + create_counter(&mut rpc, &payer, &address, address_tree_info) + .await + .unwrap(); + + // Check that it was created correctly. + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(compressed_account.leaf_index, 0); + let counter = &compressed_account.data.as_ref().unwrap().data; + let counter = CounterAccount::deserialize(&mut &counter[..]).unwrap(); + assert_eq!(counter.value, 0); + + // Increment the counter. + increment_counter(&mut rpc, &payer, &compressed_account) + .await + .unwrap(); + + // Check that it was incremented correctly. + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + assert_eq!(compressed_account.leaf_index, 1); + let counter = &compressed_account.data.as_ref().unwrap().data; + let counter = CounterAccount::deserialize(&mut &counter[..]).unwrap(); + assert_eq!(counter.value, 1); + + // Decrement the counter. + decrement_counter(&mut rpc, &payer, &compressed_account) + .await + .unwrap(); + + // Check that it was decremented correctly. 
+ let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + assert_eq!(compressed_account.leaf_index, 2); + + let counter = &compressed_account.data.as_ref().unwrap().data; + let counter = CounterAccount::deserialize(&mut &counter[..]).unwrap(); + assert_eq!(counter.value, 0); + + // Reset the counter. + reset_counter(&mut rpc, &payer, &compressed_account) + .await + .unwrap(); + + // Check that it was reset correctly. + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + let counter = &compressed_account.data.as_ref().unwrap().data; + let counter = CounterAccount::deserialize(&mut &counter[..]).unwrap(); + assert_eq!(counter.value, 0); + + // Close the counter. + close_counter(&mut rpc, &payer, &compressed_account) + .await + .unwrap(); + + // Check that it was closed correctly (account data should be default). + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(compressed_account.data, Some(Default::default())); +} + +async fn create_counter( + rpc: &mut R, + payer: &Keypair, + address: &[u8; 32], + address_tree_info: TreeInfo, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(counter::ID); + remaining_accounts.add_system_accounts(config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + tree: address_tree_info.tree, + address: *address, + }], + None, + ) + .await? + .value; + let output_state_tree_index = rpc + .get_random_state_tree_info()? 
+ .pack_output_tree_index(&mut remaining_accounts)?; + let packed_address_tree_info = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .address_trees[0]; + + let instruction_data = counter::instruction::CreateCounter { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index, + }; + + let accounts = counter::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: [ + accounts.to_account_metas(Some(true)), + remaining_accounts_metas, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +#[allow(clippy::too_many_arguments)] +async fn increment_counter( + rpc: &mut R, + payer: &Keypair, + compressed_account: &CompressedAccount, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(counter::ID); + remaining_accounts.add_system_accounts(config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let account_meta = CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }; + + let instruction_data = counter::instruction::IncrementCounter { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta, + }; + + let accounts = counter::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: [ + accounts.to_account_metas(Some(true)), + remaining_accounts_metas, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +#[allow(clippy::too_many_arguments)] +async fn decrement_counter( + rpc: &mut R, + payer: &Keypair, + compressed_account: &CompressedAccount, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(counter::ID); + remaining_accounts.add_system_accounts(config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(Vec::from(&[hash]), vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let account_meta = CompressedAccountMeta { + tree_info: packed_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_tree_accounts.output_tree_index, + }; + + let instruction_data = counter::instruction::DecrementCounter { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta, + }; + + let accounts = counter::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: [ + accounts.to_account_metas(Some(true)), + remaining_accounts_metas, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn reset_counter( + rpc: &mut R, + payer: &Keypair, + compressed_account: &CompressedAccount, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(counter::ID); + remaining_accounts.add_system_accounts(config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(Vec::from(&[hash]), vec![], None) + .await? 
+ .value; + + let packed_merkle_context = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let account_meta = CompressedAccountMeta { + tree_info: packed_merkle_context.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_merkle_context.output_tree_index, + }; + + let instruction_data = counter::instruction::ResetCounter { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta, + }; + + let accounts = counter::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: [ + accounts.to_account_metas(Some(true)), + remaining_accounts_metas, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn close_counter( + rpc: &mut R, + payer: &Keypair, + compressed_account: &CompressedAccount, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(counter::ID); + remaining_accounts.add_system_accounts(config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(Vec::from(&[hash]), vec![], None) + .await + .unwrap() + .value; + + let packed_tree_infos = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let account_meta = CompressedAccountMeta { + tree_info: packed_tree_infos.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: 
packed_tree_infos.output_tree_index, + }; + + let instruction_data = counter::instruction::CloseCounter { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta, + }; + + let accounts = counter::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: [ + accounts.to_account_metas(Some(true)), + remaining_accounts_metas, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/counter/anchor/tests/test-ts.mdx b/.context/program-examples-mdx/counter/anchor/tests/test-ts.mdx new file mode 100644 index 00000000..88910c1e --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/tests/test-ts.mdx @@ -0,0 +1,318 @@ +--- +title: "counter/anchor/tests/test.ts" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/tests/test.ts" +--- + +```typescript +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { Counter } from "../target/types/counter"; +import idl from "../target/idl/counter.json"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + defaultTestStateTreeAccounts, + deriveAddress, + deriveAddressSeed, + LightSystemProgram, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, +} from "@lightprotocol/stateless.js"; +import { assert } from "chai"; + +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +const anchorWalletPath = path.join(os.homedir(), ".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("test-anchor", () => { + const program = anchor.workspace.Counter as Program; + const coder = new anchor.BorshCoder(idl as anchor.Idl); + + it("", 
async () => { + let signer = new web3.Keypair(); + let rpc = createRpc( + "http://127.0.0.1:8899", + "http://127.0.0.1:8784", + "http://127.0.0.1:3001", + { + commitment: "confirmed", + } + ); + let lamports = web3.LAMPORTS_PER_SOL; + await rpc.requestAirdrop(signer.publicKey, lamports); + await sleep(2000); + + const outputMerkleTree = defaultTestStateTreeAccounts().merkleTree; + const addressTree = defaultTestStateTreeAccounts().addressTree; + const addressQueue = defaultTestStateTreeAccounts().addressQueue; + + const counterSeed = new TextEncoder().encode("counter"); + const seed = deriveAddressSeed( + [counterSeed, signer.publicKey.toBytes()], + new web3.PublicKey(program.idl.address) + ); + const address = deriveAddress(seed, addressTree); + // Create counter compressed account. + await CreateCounterCompressedAccount( + rpc, + addressTree, + addressQueue, + address, + program, + outputMerkleTree, + signer + ); + + let counterAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + + let counter = coder.types.decode( + "CounterAccount", + counterAccount.data.data + ); + console.log("counter account ", counterAccount); + console.log("des counter ", counter); + + await incrementCounterCompressedAccount( + rpc, + counter.value, + counterAccount, + program, + outputMerkleTree, + signer + ); + + counterAccount = await rpc.getCompressedAccount(bn(address.toBytes())); + counter = coder.types.decode("CounterAccount", counterAccount.data.data); + console.log("counter account ", counterAccount); + console.log("des counter ", counter); + + await deleteCounterCompressedAccount( + rpc, + counter.value, + counterAccount, + program, + outputMerkleTree, + signer + ); + + const deletedCounterAccount = await rpc.getCompressedAccount( + bn(address.toBytes()) + ); + console.log("deletedCounterAccount ", deletedCounterAccount); + + assert.isTrue(deletedCounterAccount.data.data.length === 0); + assert.equal( + deletedCounterAccount.data.discriminator.toString(), + 
Array(8).fill(0).toString() + ); + assert.equal( + deletedCounterAccount.data.dataHash.toString(), + Array(32).fill(0).toString() + ); + }); +}); + +async function CreateCounterCompressedAccount( + rpc: Rpc, + addressTree: anchor.web3.PublicKey, + addressQueue: anchor.web3.PublicKey, + address: anchor.web3.PublicKey, + program: anchor.Program, + outputMerkleTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair +) { + { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTree, + queue: addressQueue, + address: bn(address.toBytes()), + }, + ] + ); + const systemAccountConfig = SystemAccountMetaConfig.new(program.programId); + let remainingAccounts = + PackedAccounts.newWithSystemAccounts(systemAccountConfig); + + const addressMerkleTreePubkeyIndex = + remainingAccounts.insertOrGet(addressTree); + const addressQueuePubkeyIndex = remainingAccounts.insertOrGet(addressQueue); + const packedAddreesMerkleContext = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex, + addressQueuePubkeyIndex, + }; + const outputMerkleTreeIndex = + remainingAccounts.insertOrGet(outputMerkleTree); + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .createCounter(proof, packedAddreesMerkleContext, outputMerkleTreeIndex) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + + console.log("Created counter compressed account ", sig); + } +} + +async function incrementCounterCompressedAccount( + rpc: Rpc, + counterValue: anchor.BN, + counterAccount: 
CompressedAccountWithMerkleContext, + program: anchor.Program, + outputMerkleTree: anchor.web3.PublicKey, + signer: anchor.web3.Keypair +) { + { + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: counterAccount.hash, + tree: counterAccount.treeInfo.tree, + queue: counterAccount.treeInfo.queue, + }, + ], + [] + ); + const systemAccountConfig = SystemAccountMetaConfig.new(program.programId); + let remainingAccounts = + PackedAccounts.newWithSystemAccounts(systemAccountConfig); + + const merkleTreePubkeyIndex = remainingAccounts.insertOrGet( + counterAccount.treeInfo.tree + ); + const queuePubkeyIndex = remainingAccounts.insertOrGet( + counterAccount.treeInfo.queue + ); + const outputMerkleTreeIndex = + remainingAccounts.insertOrGet(outputMerkleTree); + const compressedAccountMeta = { + treeInfo: { + rootIndex: proofRpcResult.rootIndices[0], + proveByIndex: false, + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: counterAccount.leafIndex, + }, + address: counterAccount.address, + outputStateTreeIndex: outputMerkleTreeIndex, + }; + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .incrementCounter(proof, counterValue, compressedAccountMeta) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + + console.log("Incremented counter compressed account ", sig); + } +} + +async function deleteCounterCompressedAccount( + rpc: Rpc, + counterValue: anchor.BN, + counterAccount: CompressedAccountWithMerkleContext, + program: anchor.Program, + outputMerkleTree: anchor.web3.PublicKey, + signer: 
anchor.web3.Keypair +) { + { + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: counterAccount.hash, + tree: counterAccount.treeInfo.tree, + queue: counterAccount.treeInfo.queue, + }, + ], + [] + ); + const systemAccountConfig = SystemAccountMetaConfig.new(program.programId); + let remainingAccounts = + PackedAccounts.newWithSystemAccounts(systemAccountConfig); + + const merkleTreePubkeyIndex = remainingAccounts.insertOrGet( + counterAccount.treeInfo.tree + ); + const queuePubkeyIndex = remainingAccounts.insertOrGet( + counterAccount.treeInfo.queue + ); + const outputMerkleTreeIndex = + remainingAccounts.insertOrGet(outputMerkleTree); + + const compressedAccountMeta = { + treeInfo: { + rootIndex: proofRpcResult.rootIndices[0], + proveByIndex: false, + merkleTreePubkeyIndex, + queuePubkeyIndex, + leafIndex: counterAccount.leafIndex, + }, + address: counterAccount.address, + outputStateTreeIndex: outputMerkleTreeIndex, + }; + + let proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + let tx = await program.methods + .closeCounter(proof, counterValue, compressedAccountMeta) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts.toAccountMetas().remainingAccounts) + .signers([signer]) + .transaction(); + tx.recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + tx.sign(signer); + + const sig = await rpc.sendTransaction(tx, [signer]); + await confirmTx(rpc, sig); + + console.log("Deleted counter compressed account ", sig); + } +} +``` diff --git a/.context/program-examples-mdx/counter/anchor/tsconfig-json.mdx b/.context/program-examples-mdx/counter/anchor/tsconfig-json.mdx new file mode 100644 index 00000000..19efa632 --- /dev/null +++ b/.context/program-examples-mdx/counter/anchor/tsconfig-json.mdx @@ -0,0 +1,18 @@ +--- +title: "counter/anchor/tsconfig.json" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/counter/anchor/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true + } +} +``` diff --git a/.context/program-examples-mdx/counter/native/Cargo-toml.mdx b/.context/program-examples-mdx/counter/native/Cargo-toml.mdx new file mode 100644 index 00000000..93587ad0 --- /dev/null +++ b/.context/program-examples-mdx/counter/native/Cargo-toml.mdx @@ -0,0 +1,48 @@ +--- +title: "counter/native/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/native/Cargo.toml" +--- + +```toml +[package] +name = "counter" +version = "1.0.0" +description = "Native counter program using light protocol" +repository = "https://github.com/Lightprotocol/program-examples" +license = "Apache-2.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "counter" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +test-sbf = [] +default = [] + +[dependencies] +light-sdk = "0.16.0" +light-sdk-types = "0.16.0" +light-hasher = { version = "5.0.0", features = ["solana"] } +solana-program = "2.2" +light-macros = "2.1.0" +borsh = "0.10.4" +light-compressed-account = { version = "0.6.0", features = ["solana"] } + +[dev-dependencies] +light-program-test = "0.16.0" +light-client = "0.16.0" +tokio = "1.43.0" +solana-sdk = "2.2" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/counter/native/Xargo-toml.mdx b/.context/program-examples-mdx/counter/native/Xargo-toml.mdx new file mode 100644 index 00000000..3ff579d7 --- /dev/null +++ b/.context/program-examples-mdx/counter/native/Xargo-toml.mdx @@ -0,0 +1,9 @@ 
+--- +title: "counter/native/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/native/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/counter/native/src/lib-rs.mdx b/.context/program-examples-mdx/counter/native/src/lib-rs.mdx new file mode 100644 index 00000000..0f4920d9 --- /dev/null +++ b/.context/program-examples-mdx/counter/native/src/lib-rs.mdx @@ -0,0 +1,306 @@ +--- +title: "counter/native/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/native/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_macros::pubkey; +use light_sdk::{ + account::LightAccount, + address::v1::derive_address, + cpi::{ + v1::{CpiAccounts, LightSystemProgramCpi}, + CpiSigner, InvokeLightSystemProgram, LightCpiInstruction, + }, + derive_light_cpi_signer, + error::LightSdkError, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, LightHasher, +}; +use solana_program::{ + account_info::AccountInfo, entrypoint, program_error::ProgramError, pubkey::Pubkey, +}; +pub const ID: Pubkey = pubkey!("GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX"); +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX"); + +entrypoint!(process_instruction); + +#[repr(u8)] +pub enum InstructionType { + CreateCounter = 0, + IncrementCounter = 1, + DecrementCounter = 2, + ResetCounter = 3, + CloseCounter = 4, +} + +impl TryFrom for InstructionType { + type Error = LightSdkError; + + fn try_from(value: u8) -> Result { + match value { + 0 => Ok(InstructionType::CreateCounter), + 1 => Ok(InstructionType::IncrementCounter), + 2 => Ok(InstructionType::DecrementCounter), + 3 => Ok(InstructionType::ResetCounter), + 4 => Ok(InstructionType::CloseCounter), + _ => 
panic!("Invalid instruction discriminator."), + } + } +} + +#[derive( + Debug, Default, Clone, BorshSerialize, BorshDeserialize, LightDiscriminator, LightHasher, +)] +pub struct CounterAccount { + #[hash] + pub owner: Pubkey, + pub value: u64, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct CreateCounterInstructionData { + pub proof: ValidityProof, + pub address_tree_info: PackedAddressTreeInfo, + pub output_state_tree_index: u8, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct IncrementCounterInstructionData { + pub proof: ValidityProof, + pub counter_value: u64, + pub account_meta: CompressedAccountMeta, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct DecrementCounterInstructionData { + pub proof: ValidityProof, + pub counter_value: u64, + pub account_meta: CompressedAccountMeta, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct ResetCounterInstructionData { + pub proof: ValidityProof, + pub counter_value: u64, + pub account_meta: CompressedAccountMeta, +} + +#[derive(BorshSerialize, BorshDeserialize)] +pub struct CloseCounterInstructionData { + pub proof: ValidityProof, + pub counter_value: u64, + pub account_meta: CompressedAccountMeta, +} + +#[derive(Debug, Clone)] +pub enum CounterError { + Unauthorized, + Overflow, + Underflow, +} + +impl From for ProgramError { + fn from(e: CounterError) -> Self { + match e { + CounterError::Unauthorized => ProgramError::Custom(1), + CounterError::Overflow => ProgramError::Custom(2), + CounterError::Underflow => ProgramError::Custom(3), + } + } +} + +pub fn process_instruction( + program_id: &Pubkey, + accounts: &[AccountInfo], + instruction_data: &[u8], +) -> Result<(), ProgramError> { + if program_id != &crate::ID { + return Err(ProgramError::IncorrectProgramId); + } + if instruction_data.is_empty() { + return Err(ProgramError::InvalidInstructionData); + } + + let discriminator = InstructionType::try_from(instruction_data[0]) + .map_err(|_| 
ProgramError::InvalidInstructionData)?; + + match discriminator { + InstructionType::CreateCounter => { + let instuction_data = + CreateCounterInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + create_counter(accounts, instuction_data) + } + InstructionType::IncrementCounter => { + let instuction_data = + IncrementCounterInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + increment_counter(accounts, instuction_data) + } + InstructionType::DecrementCounter => { + let instuction_data = + DecrementCounterInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + decrement_counter(accounts, instuction_data) + } + InstructionType::ResetCounter => { + let instuction_data = + ResetCounterInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + reset_counter(accounts, instuction_data) + } + InstructionType::CloseCounter => { + let instuction_data = + CloseCounterInstructionData::try_from_slice(&instruction_data[1..]) + .map_err(|_| ProgramError::InvalidInstructionData)?; + close_counter(accounts, instuction_data) + } + } +} + +pub fn create_counter( + accounts: &[AccountInfo], + instuction_data: CreateCounterInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + let (address, address_seed) = derive_address( + &[b"counter", signer.key.as_ref()], + &instuction_data + .address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ProgramError::NotEnoughAccountKeys)?, + &ID, + ); + + let new_address_params = instuction_data + .address_tree_info + .into_new_address_params_packed(address_seed); + + let mut counter = LightAccount::::new_init( + &ID, + Some(address), + 
instuction_data.output_state_tree_index, + ); + counter.owner = *signer.key; + counter.value = 0; + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instuction_data.proof) + .with_light_account(counter)? + .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) +} + +pub fn increment_counter( + accounts: &[AccountInfo], + instuction_data: IncrementCounterInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let mut counter = LightAccount::::new_mut( + &ID, + &instuction_data.account_meta, + CounterAccount { + owner: *signer.key, + value: instuction_data.counter_value, + }, + )?; + + counter.value = counter.value.checked_add(1).ok_or(CounterError::Overflow)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instuction_data.proof) + .with_light_account(counter)? + .invoke(light_cpi_accounts)?; + + Ok(()) +} + +pub fn decrement_counter( + accounts: &[AccountInfo], + instuction_data: DecrementCounterInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let mut counter = LightAccount::::new_mut( + &ID, + &instuction_data.account_meta, + CounterAccount { + owner: *signer.key, + value: instuction_data.counter_value, + }, + )?; + + counter.value = counter + .value + .checked_sub(1) + .ok_or(CounterError::Underflow)?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instuction_data.proof) + .with_light_account(counter)? 
+ .invoke(light_cpi_accounts)?; + + Ok(()) +} + +pub fn reset_counter( + accounts: &[AccountInfo], + instuction_data: ResetCounterInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let mut counter = LightAccount::::new_mut( + &ID, + &instuction_data.account_meta, + CounterAccount { + owner: *signer.key, + value: instuction_data.counter_value, + }, + )?; + + counter.value = 0; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instuction_data.proof) + .with_light_account(counter)? + .invoke(light_cpi_accounts)?; + + Ok(()) +} + +pub fn close_counter( + accounts: &[AccountInfo], + instuction_data: CloseCounterInstructionData, +) -> Result<(), ProgramError> { + let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?; + + let counter = LightAccount::::new_close( + &ID, + &instuction_data.account_meta, + CounterAccount { + owner: *signer.key, + value: instuction_data.counter_value, + }, + )?; + + let light_cpi_accounts = CpiAccounts::new(signer, &accounts[1..], LIGHT_CPI_SIGNER); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instuction_data.proof) + .with_light_account(counter)? 
+ .invoke(light_cpi_accounts)?; + + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/counter/native/tests/test-rs.mdx b/.context/program-examples-mdx/counter/native/tests/test-rs.mdx new file mode 100644 index 00000000..b8c32b55 --- /dev/null +++ b/.context/program-examples-mdx/counter/native/tests/test-rs.mdx @@ -0,0 +1,384 @@ +--- +title: "counter/native/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/native/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use borsh::{BorshDeserialize, BorshSerialize}; +use counter::{ + CloseCounterInstructionData, CounterAccount, CreateCounterInstructionData, + DecrementCounterInstructionData, IncrementCounterInstructionData, ResetCounterInstructionData, +}; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::address::v1::derive_address; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_counter() { + let config = ProgramTestConfig::new(true, Some(vec![("counter", counter::ID)])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let address_tree_pubkey = address_tree_info.tree; + + // Create counter + let (address, _) = derive_address( + &[b"counter", payer.pubkey().as_ref()], + &address_tree_pubkey, + &counter::ID, + ); + let merkle_tree_pubkey = rpc.get_random_state_tree_info().unwrap().tree; + + create_counter( + &payer, + &mut rpc, + &merkle_tree_pubkey, + address_tree_pubkey, + address, + ) + .await + .unwrap(); + + // Get the created counter + let compressed_counter = rpc + .get_compressed_account(address, 
None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(compressed_counter.address.unwrap(), address); + + // Test increment + increment_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); + + let compressed_counter = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Test decrement + decrement_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); + + let compressed_counter = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Test reset + reset_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); + + let compressed_counter = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Test close + close_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); + + // Check that it was closed correctly (account data should be default). + let closed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(closed_account.data, Some(Default::default())); +} + +pub async fn create_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? 
+ .value; + + let output_merkle_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (account_metas, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateCounterInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_merkle_tree_index, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: account_metas, + data: [ + &[counter::InstructionType::CreateCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn increment_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = IncrementCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: account_metas, + data: [ + &[counter::InstructionType::IncrementCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn decrement_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = DecrementCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: account_metas, + data: [ + &[counter::InstructionType::DecrementCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn reset_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = ResetCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: account_metas, + data: [ + &[counter::InstructionType::ResetCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn close_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta_close = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (account_metas, _, _) = accounts.to_account_metas(); + let instruction_data = CloseCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta_close, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID, + accounts: account_metas, + data: [ + &[counter::InstructionType::CloseCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx b/.context/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx new file mode 100644 index 00000000..acb09301 --- /dev/null +++ b/.context/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx @@ -0,0 +1,51 @@ +--- +title: "counter/pinocchio/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/pinocchio/Cargo.toml" +--- + +```toml +[package] +name = "counter" +version = "1.0.0" +description = "Pinocchio counter program using light protocol" +repository = "https://github.com/Lightprotocol/program-examples" +license = "Apache-2.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "counter" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +test-sbf = [] +default = [] + +[dependencies] +light-sdk-pinocchio = { version = "0.16.0" , features = ["light-account"] } +light-sdk-types = "0.16.0" 
+light-hasher = "5.0.0" +pinocchio = "0.9.2" +light-macros = "2.1.0" +borsh = "0.10.4" +solana-pubkey = "2.3" + +[dev-dependencies] +light-program-test = "0.16.0" +light-client = "0.16.0" +tokio = "1.43.0" +solana-sdk = "2.3" +light-hasher = { version = "5.0.0", features = ["solana"] } +light-compressed-account = { version = "0.6.0", features = ["solana"] } +light-sdk = "0.16.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx b/.context/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx new file mode 100644 index 00000000..e5d5d13b --- /dev/null +++ b/.context/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "counter/pinocchio/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/pinocchio/Xargo.toml" +--- + +```toml +[target.bpfel-unknown-unknown.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx b/.context/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx new file mode 100644 index 00000000..d6aa16c1 --- /dev/null +++ b/.context/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx @@ -0,0 +1,340 @@ +--- +title: "counter/pinocchio/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/pinocchio/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] + +use borsh::{BorshDeserialize, BorshSerialize}; +use light_macros::pubkey_array; +use light_sdk_pinocchio::{ + address::v1::derive_address, + cpi::{ + v1::{CpiAccounts, LightSystemProgramCpi}, + CpiAccountsConfig, CpiSigner, InvokeLightSystemProgram, LightCpiInstruction, + }, + derive_light_cpi_signer, + error::LightSdkError, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightAccount, 
LightDiscriminator, LightHasher,
+};
+use pinocchio::{
+    account_info::AccountInfo, entrypoint, program_error::ProgramError, pubkey::Pubkey,
+};
+
+pub const ID: Pubkey = pubkey_array!("GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX");
+pub const LIGHT_CPI_SIGNER: CpiSigner =
+    derive_light_cpi_signer!("GRLu2hKaAiMbxpkAM1HeXzks9YeGuz18SEgXEizVvPqX");
+
+entrypoint!(process_instruction);
+
+fn to_custom_error<E: Into<ProgramError>>(e: E) -> ProgramError {
+    ProgramError::Custom(u64::from(e.into()) as u32)
+}
+
+fn to_custom_error_u32<E: Into<LightSdkError>>(e: E) -> ProgramError {
+    ProgramError::Custom(u32::from(e.into()))
+}
+
+#[repr(u8)]
+pub enum InstructionType {
+    CreateCounter = 0,
+    IncrementCounter = 1,
+    DecrementCounter = 2,
+    ResetCounter = 3,
+    CloseCounter = 4,
+}
+
+impl TryFrom<u8> for InstructionType {
+    type Error = LightSdkError;
+
+    fn try_from(value: u8) -> Result<Self, Self::Error> {
+        match value {
+            0 => Ok(InstructionType::CreateCounter),
+            1 => Ok(InstructionType::IncrementCounter),
+            2 => Ok(InstructionType::DecrementCounter),
+            3 => Ok(InstructionType::ResetCounter),
+            4 => Ok(InstructionType::CloseCounter),
+            _ => panic!("Invalid instruction discriminator."),
+        }
+    }
+}
+
+#[derive(
+    Debug, Default, Clone, BorshSerialize, BorshDeserialize, LightDiscriminator, LightHasher,
+)]
+pub struct CounterAccount {
+    #[hash]
+    pub owner: Pubkey,
+    pub value: u64,
+}
+
+#[derive(BorshSerialize, BorshDeserialize)]
+pub struct CreateCounterInstructionData {
+    pub proof: ValidityProof,
+    pub address_tree_info: PackedAddressTreeInfo,
+    pub output_state_tree_index: u8,
+}
+
+#[derive(BorshSerialize, BorshDeserialize)]
+pub struct IncrementCounterInstructionData {
+    pub proof: ValidityProof,
+    pub counter_value: u64,
+    pub account_meta: CompressedAccountMeta,
+}
+
+#[derive(BorshSerialize, BorshDeserialize)]
+pub struct DecrementCounterInstructionData {
+    pub proof: ValidityProof,
+    pub counter_value: u64,
+    pub account_meta: CompressedAccountMeta,
+}
+
+#[derive(BorshSerialize, BorshDeserialize)]
+pub struct 
ResetCounterInstructionData {
+    pub proof: ValidityProof,
+    pub counter_value: u64,
+    pub account_meta: CompressedAccountMeta,
+}
+
+#[derive(BorshSerialize, BorshDeserialize)]
+pub struct CloseCounterInstructionData {
+    pub proof: ValidityProof,
+    pub counter_value: u64,
+    pub account_meta: CompressedAccountMeta,
+}
+
+#[derive(Debug, Clone)]
+pub enum CounterError {
+    Unauthorized,
+    Overflow,
+    Underflow,
+}
+
+impl From<CounterError> for ProgramError {
+    fn from(e: CounterError) -> Self {
+        match e {
+            CounterError::Unauthorized => ProgramError::Custom(1),
+            CounterError::Overflow => ProgramError::Custom(2),
+            CounterError::Underflow => ProgramError::Custom(3),
+        }
+    }
+}
+
+pub fn process_instruction(
+    program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    instruction_data: &[u8],
+) -> Result<(), ProgramError> {
+    if program_id != &Pubkey::from(crate::ID) {
+        return Err(ProgramError::IncorrectProgramId);
+    }
+    if instruction_data.is_empty() {
+        return Err(ProgramError::InvalidInstructionData);
+    }
+
+    let discriminator = InstructionType::try_from(instruction_data[0])
+        .map_err(|_| ProgramError::InvalidInstructionData)?;
+
+    let result = match discriminator {
+        InstructionType::CreateCounter => {
+            let instruction_data =
+                CreateCounterInstructionData::try_from_slice(&instruction_data[1..])
+                    .map_err(|_| ProgramError::InvalidInstructionData)?;
+            create_counter(accounts, instruction_data)
+        }
+        InstructionType::IncrementCounter => {
+            let instruction_data =
+                IncrementCounterInstructionData::try_from_slice(&instruction_data[1..])
+                    .map_err(|_| ProgramError::InvalidInstructionData)?;
+            increment_counter(accounts, instruction_data)
+        }
+        InstructionType::DecrementCounter => {
+            let instruction_data =
+                DecrementCounterInstructionData::try_from_slice(&instruction_data[1..])
+                    .map_err(|_| ProgramError::InvalidInstructionData)?;
+            decrement_counter(accounts, instruction_data)
+        }
+        InstructionType::ResetCounter => {
+            let instruction_data =
+                
ResetCounterInstructionData::try_from_slice(&instruction_data[1..])
+                    .map_err(|_| ProgramError::InvalidInstructionData)?;
+            reset_counter(accounts, instruction_data)
+        }
+        InstructionType::CloseCounter => {
+            let instruction_data =
+                CloseCounterInstructionData::try_from_slice(&instruction_data[1..])
+                    .map_err(|_| ProgramError::InvalidInstructionData)?;
+            close_counter(accounts, instruction_data)
+        }
+    };
+
+    result.map_err(|e| ProgramError::Custom(u64::from(e) as u32))
+}
+
+pub fn create_counter(
+    accounts: &[AccountInfo],
+    instruction_data: CreateCounterInstructionData,
+) -> Result<(), ProgramError> {
+    let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?;
+
+    let config = CpiAccountsConfig::new(LIGHT_CPI_SIGNER);
+    let cpi_accounts = CpiAccounts::try_new_with_config(signer, &accounts[1..], config)
+        .map_err(to_custom_error_u32)?;
+
+    let tree_pubkey = cpi_accounts
+        .get_tree_account_info(
+            instruction_data
+                .address_tree_info
+                .address_merkle_tree_pubkey_index as usize,
+        )
+        .map_err(to_custom_error_u32)?
+        .key();
+
+    let program_id = Pubkey::from(ID);
+    let (address, address_seed) = derive_address(
+        &[b"counter", signer.key().as_ref()],
+        &tree_pubkey,
+        &program_id,
+    );
+
+    let new_address_params = instruction_data
+        .address_tree_info
+        .into_new_address_params_packed(address_seed);
+
+    let mut counter = LightAccount::<CounterAccount>::new_init(
+        &program_id,
+        Some(address),
+        instruction_data.output_state_tree_index,
+    );
+
+    counter.owner = *signer.key();
+    counter.value = 0;
+
+    LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof)
+        .with_light_account(counter)
+        .map_err(to_custom_error)?
+        .with_new_addresses(&[new_address_params])
+        .invoke(cpi_accounts)
+}
+
+pub fn increment_counter(
+    accounts: &[AccountInfo],
+    instruction_data: IncrementCounterInstructionData,
+) -> Result<(), ProgramError> {
+    let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?;
+
+    let program_id = Pubkey::from(ID);
+    let mut counter = LightAccount::<CounterAccount>::new_mut(
+        &program_id,
+        &instruction_data.account_meta,
+        CounterAccount {
+            owner: *signer.key(),
+            value: instruction_data.counter_value,
+        },
+    )
+    .map_err(|e| ProgramError::Custom(u64::from(e) as u32))?;
+
+    counter.value = counter.value.checked_add(1).ok_or(CounterError::Overflow)?;
+
+    let config = CpiAccountsConfig::new(LIGHT_CPI_SIGNER);
+    let cpi_accounts = CpiAccounts::try_new_with_config(signer, &accounts[1..], config)
+        .map_err(to_custom_error_u32)?;
+
+    LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof)
+        .with_light_account(counter)
+        .map_err(to_custom_error)?
+        .invoke(cpi_accounts)
+}
+
+pub fn decrement_counter(
+    accounts: &[AccountInfo],
+    instruction_data: DecrementCounterInstructionData,
+) -> Result<(), ProgramError> {
+    let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?;
+
+    let program_id = Pubkey::from(ID);
+    let mut counter = LightAccount::<CounterAccount>::new_mut(
+        &program_id,
+        &instruction_data.account_meta,
+        CounterAccount {
+            owner: *signer.key(),
+            value: instruction_data.counter_value,
+        },
+    )
+    .map_err(|e| ProgramError::Custom(u64::from(e) as u32))?;
+
+    counter.value = counter
+        .value
+        .checked_sub(1)
+        .ok_or(CounterError::Underflow)?;
+
+    let config = CpiAccountsConfig::new(LIGHT_CPI_SIGNER);
+    let cpi_accounts = CpiAccounts::try_new_with_config(signer, &accounts[1..], config)
+        .map_err(to_custom_error_u32)?;
+
+    LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof)
+        .with_light_account(counter)
+        .map_err(to_custom_error)?
+        .invoke(cpi_accounts)
+}
+
+pub fn reset_counter(
+    accounts: &[AccountInfo],
+    instruction_data: ResetCounterInstructionData,
+) -> Result<(), ProgramError> {
+    let signer = accounts.first().ok_or(LightSdkError::ProgramError(
+        ProgramError::NotEnoughAccountKeys,
+    ))?;
+
+    let program_id = Pubkey::from(ID);
+    let mut counter = LightAccount::<CounterAccount>::new_mut(
+        &program_id,
+        &instruction_data.account_meta,
+        CounterAccount {
+            owner: *signer.key(),
+            value: instruction_data.counter_value,
+        },
+    )
+    .map_err(|e| ProgramError::Custom(u64::from(e) as u32))?;
+
+    counter.value = 0;
+
+    let config = CpiAccountsConfig::new(LIGHT_CPI_SIGNER);
+    let cpi_accounts = CpiAccounts::try_new_with_config(signer, &accounts[1..], config)
+        .map_err(to_custom_error_u32)?;
+
+    LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof)
+        .with_light_account(counter)
+        .map_err(to_custom_error)?
+        .invoke(cpi_accounts)
+}
+
+pub fn close_counter(
+    accounts: &[AccountInfo],
+    instruction_data: CloseCounterInstructionData,
+) -> Result<(), ProgramError> {
+    let signer = accounts.first().ok_or(ProgramError::NotEnoughAccountKeys)?;
+
+    let program_id = Pubkey::from(ID);
+    let counter = LightAccount::<CounterAccount>::new_close(
+        &program_id,
+        &instruction_data.account_meta,
+        CounterAccount {
+            owner: *signer.key(),
+            value: instruction_data.counter_value,
+        },
+    )
+    .map_err(|e| ProgramError::Custom(u64::from(e) as u32))?;
+
+    let config = CpiAccountsConfig::new(LIGHT_CPI_SIGNER);
+    let cpi_accounts = CpiAccounts::try_new_with_config(signer, &accounts[1..], config)
+        .map_err(to_custom_error_u32)?;
+
+    LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, instruction_data.proof)
+        .with_light_account(counter)
+        .map_err(to_custom_error)?
+ .invoke(cpi_accounts) +} +``` diff --git a/.context/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx b/.context/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx new file mode 100644 index 00000000..1e246941 --- /dev/null +++ b/.context/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx @@ -0,0 +1,375 @@ +--- +title: "counter/pinocchio/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/counter/pinocchio/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use borsh::{BorshDeserialize, BorshSerialize}; +use counter::{ + CloseCounterInstructionData, CounterAccount, CreateCounterInstructionData, + DecrementCounterInstructionData, IncrementCounterInstructionData, ResetCounterInstructionData, +}; +use light_client::indexer::CompressedAccount; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::address::v1::derive_address; +use light_sdk::instruction::{ + account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig, +}; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signer}, +}; + +#[tokio::test] +async fn test_counter() { + let config = ProgramTestConfig::new(true, Some(vec![("counter", counter::ID.into())])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v1(); + let address_tree_pubkey = address_tree_info.tree; + + // Create counter + let (address, _) = derive_address( + &[b"counter", payer.pubkey().as_ref()], + &address_tree_pubkey, + &counter::ID.into(), + ); + let merkle_tree_pubkey = rpc.get_random_state_tree_info_v1().unwrap().tree; + + create_counter( + &payer, + &mut rpc, + &merkle_tree_pubkey, + address_tree_pubkey, + address, + ) + .await + .unwrap(); + + // Get the created counter + let compressed_counter = rpc + 
.get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + assert_eq!(compressed_counter.address.unwrap(), address); + + // Test increment + increment_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); + + let compressed_counter = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Test decrement + decrement_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); + + let compressed_counter = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Test reset + reset_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); + + let compressed_counter = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + // Test close + close_counter(&payer, &mut rpc, &compressed_counter) + .await + .unwrap(); +} + +pub async fn create_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + merkle_tree_pubkey: &Pubkey, + address_tree_pubkey: Pubkey, + address: [u8; 32], +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID.into()); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address, + tree: address_tree_pubkey, + }], + None, + ) + .await? 
+ .value; + + let output_merkle_tree_index = accounts.insert_or_get(*merkle_tree_pubkey); + let packed_address_tree_info = rpc_result.pack_tree_infos(&mut accounts).address_trees[0]; + let (accounts, _, _) = accounts.to_account_metas(); + + let instruction_data = CreateCounterInstructionData { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_info, + output_state_tree_index: output_merkle_tree_index, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID.into(), + accounts, + data: [ + &[counter::InstructionType::CreateCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn increment_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID.into()); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (accounts, _, _) = accounts.to_account_metas(); + let instruction_data = IncrementCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID.into(), + accounts, + data: [ + &[counter::InstructionType::IncrementCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn decrement_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID.into()); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (accounts, _, _) = accounts.to_account_metas(); + let instruction_data = DecrementCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID.into(), + accounts, + data: [ + &[counter::InstructionType::DecrementCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn reset_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID.into()); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (accounts, _, _) = accounts.to_account_metas(); + let instruction_data = ResetCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID.into(), + accounts, + data: [ + &[counter::InstructionType::ResetCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} + +pub async fn close_counter( + payer: &Keypair, + rpc: &mut LightProgramTest, + compressed_account: &CompressedAccount, +) -> Result<(), RpcError> { + let system_account_meta_config = SystemAccountMetaConfig::new(counter::ID.into()); + let mut accounts = PackedAccounts::default(); + accounts.add_pre_accounts_signer(payer.pubkey()); + accounts.add_system_accounts(system_account_meta_config)?; + + let hash = compressed_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_accounts = rpc_result + .pack_tree_infos(&mut accounts) + .state_trees + .unwrap(); + + let counter_account = + CounterAccount::deserialize(&mut compressed_account.data.as_ref().unwrap().data.as_slice()) + .unwrap(); + + let meta_close = CompressedAccountMeta { + tree_info: packed_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + output_state_tree_index: packed_accounts.output_tree_index, + }; + + let (accounts, _, _) = accounts.to_account_metas(); + let instruction_data = CloseCounterInstructionData { + proof: rpc_result.proof, + counter_value: counter_account.value, + account_meta: meta_close, + }; + let inputs = instruction_data.try_to_vec().unwrap(); + + let instruction = Instruction { + program_id: counter::ID.into(), + accounts, + data: [ + &[counter::InstructionType::CloseCounter as u8][..], + &inputs[..], + ] + .concat(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await?; + Ok(()) +} +``` diff --git a/.context/program-examples-mdx/create-and-update/Anchor-toml.mdx b/.context/program-examples-mdx/create-and-update/Anchor-toml.mdx new file mode 100644 index 00000000..d31ec534 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/Anchor-toml.mdx @@ -0,0 +1,26 @@ +--- +title: "create-and-update/Anchor.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/Anchor.toml" +--- + +```toml +[toolchain] +anchor_version = "0.31.1" + +[features] +resolution = true +skip-lint = false + +[programs.localnet] +create_and_update = "J6K7nvoVpJHfH13zn47vptnZo1JdUGCGSiVmtfkzz9NA" + +[registry] +url = "https://api.apr.dev" + +[provider] +cluster = "localnet" +wallet = "~/.config/solana/id.json" + +[scripts] +test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" +``` diff --git a/.context/program-examples-mdx/create-and-update/Cargo-toml.mdx b/.context/program-examples-mdx/create-and-update/Cargo-toml.mdx 
new file mode 100644 index 00000000..1f95b339 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/Cargo-toml.mdx @@ -0,0 +1,22 @@ +--- +title: "create-and-update/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/Cargo.toml" +--- + +```toml +[workspace] +members = [ + "programs/create-and-update", +] +resolver = "2" + +[profile.release] +overflow-checks = true +lto = "fat" +codegen-units = 1 + +[profile.release.build-override] +opt-level = 3 +incremental = false +codegen-units = 1 +``` diff --git a/.context/program-examples-mdx/create-and-update/README.mdx b/.context/program-examples-mdx/create-and-update/README.mdx new file mode 100644 index 00000000..81a92c2d --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/README.mdx @@ -0,0 +1,51 @@ +--- +title: "create-and-update/README.md" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/README.md" +--- + +```markdown +# Create and Update Example + +This example demonstrates the basic operations of compressed accounts using Light Protocol. It shows how to create compressed accounts and how to atomically create and update compressed accounts in a single instruction. + +## Instructions + +### 1. `create_compressed_account` +Creates a new compressed account with initial data (owner and message). + +### 2. 
`create_and_update` +Demonstrates atomic operations in a single instruction: +- Creates a new compressed account with a "second" seed +- Updates an existing compressed account (created with "first" seed) +- Uses a single validity proof to prove inclusion of the existing account and create the new address + +## Data Structure + +```rust +pub struct DataAccount { + #[hash] + pub owner: Pubkey, + #[hash] + pub message: String, +} +``` + +## Build and Test + +```bash +# Build the program +cargo build-sbf + +# Run tests +cargo test-sbf +``` + +## Key Concepts Demonstrated + +- **Compressed Account Creation**: Using `LightAccount::new_init()` to create new compressed accounts +- **Compressed Account Updates**: Using `LightAccount::new_mut()` to update existing compressed accounts +- **Address Derivation**: Using deterministic seeds (`FIRST_SEED`, `SECOND_SEED`) for address generation +- **Atomic Operations**: Performing multiple compressed account operations in a single instruction +- **Authorization**: Verifying ownership before allowing updates +- **Single Validity Proof**: Using one proof to handle both input (existing account) and output (new account) operations +``` diff --git a/.context/program-examples-mdx/create-and-update/package-json.mdx b/.context/program-examples-mdx/create-and-update/package-json.mdx new file mode 100644 index 00000000..fa37ae02 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/package-json.mdx @@ -0,0 +1,30 @@ +--- +title: "create-and-update/package.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/package.json" +--- + +```json +{ + "license": "ISC", + "scripts": { + "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w", + "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check", + "test": "ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts" + }, + "dependencies": { + "@coral-xyz/anchor": "0.31.1", + "@lightprotocol/stateless.js": "0.22.1-alpha.1", + "dotenv": "^16.5.0" + }, + 
"devDependencies": { + "@types/bn.js": "^5.1.0", + "@types/chai": "^4.3.0", + "@types/mocha": "^9.0.0", + "chai": "^4.3.4", + "mocha": "^9.0.3", + "prettier": "^2.6.2", + "ts-mocha": "^10.1.0", + "typescript": "^5.0.0" + } +} +``` diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx b/.context/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx new file mode 100644 index 00000000..902ff2a9 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx @@ -0,0 +1,48 @@ +--- +title: "create-and-update/programs/create-and-update/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/programs/create-and-update/Cargo.toml" +--- + +```toml +[package] +name = "create-and-update" +version = "0.1.0" +description = "Created with Anchor" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "create_and_update" + +[features] +no-entrypoint = [] +no-idl = [] +no-log-ix-name = [] +cpi = ["no-entrypoint"] +default = ["idl-build"] +test-sbf = [] +idl-build = ["anchor-lang/idl-build", "light-sdk/idl-build"] + +[dependencies] +anchor-lang = "0.31.1" +borsh = "0.10.4" +light-sdk = { version = "0.16.0", features = ["anchor", "v2"] } +light-hasher = "5.0.0" +light-sdk-types = { version = "0.16.0", features = ["v2"] } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +solana-sdk = "2.2" + +[dev-dependencies] +light-program-test = { version = "0.16.0", features = ["v2"] } +light-client = { version = "0.16.0", features = ["v2"] } +tokio = "1.43.0" +serial_test = "3.2.0" + +[lints.rust.unexpected_cfgs] +level = "allow" +check-cfg = [ + 'cfg(target_os, values("solana"))', + 'cfg(feature, values("frozen-abi", "no-entrypoint"))', +] +``` diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx 
b/.context/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx new file mode 100644 index 00000000..974bf26a --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "create-and-update/programs/create-and-update/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/programs/create-and-update/Xargo.toml" +--- + +```toml +[target.sbf-solana-solana.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx b/.context/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx new file mode 100644 index 00000000..22b4b2dc --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx @@ -0,0 +1,300 @@ +--- +title: "create-and-update/programs/create-and-update/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/programs/create-and-update/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use light_sdk::{ + account::LightAccount, + address::v2::derive_address, + cpi::{v2::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{account_meta::CompressedAccountMeta, PackedAddressTreeInfo, ValidityProof}, + LightDiscriminator, +}; + +declare_id!("J6K7nvoVpJHfH13zn47vptnZo1JdUGCGSiVmtfkzz9NA"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("J6K7nvoVpJHfH13zn47vptnZo1JdUGCGSiVmtfkzz9NA"); + +pub const FIRST_SEED: &[u8] = b"first"; +pub const SECOND_SEED: &[u8] = b"second"; + +#[program] +pub mod create_and_update { + + use super::*; + use light_sdk::cpi::{ + v2::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction, + }; + + /// Creates a new compressed account with 
initial data
+    pub fn create_compressed_account<'info>(
+        ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>,
+        proof: ValidityProof,
+        address_tree_info: PackedAddressTreeInfo,
+        output_state_tree_index: u8,
+        message: String,
+    ) -> Result<()> {
+        let light_cpi_accounts = CpiAccounts::new(
+            ctx.accounts.signer.as_ref(),
+            ctx.remaining_accounts,
+            crate::LIGHT_CPI_SIGNER,
+        );
+
+        let address_tree_pubkey = address_tree_info
+            .get_tree_pubkey(&light_cpi_accounts)
+            .map_err(|_| ErrorCode::AccountNotEnoughKeys)?;
+        let (address, address_seed) = derive_address(
+            &[FIRST_SEED, ctx.accounts.signer.key().as_ref()],
+            &address_tree_pubkey,
+            &crate::ID,
+        );
+
+        let mut data_account = LightAccount::<DataAccount>::new_init(
+            &crate::ID,
+            Some(address),
+            output_state_tree_index,
+        );
+
+        data_account.owner = ctx.accounts.signer.key();
+        data_account.message = message;
+
+        LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
+            .with_light_account(data_account)?
+            .with_new_addresses(&[
+                address_tree_info.into_new_address_params_assigned_packed(address_seed, Some(0))
+            ])
+            .invoke(light_cpi_accounts)?;
+
+        Ok(())
+    }
+
+    /// Creates a new compressed account and updates an existing one in a single instruction
+    pub fn create_and_update<'info>(
+        ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>,
+        proof: ValidityProof,
+        existing_account: ExistingCompressedAccountIxData,
+        new_account: NewCompressedAccountIxData,
+    ) -> Result<()> {
+        let light_cpi_accounts = CpiAccounts::new(
+            ctx.accounts.signer.as_ref(),
+            ctx.remaining_accounts,
+            crate::LIGHT_CPI_SIGNER,
+        );
+
+        let new_account_address_tree_pubkey = &new_account
+            .address_tree_info
+            .get_tree_pubkey(&light_cpi_accounts)
+            .map_err(|_| ErrorCode::AccountNotEnoughKeys)?;
+
+        // Create new compressed account
+        let (new_address, new_address_seed) = derive_address(
+            &[SECOND_SEED, ctx.accounts.signer.key().as_ref()],
+            new_account_address_tree_pubkey,
+            &crate::ID,
+        );
+
+        let mut new_data_account 
= LightAccount::<DataAccount>::new_init(
+            &crate::ID,
+            Some(new_address),
+            existing_account.account_meta.output_state_tree_index,
+        );
+        new_data_account.owner = ctx.accounts.signer.key();
+        new_data_account.message = new_account.message.clone();
+
+        let mut updated_data_account = LightAccount::<DataAccount>::new_mut(
+            &crate::ID,
+            &existing_account.account_meta,
+            DataAccount {
+                owner: ctx.accounts.signer.key(),
+                message: existing_account.message.clone(),
+            },
+        )?;
+
+        // Update the message
+        updated_data_account.message = existing_account.update_message.clone();
+
+        LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
+            .with_light_account(new_data_account)?
+            .with_light_account(updated_data_account)?
+            .with_new_addresses(&[new_account
+                .address_tree_info
+                .into_new_address_params_assigned_packed(new_address_seed, Some(0))])
+            .invoke(light_cpi_accounts)?;
+
+        msg!(
+            "Created new account with message: '{}' and updated existing account to: '{}'",
+            new_account.message,
+            existing_account.update_message
+        );
+
+        Ok(())
+    }
+
+    /// Updates two existing compressed accounts in a single instruction
+    pub fn update_two_accounts<'info>(
+        ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>,
+        proof: ValidityProof,
+        first_account: ExistingCompressedAccountIxData,
+        second_account: ExistingCompressedAccountIxData,
+    ) -> Result<()> {
+        let light_cpi_accounts = CpiAccounts::new(
+            ctx.accounts.signer.as_ref(),
+            ctx.remaining_accounts,
+            crate::LIGHT_CPI_SIGNER,
+        );
+
+        // Update first compressed account
+        let mut updated_first_account = LightAccount::<DataAccount>::new_mut(
+            &crate::ID,
+            &first_account.account_meta,
+            DataAccount {
+                owner: ctx.accounts.signer.key(),
+                message: first_account.message.clone(),
+            },
+        )?;
+
+        // Update the message of the first account
+        updated_first_account.message = first_account.update_message.clone();
+
+        // Update second compressed account
+        let mut updated_second_account = LightAccount::<DataAccount>::new_mut(
+            &crate::ID,
+            &second_account.account_meta,
+            
DataAccount {
+                owner: ctx.accounts.signer.key(),
+                message: second_account.message.clone(),
+            },
+        )?;
+
+        // Update the message of the second account
+        updated_second_account.message = second_account.update_message.clone();
+
+        LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof)
+            .with_light_account(updated_first_account)?
+            .with_light_account(updated_second_account)?
+            .invoke(light_cpi_accounts)?;
+
+        msg!(
+            "Updated first account to: '{}' and second account to: '{}'",
+            first_account.update_message,
+            second_account.update_message
+        );
+
+        Ok(())
+    }
+
+    /// Creates two new compressed accounts with different addresses in a single instruction
+    pub fn create_two_accounts<'info>(
+        ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>,
+        proof: ValidityProof,
+        address_tree_info: PackedAddressTreeInfo,
+        output_state_tree_index: u8,
+        byte_data: [u8; 31],
+        message: String,
+    ) -> Result<()> {
+        let light_cpi_accounts = CpiAccounts::new(
+            ctx.accounts.signer.as_ref(),
+            ctx.remaining_accounts,
+            crate::LIGHT_CPI_SIGNER,
+        );
+
+        // Create first compressed account
+        let (first_address, first_address_seed) = derive_address(
+            &[FIRST_SEED, ctx.accounts.signer.key().as_ref()],
+            &address_tree_info
+                .get_tree_pubkey(&light_cpi_accounts)
+                .map_err(|_| ErrorCode::AccountNotEnoughKeys)?,
+            &crate::ID,
+        );
+
+        let mut first_data_account = LightAccount::<ByteDataAccount>::new_init(
+            &crate::ID,
+            Some(first_address),
+            output_state_tree_index,
+        );
+        first_data_account.owner = ctx.accounts.signer.key();
+        first_data_account.data = byte_data;
+
+        // Create second compressed account
+        let (second_address, second_address_seed) = derive_address(
+            &[SECOND_SEED, ctx.accounts.signer.key().as_ref()],
+            &address_tree_info
+                .get_tree_pubkey(&light_cpi_accounts)
+                .map_err(|_| ErrorCode::AccountNotEnoughKeys)?,
+            &crate::ID,
+        );
+
+        let mut second_data_account = LightAccount::<DataAccount>::new_init(
+            &crate::ID,
+            Some(second_address),
+            output_state_tree_index,
+        );
+        
second_data_account.owner = ctx.accounts.signer.key(); + second_data_account.message = message.clone(); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(first_data_account)? + .with_light_account(second_data_account)? + .with_new_addresses(&[ + address_tree_info + .into_new_address_params_assigned_packed(first_address_seed, Some(0)), + address_tree_info + .into_new_address_params_assigned_packed(second_address_seed, Some(1)), + ]) + .invoke(light_cpi_accounts)?; + + msg!( + "Created byte account with data: {:?} and string account with message: '{}'", + byte_data, + message + ); + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +#[derive(Clone, LightDiscriminator, Default, AnchorDeserialize, AnchorSerialize)] +pub struct DataAccount { + pub owner: Pubkey, + pub message: String, +} + +#[derive(Clone, LightDiscriminator, Default, AnchorDeserialize, AnchorSerialize)] +pub struct ByteDataAccount { + pub owner: Pubkey, + pub data: [u8; 31], +} + +#[derive(Clone, Debug, AnchorSerialize, AnchorDeserialize)] +pub struct ExistingCompressedAccountIxData { + pub account_meta: CompressedAccountMeta, + pub message: String, + pub update_message: String, +} + +#[derive(Clone, Debug, AnchorSerialize, AnchorDeserialize)] +pub struct NewCompressedAccountIxData { + pub address_tree_info: PackedAddressTreeInfo, + pub message: String, +} + +// stubs for idl. 
+#[event] +pub struct AccountTypes { + pub data_account: DataAccount, + pub byte_data_account: ByteDataAccount, +} +``` diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx b/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx new file mode 100644 index 00000000..7857a9d3 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx @@ -0,0 +1,396 @@ +--- +title: "create-and-update/programs/create-and-update/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/programs/create-and-update/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use anchor_lang::{AnchorDeserialize, InstructionData, ToAccountMetas}; +use create_and_update::{ + DataAccount, ExistingCompressedAccountIxData, NewCompressedAccountIxData, FIRST_SEED, + SECOND_SEED, +}; +use light_client::indexer::{CompressedAccount, TreeInfo}; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v2::derive_address, + instruction::{account_meta::CompressedAccountMeta, PackedAccounts, SystemAccountMetaConfig}, +}; +use serial_test::serial; +use solana_sdk::{ + instruction::Instruction, + signature::{Keypair, Signature, Signer}, +}; + +#[serial] +#[tokio::test] +async fn test_create_compressed_account() { + let config = ProgramTestConfig::new( + true, + Some(vec![("create_and_update", create_and_update::ID)]), + ); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v2(); + + let (address, _) = derive_address( + &[FIRST_SEED, payer.pubkey().as_ref()], + &address_tree_info.tree, + &create_and_update::ID, + ); + + // Create the compressed account + create_compressed_account( + &mut rpc, + &payer, + 
&address, + address_tree_info, + "Hello, World!".to_string(), + ) + .await + .unwrap(); + + // Check that it was created correctly + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + assert_eq!(compressed_account.leaf_index, 0); + let data = &compressed_account.data.as_ref().unwrap().data; + let account_data = DataAccount::deserialize(&mut &data[..]).unwrap(); + assert_eq!(account_data.owner, payer.pubkey()); + assert_eq!(account_data.message, "Hello, World!"); +} + +#[serial] +#[tokio::test] +async fn test_create_and_update() { + let config = ProgramTestConfig::new( + true, + Some(vec![("create_and_update", create_and_update::ID)]), + ); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v2(); + + let (initial_address, _) = derive_address( + &[FIRST_SEED, payer.pubkey().as_ref()], + &address_tree_info.tree, + &create_and_update::ID, + ); + + // Create the initial compressed account + create_compressed_account( + &mut rpc, + &payer, + &initial_address, + address_tree_info, + "Initial message".to_string(), + ) + .await + .unwrap(); + + // Get the created account for updating + let initial_compressed_account = rpc + .get_compressed_account(initial_address, None) + .await + .unwrap() + .value + .unwrap(); + + // Create and update in one instruction + create_and_update_accounts( + &mut rpc, + &payer, + &initial_compressed_account, + "Initial message".to_string(), + "New account message".to_string(), + "Updated message".to_string(), + ) + .await + .unwrap(); + + // Check the new account was created + let (new_address, _) = derive_address( + &[SECOND_SEED, payer.pubkey().as_ref()], + &address_tree_info.tree, + &create_and_update::ID, + ); + + let new_compressed_account = rpc + .get_compressed_account(new_address, None) + .await + .unwrap() + .value + .unwrap(); + + let new_data = 
&new_compressed_account.data.as_ref().unwrap().data; + let new_account_data = DataAccount::deserialize(&mut &new_data[..]).unwrap(); + assert_eq!(new_account_data.owner, payer.pubkey()); + assert_eq!(new_account_data.message, "New account message"); + + // Check the existing account was updated + let updated_compressed_account = rpc + .get_compressed_account(initial_address, None) + .await + .unwrap() + .value + .unwrap(); + + let updated_data = &updated_compressed_account.data.as_ref().unwrap().data; + let updated_account_data = DataAccount::deserialize(&mut &updated_data[..]).unwrap(); + assert_eq!(updated_account_data.owner, payer.pubkey()); + assert_eq!(updated_account_data.message, "Updated message"); + + // Now test updating both existing accounts with the third instruction + update_two_accounts( + &mut rpc, + &payer, + &updated_compressed_account, + "Updated message".to_string(), + "First account final message".to_string(), + &new_compressed_account, + "New account message".to_string(), + "Second account final message".to_string(), + ) + .await + .unwrap(); + + // Check both accounts were updated correctly + let final_first_account = rpc + .get_compressed_account(initial_address, None) + .await + .unwrap() + .value + .unwrap(); + + let final_first_data = &final_first_account.data.as_ref().unwrap().data; + let final_first_account_data = DataAccount::deserialize(&mut &final_first_data[..]).unwrap(); + assert_eq!( + final_first_account_data.message, + "First account final message" + ); + + let final_second_account = rpc + .get_compressed_account(new_address, None) + .await + .unwrap() + .value + .unwrap(); + + let final_second_data = &final_second_account.data.as_ref().unwrap().data; + let final_second_account_data = DataAccount::deserialize(&mut &final_second_data[..]).unwrap(); + assert_eq!( + final_second_account_data.message, + "Second account final message" + ); +} + +async fn create_compressed_account( + rpc: &mut R, + payer: &Keypair, + address: &[u8; 
32], + address_tree_info: TreeInfo, + message: String, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(create_and_update::ID); + remaining_accounts.add_system_accounts_v2(config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + + let packed_address_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .address_trees; + let output_state_tree_index = rpc + .get_random_state_tree_info()? + .pack_output_tree_index(&mut remaining_accounts)?; + + let instruction_data = create_and_update::instruction::CreateCompressedAccount { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_accounts[0], + output_state_tree_index, + message, + }; + let accounts = create_and_update::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_metas, _, _) = remaining_accounts.to_account_metas(); + let instruction = Instruction { + program_id: create_and_update::ID, + accounts: [accounts.to_account_metas(None), remaining_metas].concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn create_and_update_accounts( + rpc: &mut R, + payer: &Keypair, + existing_account: &CompressedAccount, + existing_message: String, + new_message: String, + update_message: String, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(create_and_update::ID); + remaining_accounts.add_system_accounts_v2(config)?; + + let hash = existing_account.hash; + + let address_tree_info = rpc.get_address_tree_v2(); + + let (new_address, _) = derive_address( + &[SECOND_SEED, payer.pubkey().as_ref()], + &address_tree_info.tree, + &create_and_update::ID, + ); + + 
let address_tree_info = rpc.get_address_tree_v2(); + + let rpc_result = rpc + .get_validity_proof( + vec![hash], + vec![AddressWithTree { + address: new_address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + + let packed_tree_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + let packed_state_tree_accounts = packed_tree_accounts.state_trees.unwrap(); + let packed_address_tree_accounts = packed_tree_accounts.address_trees; + let account_meta = CompressedAccountMeta { + tree_info: packed_state_tree_accounts.packed_tree_infos[0], + address: existing_account.address.unwrap(), + output_state_tree_index: packed_state_tree_accounts.output_tree_index, + }; + + let instruction_data = create_and_update::instruction::CreateAndUpdate { + proof: rpc_result.proof, + existing_account: ExistingCompressedAccountIxData { + account_meta, + message: existing_message, + update_message, + }, + new_account: NewCompressedAccountIxData { + address_tree_info: packed_address_tree_accounts[0], + message: new_message, + }, + }; + + let accounts = create_and_update::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_metas, _, _) = remaining_accounts.to_account_metas(); + let instruction = Instruction { + program_id: create_and_update::ID, + accounts: [accounts.to_account_metas(None), remaining_metas].concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +#[allow(clippy::too_many_arguments)] +async fn update_two_accounts( + rpc: &mut R, + payer: &Keypair, + first_account: &CompressedAccount, + first_current_message: String, + first_update_message: String, + second_account: &CompressedAccount, + second_current_message: String, + second_update_message: String, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(create_and_update::ID); + 
remaining_accounts.add_system_accounts_v2(config)?; + + let first_hash = first_account.hash; + let second_hash = second_account.hash; + + let rpc_result = rpc + .get_validity_proof(vec![first_hash, second_hash], vec![], None) + .await? + .value; + + let packed_tree_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + let packed_state_tree_accounts = packed_tree_accounts.state_trees.unwrap(); + + let first_account_meta = CompressedAccountMeta { + tree_info: packed_state_tree_accounts.packed_tree_infos[0], + address: first_account.address.unwrap(), + output_state_tree_index: packed_state_tree_accounts.output_tree_index, + }; + + let second_account_meta = CompressedAccountMeta { + tree_info: packed_state_tree_accounts.packed_tree_infos[1], + address: second_account.address.unwrap(), + output_state_tree_index: packed_state_tree_accounts.output_tree_index, + }; + + let instruction_data = create_and_update::instruction::UpdateTwoAccounts { + proof: rpc_result.proof, + first_account: ExistingCompressedAccountIxData { + account_meta: first_account_meta, + message: first_current_message, + update_message: first_update_message, + }, + second_account: ExistingCompressedAccountIxData { + account_meta: second_account_meta, + message: second_current_message, + update_message: second_update_message, + }, + }; + + let accounts = create_and_update::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_metas, _, _) = remaining_accounts.to_account_metas(); + let instruction = Instruction { + program_id: create_and_update::ID, + accounts: [accounts.to_account_metas(None), remaining_metas].concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx 
b/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx new file mode 100644 index 00000000..d45af0b2 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx @@ -0,0 +1,159 @@ +--- +title: "create-and-update/programs/create-and-update/tests/test_create_two_accounts.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/programs/create-and-update/tests/test_create_two_accounts.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use anchor_lang::{AnchorDeserialize, InstructionData, ToAccountMetas}; +use create_and_update::{ByteDataAccount, DataAccount, FIRST_SEED, SECOND_SEED}; +use light_client::indexer::TreeInfo; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v2::derive_address, + instruction::{PackedAccounts, SystemAccountMetaConfig}, +}; +use serial_test::serial; +use solana_sdk::{ + instruction::Instruction, + signature::{Keypair, Signature, Signer}, +}; + +#[serial] +#[tokio::test] +async fn test_create_two_accounts() { + let config = ProgramTestConfig::new( + true, + Some(vec![("create_and_update", create_and_update::ID)]), + ); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v2(); + + let (first_address, _) = derive_address( + &[FIRST_SEED, payer.pubkey().as_ref()], + &address_tree_info.tree, + &create_and_update::ID, + ); + + let (second_address, _) = derive_address( + &[SECOND_SEED, payer.pubkey().as_ref()], + &address_tree_info.tree, + &create_and_update::ID, + ); + + let byte_data = [1u8; 31]; // 31 bytes of data + let message = "String account message".to_string(); + + // Create two compressed accounts in a single instruction + create_two_accounts( + &mut rpc, + 
&payer, + &first_address, + &second_address, + address_tree_info, + byte_data, + message.clone(), + ) + .await + .unwrap(); + + // Check that the first account (ByteDataAccount) was created correctly + let first_compressed_account = rpc + .get_compressed_account(first_address, None) + .await + .unwrap() + .value + .unwrap(); + + let first_data = &first_compressed_account.data.as_ref().unwrap().data; + let first_account_data = ByteDataAccount::deserialize(&mut &first_data[..]).unwrap(); + assert_eq!(first_account_data.owner, payer.pubkey()); + assert_eq!(first_account_data.data, byte_data); + + // Check that the second account (DataAccount) was created correctly + let second_compressed_account = rpc + .get_compressed_account(second_address, None) + .await + .unwrap() + .value + .unwrap(); + + let second_data = &second_compressed_account.data.as_ref().unwrap().data; + let second_account_data = DataAccount::deserialize(&mut &second_data[..]).unwrap(); + assert_eq!(second_account_data.owner, payer.pubkey()); + assert_eq!(second_account_data.message, message); +} + +#[allow(clippy::too_many_arguments)] +async fn create_two_accounts( + rpc: &mut R, + payer: &Keypair, + first_address: &[u8; 32], + second_address: &[u8; 32], + address_tree_info: TreeInfo, + byte_data: [u8; 31], + message: String, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(create_and_update::ID); + remaining_accounts.add_system_accounts_v2(config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![ + AddressWithTree { + address: *first_address, + tree: address_tree_info.tree, + }, + AddressWithTree { + address: *second_address, + tree: address_tree_info.tree, + }, + ], + None, + ) + .await? + .value; + + let packed_address_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .address_trees; + let output_state_tree_index = rpc + .get_random_state_tree_info()? 
+ .pack_output_tree_index(&mut remaining_accounts)?; + + let instruction_data = create_and_update::instruction::CreateTwoAccounts { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_accounts[0], + output_state_tree_index, + byte_data, + message, + }; + + let accounts = create_and_update::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let instruction = Instruction { + program_id: create_and_update::ID, + accounts: [accounts.to_account_metas(None), { + let (metas, _, _) = remaining_accounts.to_account_metas(); + metas + }] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx b/.context/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx new file mode 100644 index 00000000..1b38ea49 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx @@ -0,0 +1,289 @@ +--- +title: "create-and-update/tests/create_and_update.ts" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/tests/create_and_update.ts" +--- + +```typescript +import * as anchor from "@coral-xyz/anchor"; +import { Program, web3 } from "@coral-xyz/anchor"; +import { CreateAndUpdate } from "../target/types/create_and_update"; +import { + bn, + CompressedAccountWithMerkleContext, + confirmTx, + createRpc, + featureFlags, + PackedAccounts, + Rpc, + sleep, + SystemAccountMetaConfig, + VERSION, + selectStateTreeInfo, + TreeInfo, + PackedAddressTreeInfo, + deriveAddressV2, + deriveAddressSeedV2, + buildAndSignTx, + sendAndConfirmTx, +} from "@lightprotocol/stateless.js"; +import * as assert from "assert"; +const path = require("path"); +const os = require("os"); +require("dotenv").config(); + +// v2 feature flag +featureFlags.version = VERSION.V2; + +const anchorWalletPath = path.join(os.homedir(), 
".config/solana/id.json"); +process.env.ANCHOR_WALLET = anchorWalletPath; + +describe("create-and-update anchor", () => { + const program = anchor.workspace.CreateAndUpdate as Program; + const coder = new anchor.BorshCoder(program.idl); + + it("creates and updates compressed accounts atomically", async () => { + const signer = new web3.Keypair(); + const rpc = createRpc(); + + await rpc.requestAirdrop(signer.publicKey, web3.LAMPORTS_PER_SOL); + await sleep(2000); + + const stateTreeInfos = await rpc.getStateTreeInfos(); + const stateTreeInfo = selectStateTreeInfo(stateTreeInfos); + + // v2 address tree info + const addressTreeInfo = await rpc.getAddressTreeInfoV2(); + // v2 derive + const firstSeed = new TextEncoder().encode("first"); + const firstAddressSeed = deriveAddressSeedV2([ + firstSeed, + signer.publicKey.toBytes(), + ]); + const firstAddress = deriveAddressV2( + firstAddressSeed, + addressTreeInfo.tree, + program.programId + ); + + await createCompressedAccount( + rpc, + addressTreeInfo, + firstAddress, + program, + stateTreeInfo, + signer, + "Initial message" + ); + + let firstAccount = await rpc.getCompressedAccount( + bn(firstAddress.toBytes()) + ); + if (!firstAccount) { + throw new Error("Failed to fetch the initial compressed account"); + } + + let decoded = coder.types.decode("dataAccount", firstAccount.data.data); + assert.ok( + decoded.owner.equals(signer.publicKey), + "owner should match signer" + ); + assert.strictEqual(decoded.message, "Initial message"); + + const secondSeed = new TextEncoder().encode("second"); + const secondAddressSeed = deriveAddressSeedV2([ + secondSeed, + signer.publicKey.toBytes(), + ]); + const secondAddress = deriveAddressV2( + secondAddressSeed, + addressTreeInfo.tree, + program.programId + ); + + await createAndUpdateAccounts( + rpc, + program, + signer, + firstAccount, + secondAddress, + addressTreeInfo, + "Hello from second account", + "Updated first message" + ); + }); + + async function waitForIndexer(rpc: Rpc) 
{ + const slot = await rpc.getSlot(); + await rpc.confirmTransactionIndexed(slot); + } + + async function createCompressedAccount( + rpc: Rpc, + addressTreeInfo: TreeInfo, + address: anchor.web3.PublicKey, + program: Program, + stateTreeInfo: TreeInfo, + signer: anchor.web3.Keypair, + message: string + ) { + const proofRpcResult = await rpc.getValidityProofV0( + [], + [ + { + tree: addressTreeInfo.tree, + queue: addressTreeInfo.queue, + address: bn(address.toBytes()), + }, + ] + ); + + const config = SystemAccountMetaConfig.new(program.programId); + const packedAccounts = PackedAccounts.newWithSystemAccountsV2(config); + + const outputStateTreeIndex = packedAccounts.insertOrGet( + stateTreeInfo.queue + ); + const addressQueueIndex = packedAccounts.insertOrGet(addressTreeInfo.queue); + const addressTreeIndex = packedAccounts.insertOrGet(addressTreeInfo.tree); + const packedAddressTreeInfo: PackedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[0], + addressMerkleTreePubkeyIndex: addressTreeIndex, + addressQueuePubkeyIndex: addressQueueIndex, + }; + const proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 300_000, + }); + + const remainingAccounts = packedAccounts.toAccountMetas().remainingAccounts; + const tx = await program.methods + .createCompressedAccount( + proof, + packedAddressTreeInfo, + outputStateTreeIndex, + message + ) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts) + .signers([signer]) + .transaction(); + + const recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + + const signedTx = buildAndSignTx(tx.instructions, signer, recentBlockhash); + const sig = await sendAndConfirmTx(rpc, signedTx); + return sig; + } + + async function createAndUpdateAccounts( + rpc: Rpc, + program: Program, + signer: anchor.web3.Keypair, + existingAccount: CompressedAccountWithMerkleContext, + 
newAddress: anchor.web3.PublicKey, + addressTreeInfo: TreeInfo, + newAccountMessage: string, + updatedMessage: string + ) { + if (!existingAccount.address) { + throw new Error("Existing compressed account missing address data"); + } + + const proofRpcResult = await rpc.getValidityProofV0( + [ + { + hash: existingAccount.hash, + tree: existingAccount.treeInfo.tree, + queue: existingAccount.treeInfo.queue, + }, + ], + // new account's address + [ + { + tree: addressTreeInfo.tree, + queue: addressTreeInfo.queue, + address: bn(newAddress.toBytes()), + }, + ] + ); + + const coder = new anchor.BorshCoder(program.idl); + const currentAccountData = coder.types.decode( + "dataAccount", + existingAccount.data.data + ); + + const config = SystemAccountMetaConfig.new(program.programId); + const packedAccounts = PackedAccounts.newWithSystemAccountsV2(config); + + const existingAccountMeta = { + treeInfo: { + rootIndex: proofRpcResult.rootIndices[0], + // Note: set this to true for local testing. + proveByIndex: true, + merkleTreePubkeyIndex: packedAccounts.insertOrGet( + existingAccount.treeInfo.tree + ), + queuePubkeyIndex: packedAccounts.insertOrGet( + existingAccount.treeInfo.queue + ), + leafIndex: existingAccount.leafIndex, + }, + address: existingAccount.address, + outputStateTreeIndex: packedAccounts.insertOrGet( + existingAccount.treeInfo.queue + ), + }; + + // for new account's address + const addressQueueIndex = packedAccounts.insertOrGet(addressTreeInfo.queue); + const addressTreeIndex = packedAccounts.insertOrGet(addressTreeInfo.tree); + + const packedAddressTreeInfo: PackedAddressTreeInfo = { + rootIndex: proofRpcResult.rootIndices[1], + addressMerkleTreePubkeyIndex: addressTreeIndex, + addressQueuePubkeyIndex: addressQueueIndex, + }; + + const proof = { + 0: proofRpcResult.compressedProof, + }; + const computeBudgetIx = web3.ComputeBudgetProgram.setComputeUnitLimit({ + units: 1000000, + }); + + const remainingAccounts = 
packedAccounts.toAccountMetas().remainingAccounts; + + const tx = await program.methods + .createAndUpdate( + proof, + { + accountMeta: existingAccountMeta, + message: currentAccountData.message, + updateMessage: updatedMessage, + }, + { + addressTreeInfo: packedAddressTreeInfo, + message: newAccountMessage, + } + ) + .accounts({ + signer: signer.publicKey, + }) + .preInstructions([computeBudgetIx]) + .remainingAccounts(remainingAccounts) + .signers([signer]) + .transaction(); + + const recentBlockhash = (await rpc.getRecentBlockhash()).blockhash; + const signedTx = buildAndSignTx(tx.instructions, signer, recentBlockhash); + const sig = await sendAndConfirmTx(rpc, signedTx); + return sig; + } +}); +``` diff --git a/.context/program-examples-mdx/create-and-update/tsconfig-json.mdx b/.context/program-examples-mdx/create-and-update/tsconfig-json.mdx new file mode 100644 index 00000000..2eac17b3 --- /dev/null +++ b/.context/program-examples-mdx/create-and-update/tsconfig-json.mdx @@ -0,0 +1,24 @@ +--- +title: "create-and-update/tsconfig.json" +description: "https://github.com/Lightprotocol/program-examples/blob/main/create-and-update/tsconfig.json" +--- + +```json +{ + "compilerOptions": { + "types": ["mocha", "chai"], + "typeRoots": ["./node_modules/@types"], + "lib": ["es2015"], + "module": "commonjs", + "target": "es6", + "esModuleInterop": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "noEmit": true, + "noUnusedLocals": false, + "noUnusedParameters": false + }, + "include": ["tests/**/*", "migrations/**/*"], + "exclude": ["node_modules", "target"] +} +``` diff --git a/.context/program-examples-mdx/read-only/Cargo-toml.mdx b/.context/program-examples-mdx/read-only/Cargo-toml.mdx new file mode 100644 index 00000000..71b7f407 --- /dev/null +++ b/.context/program-examples-mdx/read-only/Cargo-toml.mdx @@ -0,0 +1,33 @@ +--- +title: "read-only/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/read-only/Cargo.toml" +--- + 
+```toml +[package] +name = "read-only" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "read_only" + +[features] +default = [] +test-sbf = [] + +[dependencies] +anchor-lang = "0.31.1" +borsh = "0.10.4" +light-sdk = { version = "0.16.0" , features = ["anchor", "v2"] } +light-sdk-types = { version = "0.16.0" , features = ["v2"] } +light-compressed-account = "0.6.1" + +[dev-dependencies] +light-program-test = { version = "0.16.0" , features = ["v2"] } +light-client = { version = "0.16.0" , features = ["v2"] } +tokio = "1.43.0" +solana-sdk = "2.2" +serial_test = "3.2.0" +``` diff --git a/.context/program-examples-mdx/read-only/README.mdx b/.context/program-examples-mdx/read-only/README.mdx new file mode 100644 index 00000000..360e56cb --- /dev/null +++ b/.context/program-examples-mdx/read-only/README.mdx @@ -0,0 +1,40 @@ +--- +title: "read-only/README.md" +description: "https://github.com/Lightprotocol/program-examples/blob/main/read-only/README.md" +--- + +```markdown +# Read-only example + +This program demonstrates how to create and then read a compressed account on-chain. + +## Instructions + +### 1. `create_compressed_account` +Creates a new compressed account with initial data (owner and message). + +### 2. 
`read` +Demonstrates reading an existing compressed account on-chain: +- Uses a single validity proof to prove inclusion of the existing account + +## Data Structure + +```rust +pub struct DataAccount { + #[hash] + pub owner: Pubkey, + #[hash] + pub message: String, +} +``` + +## Build and Test + +```bash +# Build the program +cargo build-sbf + +# Run tests +cargo test-sbf +``` +``` diff --git a/.context/program-examples-mdx/read-only/Xargo-toml.mdx b/.context/program-examples-mdx/read-only/Xargo-toml.mdx new file mode 100644 index 00000000..7f55d48d --- /dev/null +++ b/.context/program-examples-mdx/read-only/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "read-only/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/read-only/Xargo.toml" +--- + +```toml +[target.sbf-solana-solana.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/read-only/src/lib-rs.mdx b/.context/program-examples-mdx/read-only/src/lib-rs.mdx new file mode 100644 index 00000000..8f76d8d3 --- /dev/null +++ b/.context/program-examples-mdx/read-only/src/lib-rs.mdx @@ -0,0 +1,131 @@ +--- +title: "read-only/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/read-only/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +// Suppress anchor realloc warning. 
+#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use borsh::{BorshDeserialize, BorshSerialize}; +use light_sdk::cpi::{v2::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction}; +use light_sdk::{ + account::LightAccount, + address::v2::derive_address, + cpi::{v2::CpiAccounts, CpiSigner}, + derive_light_cpi_signer, + instruction::{ + account_meta::CompressedAccountMetaReadOnly, PackedAddressTreeInfo, ValidityProof, + }, + LightDiscriminator, +}; + +declare_id!("HNqStLMpNuNJqhBF1FbGTKHEFbBLJmq8RdJJmZKWz6jH"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("HNqStLMpNuNJqhBF1FbGTKHEFbBLJmq8RdJJmZKWz6jH"); + +pub const FIRST_SEED: &[u8] = b"first"; + +#[program] +pub mod read_only { + + use super::*; + + /// Creates a new compressed account with initial data + pub fn create_compressed_account<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + message: String, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[FIRST_SEED, ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let mut data_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + data_account.owner = ctx.accounts.signer.key(); + data_account.message = message; + msg!( + "Created compressed account with message: {}", + data_account.message + ); + + let new_address_params = + address_tree_info.into_new_address_params_assigned_packed(address_seed, Some(0)); + + LightSystemProgramCpi::new_cpi(crate::LIGHT_CPI_SIGNER, proof) + .with_light_account(data_account)? 
+ .with_new_addresses(&[new_address_params]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Reads a compressed account and validates via read-only CPI + pub fn read<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + existing_account: ExistingCompressedAccountIxData, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let read_data_account = DataAccount { + owner: ctx.accounts.signer.key(), + message: existing_account.message.clone(), + }; + let read_only_account = LightAccount::::new_read_only( + &crate::ID, + &existing_account.account_meta, + read_data_account, + light_cpi_accounts.tree_pubkeys().unwrap().as_slice(), + )?; + + LightSystemProgramCpi::new_cpi(crate::LIGHT_CPI_SIGNER, proof) + .with_light_account(read_only_account)? + .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} + +#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator)] +pub struct DataAccount { + pub owner: Pubkey, + pub message: String, +} + +#[derive(Clone, Debug, AnchorSerialize, AnchorDeserialize)] +pub struct ExistingCompressedAccountIxData { + pub account_meta: CompressedAccountMetaReadOnly, + pub message: String, +} +``` diff --git a/.context/program-examples-mdx/read-only/tests/test-rs.mdx b/.context/program-examples-mdx/read-only/tests/test-rs.mdx new file mode 100644 index 00000000..6c8e4475 --- /dev/null +++ b/.context/program-examples-mdx/read-only/tests/test-rs.mdx @@ -0,0 +1,180 @@ +--- +title: "read-only/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/read-only/tests/test.rs" +--- + +```rust +#![cfg(feature = "test-sbf")] + +use anchor_lang::{AnchorDeserialize, InstructionData, ToAccountMetas}; +use 
light_client::indexer::{CompressedAccount, TreeInfo}; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v2::derive_address, + instruction::{ + account_meta::CompressedAccountMetaReadOnly, PackedAccounts, SystemAccountMetaConfig, + }, +}; +use read_only::{DataAccount, ExistingCompressedAccountIxData, FIRST_SEED}; +use solana_sdk::{ + instruction::Instruction, + signature::{Keypair, Signature, Signer}, +}; + +#[tokio::test] +async fn test_read_compressed_account() { + // Read only is only supported for v2 state trees. + let mut config = ProgramTestConfig::new_v2(true, Some(vec![("read_only", read_only::ID)])); + config.log_light_protocol_events = true; + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v2(); + + let (address, _) = derive_address( + &[FIRST_SEED, payer.pubkey().as_ref()], + &address_tree_info.tree, + &read_only::ID, + ); + + // Create the compressed account + create_compressed_account( + &mut rpc, + &payer, + &address, + address_tree_info, + "Hello, World!".to_string(), + ) + .await + .unwrap(); + + // Check that it was created correctly + let compressed_account = rpc + .get_compressed_account(address, None) + .await + .unwrap() + .value + .unwrap(); + + assert_eq!(compressed_account.leaf_index, 0); + let data = &compressed_account.data.as_ref().unwrap().data; + let account_data = DataAccount::deserialize(&mut &data[..]).unwrap(); + assert_eq!(account_data.owner, payer.pubkey()); + assert_eq!(account_data.message, "Hello, World!"); + + // Test reading the compressed account + read_compressed_account( + &mut rpc, + &payer, + &compressed_account, + "Hello, World!".to_string(), + ) + .await + .unwrap(); +} + +async fn create_compressed_account( + rpc: &mut R, + payer: &Keypair, + address: &[u8; 32], + address_tree_info: TreeInfo, + message: 
String, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(read_only::ID); + remaining_accounts.add_system_accounts_v2(config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? + .value; + + let packed_address_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .address_trees; + let output_state_tree_index = rpc + .get_random_state_tree_info()? + .pack_output_tree_index(&mut remaining_accounts)?; + + let instruction_data = read_only::instruction::CreateCompressedAccount { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_accounts[0], + output_state_tree_index, + message, + }; + let accounts = read_only::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + let instruction = Instruction { + program_id: read_only::ID, + accounts: [accounts.to_account_metas(None), remaining_accounts_metas].concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn read_compressed_account( + rpc: &mut R, + payer: &Keypair, + compressed_account: &CompressedAccount, + message: String, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(read_only::ID); + remaining_accounts.add_system_accounts_v2(config)?; + + let hash = compressed_account.hash; + let rpc_result = rpc + .get_validity_proof(vec![hash], vec![], None) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + let packed_state_tree_accounts = packed_tree_accounts.state_trees.unwrap(); + + let account_meta = CompressedAccountMetaReadOnly { + tree_info: packed_state_tree_accounts.packed_tree_infos[0], + address: compressed_account.address.unwrap(), + }; + + let instruction_data = read_only::instruction::Read { + proof: rpc_result.proof, + existing_account: ExistingCompressedAccountIxData { + account_meta, + message, + }, + }; + + let accounts = read_only::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let (remaining_accounts_metas, _, _) = remaining_accounts.to_account_metas(); + let instruction = Instruction { + program_id: read_only::ID, + accounts: [accounts.to_account_metas(None), remaining_accounts_metas].concat(), + data: instruction_data.data(), + }; + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} +``` diff --git a/.context/program-examples-mdx/zk-id/Cargo-toml.mdx b/.context/program-examples-mdx/zk-id/Cargo-toml.mdx new file mode 100644 index 00000000..f1a5380d --- /dev/null +++ b/.context/program-examples-mdx/zk-id/Cargo-toml.mdx @@ -0,0 +1,46 @@ +--- +title: "zk-id/Cargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/Cargo.toml" +--- + +```toml +[package] +name = "zk-id" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib", "lib"] +name = "zk_id" + +[features] +default = [] +test-sbf = [] + +[dependencies] +anchor-lang = "0.31.1" +borsh = "0.10.4" +light-sdk = { version = "0.16.0" , features = ["anchor", "poseidon", "merkle-tree", "v2"] } +light-hasher = "5.0.0" +light-sdk-types = { version = "0.16.0" ,features = ["v2"] } +groth16-solana = { git = "https://github.com/Lightprotocol/groth16-solana", rev = "66c0dc87d0808c4d2aadb53c61435b6edb8ddfd9" } + +[dev-dependencies] +light-program-test = { version = "0.16.0" , features = ["v2"] } +light-client = { 
version = "0.16.0" , features = ["v2"] }
+tokio = "1.40.0"
+solana-sdk = "2.2"
+circom-prover = "0.1"
+rust-witness = "0.1"
+num-bigint = "0.4"
+serde_json = "1.0"
+light-compressed-account = { version = "0.6.1", features = ["new-unique"] }
+light-merkle-tree-reference = "4.0.0"
+groth16-solana = { git = "https://github.com/Lightprotocol/groth16-solana", features = ["vk", "circom"], rev = "66c0dc87d0808c4d2aadb53c61435b6edb8ddfd9" }
+
+[build-dependencies]
+
+[target.'cfg(not(target_os = "solana"))'.build-dependencies]
+rust-witness = "0.1"
+groth16-solana = { git = "https://github.com/Lightprotocol/groth16-solana", features = ["vk"], rev = "66c0dc87d0808c4d2aadb53c61435b6edb8ddfd9" }
+```
diff --git a/.context/program-examples-mdx/zk-id/README.mdx b/.context/program-examples-mdx/zk-id/README.mdx
new file mode 100644
index 00000000..86307f10
--- /dev/null
+++ b/.context/program-examples-mdx/zk-id/README.mdx
@@ -0,0 +1,107 @@
+---
+title: "zk-id/README.md"
+description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/README.md"
+---
+
+```markdown
+
+# ZK-ID Program
+
+A minimal zk id Solana program that uses zero-knowledge proofs for identity verification with compressed accounts.
+Note this is an example of how to verify a zk inclusion proof, not a full zk identity protocol and not production-ready.
+For examples of zk identity protocols, see:
+- [Iden3](https://github.com/iden3) - Full decentralized identity protocol with claims, revocation, and recovery
+- [Semaphore](https://github.com/semaphore-protocol/semaphore) - Privacy-preserving group signaling with nullifiers
+
+## Program Instructions
+
+### 1. `create_issuer`
+Creates a compressed account for an issuer entity who can credential other users, storing their pubkey and initializing their credential issuance counter.
+
+### 2. 
`add_credential` +Issues a new credential by creating a compressed account that binds a user's pubkey to an issuer, incrementing the issuer's credential counter in the process. + +### 3. `zk_verify_credential` +Verifies a zero-knowledge proof of credential ownership using Groth16 verification and creates an encrypted event account to store the verification result on-chain. + +**Properties:** +- Credential verification is private. The credential is not exposed during zk proof verification. + (The transaction payer is not private, for full privacy a relayer or freshly funded keypair should be used.) +- Each credential can only be used once per `verification_id`. (The event account address serves as a nullifier.) +- Only the credential owner can produce a valid proof. + +## Requirements + +### System Dependencies +- **Rust** (1.90.0 or later) +- **Node.js** (v22 or later) and npm +- **Solana CLI** (2.3.11 or later) +- **Light CLI**: Install with `npm install -g @lightprotocol/zk-compression-cli` + +### ZK Circuit Tools +- **Circom** (v2.2.2): Zero-knowledge circuit compiler +- **SnarkJS**: JavaScript library for generating and verifying ZK proofs + +To install circom and snarkjs: +```bash +# Install circom (Linux/macOS) +wget https://github.com/iden3/circom/releases/download/v2.2.2/circom-linux-amd64 +chmod +x circom-linux-amd64 +sudo mv circom-linux-amd64 /usr/local/bin/circom + +# For macOS, replace with circom-macos-amd64 + +# Install snarkjs globally +npm install -g snarkjs +``` + +## Setup + +Before building and testing, you need to compile the ZK circuits and generate the proving/verification keys: + +```bash +# Run the setup script to compile circuits and generate keys +./scripts/setup.sh +``` + +This script will: +1. Install npm dependencies +2. Download the Powers of Tau ceremony file +3. Compile the circom circuit +4. Generate the proving key (zkey) +5. 
Export the verification key + +## Build and Test + +```bash +# Build the program +cargo build-sbf + +# Run tests and see tx +RUST_BACKTRACE=1 cargo test-sbf -- --nocapture +``` + +## Structure + +``` +zk-id/ +├── circuits/ # Circom circuit definitions +│ └── compressed_account_merkle_proof.circom +├── build/ # Generated circuit artifacts (after setup) +│ ├── verification_key.json +│ └── *.zkey, *.wasm, etc. +├── scripts/ +│ └── setup.sh # Circuit compilation and setup script +├── src/ +│ └── lib.rs # Solana program implementation +└── tests/ + └── test.rs # Integration tests +``` + +## Cleaning Build Artifacts + +To clean generated circuit files: +```bash +./scripts/clean.sh +``` +``` diff --git a/.context/program-examples-mdx/zk-id/Xargo-toml.mdx b/.context/program-examples-mdx/zk-id/Xargo-toml.mdx new file mode 100644 index 00000000..a9f6588d --- /dev/null +++ b/.context/program-examples-mdx/zk-id/Xargo-toml.mdx @@ -0,0 +1,9 @@ +--- +title: "zk-id/Xargo.toml" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/Xargo.toml" +--- + +```toml +[target.sbf-solana-solana.dependencies.std] +features = [] +``` diff --git a/.context/program-examples-mdx/zk-id/build-rs.mdx b/.context/program-examples-mdx/zk-id/build-rs.mdx new file mode 100644 index 00000000..fa7b7031 --- /dev/null +++ b/.context/program-examples-mdx/zk-id/build-rs.mdx @@ -0,0 +1,39 @@ +--- +title: "zk-id/build.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/build.rs" +--- + +```rust +use groth16_solana::vk_parser::generate_vk_file; + +fn main() { + println!("cargo:rerun-if-changed=build/verification_key.json"); + println!("cargo:rerun-if-changed=build/compressed_account_merkle_proof_js"); + + // Generate the verifying key Rust file from the JSON + let vk_json_path = "./build/verification_key.json"; + let output_dir = "./src"; + let output_file = "verifying_key.rs"; + + if std::path::Path::new(vk_json_path).exists() { + 
generate_vk_file(vk_json_path, output_dir, output_file) + .expect("Failed to generate verifying key Rust file"); + // Successfully generated verifying_key.rs + } else { + println!("cargo:warning=Verification key JSON not found. Run './scripts/setup.sh' first."); + } + + // Only transpile witness generators for non-Solana targets + // Check the TARGET environment variable since build scripts run on the host + let target = std::env::var("TARGET").unwrap_or_default(); + if !target.contains("sbf") && !target.contains("solana") { + let witness_wasm_dir = "./build/compressed_account_merkle_proof_js"; + if std::path::Path::new(witness_wasm_dir).exists() { + rust_witness::transpile::transpile_wasm(witness_wasm_dir.to_string()); + // Successfully transpiled witness generator + } else { + println!("cargo:warning=Witness WASM not found. Run './scripts/setup.sh' first."); + } + } +} +``` diff --git a/.context/program-examples-mdx/zk-id/circuits/README.mdx b/.context/program-examples-mdx/zk-id/circuits/README.mdx new file mode 100644 index 00000000..9579c84d --- /dev/null +++ b/.context/program-examples-mdx/zk-id/circuits/README.mdx @@ -0,0 +1,64 @@ +--- +title: "zk-id/circuits/README.md" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/circuits/README.md" +--- + +```markdown +# Compressed Account Merkle Proof Circuit + +Zero-knowledge circuit that proves ownership of a compressed account in a Merkle tree without revealing the account details. + +## What It Does + +The circuit verifies: +1. **Account Hash** - Computes Poseidon hash of account fields (owner, discriminator, data) +2. 
**Merkle Inclusion** - Proves the account exists at a specific leaf in a 26-level tree + +## Setup & Testing + +```bash +# Compile circuit and generate keys +./scripts/setup.sh + +# Run tests +cargo test-sbf + +# Clean build artifacts +./scripts/clean.sh +``` + +## Circuit I/O + +**Public inputs** (visible in proof): +- `owner_hashed`, `merkle_tree_hashed`, `discriminator` - Account identifiers +- `issuer_hashed` - Credential issuer +- `expectedRoot` - Merkle tree root +- `verification_id` - Context for nullifier generation (prevents reuse in same context) +- `public_encrypted_data_hash` - Encrypted data commitment +- `nullifier` - Unique value preventing double-spending (Poseidon(verification_id, credential_secret)) + +**Private inputs** (hidden): +- `credentialPrivateKey` - Secret key proving credential ownership +- `leaf_index`, `account_leaf_index` - Account positions +- `address` - Account address +- `pathElements[26]` - Merkle proof path +- `encrypted_data_hash` - Private data hash + +## Circuit Files + +- `compressed_account_merkle_proof.circom` - Main circuit that combines all components +- `credential.circom` - Keypair verification for credential ownership +- `compressed_account.circom` - Computes Poseidon hash of account fields +- `merkle_proof.circom` - Binary Merkle tree inclusion proof + +## Architecture + +``` +CompressedAccountMerkleProof (main) +├── Keypair (credential.circom) +│ └── Proves knowledge of private key +├── CompressedAccountHash (compressed_account.circom) +│ └── Poseidon hash of 6 fields +└── MerkleProof (merkle_proof.circom) + └── 26-level binary tree verification +``` diff --git a/.context/program-examples-mdx/zk-id/package-json.mdx b/.context/program-examples-mdx/zk-id/package-json.mdx new file mode 100644 index 00000000..2ed16839 --- /dev/null +++ b/.context/program-examples-mdx/zk-id/package-json.mdx @@ -0,0 +1,35 @@ +--- +title: "zk-id/package.json" +description: 
"https://github.com/Lightprotocol/program-examples/blob/main/zk-id/package.json" +--- + +```json +{ + "name": "lowlevel-zk-circuits", + "version": "1.0.0", + "description": "ZK circuits for compressed account Merkle proof verification", + "main": "index.js", + "scripts": { + "setup": "./scripts/setup.sh", + "clean": "./scripts/clean.sh", + "compile": "circom circuits/compressed_account_merkle_proof.circom --r1cs --wasm --sym -o build", + "generate-zkey": "snarkjs groth16 setup build/compressed_account_merkle_proof.r1cs pot/powersOfTau28_hez_final_16.ptau build/circuit_0000.zkey", + "contribute": "snarkjs zkey contribute build/circuit_0000.zkey build/circuit_final.zkey --name='First contribution' -v", + "export-vkey": "snarkjs zkey export verificationkey build/circuit_final.zkey build/verification_key.json", + "test": "mocha test/**/*.test.js --timeout 100000" + }, + "dependencies": { + "circomlib": "2.0.5", + "circomlibjs": "0.1.7" + }, + "devDependencies": { + "@types/chai": "^4.3.4", + "@types/mocha": "^10.0.1", + "chai": "^4.3.7", + "circom_tester": "^0.0.19", + "mocha": "^10.8.2", + "snarkjs": "^0.7.0", + "typescript": "^5.0.4" + } +} +``` diff --git a/.context/program-examples-mdx/zk-id/src/lib-rs.mdx b/.context/program-examples-mdx/zk-id/src/lib-rs.mdx new file mode 100644 index 00000000..9ed8b610 --- /dev/null +++ b/.context/program-examples-mdx/zk-id/src/lib-rs.mdx @@ -0,0 +1,348 @@ +--- +title: "zk-id/src/lib.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/src/lib.rs" +--- + +```rust +#![allow(unexpected_cfgs)] +#![allow(deprecated)] + +use anchor_lang::{prelude::*, AnchorDeserialize, AnchorSerialize}; +use borsh::{BorshDeserialize, BorshSerialize}; +use groth16_solana::groth16::Groth16Verifier; +use light_hasher::to_byte_array::ToByteArray; +use light_hasher::HasherError; +use light_sdk::account::{poseidon::LightAccount as LightAccountPoseidon, LightAccount}; +use light_sdk::cpi::v1::CpiAccounts; +use light_sdk::{ + 
address::v2::derive_address, + cpi::{v1::LightSystemProgramCpi, InvokeLightSystemProgram, LightCpiInstruction}, + derive_light_cpi_signer, + instruction::{ + account_meta::CompressedAccountMeta, CompressedProof, PackedAddressTreeInfo, ValidityProof, + }, + merkle_tree::v1::read_state_merkle_tree_root, + LightDiscriminator, LightHasher, +}; +use light_sdk_types::CpiSigner; + +declare_id!("HNqStLMpNuNJqhBF1FbGTKHEFbBLJmq8RdJJmZKWz6jH"); + +pub const LIGHT_CPI_SIGNER: CpiSigner = + derive_light_cpi_signer!("HNqStLMpNuNJqhBF1FbGTKHEFbBLJmq8RdJJmZKWz6jH"); + +pub const ISSUER: &[u8] = b"issuer"; +pub const CREDENTIAL: &[u8] = b"credential"; +pub const ZK_ID_CHECK: &[u8] = b"ZK_ID_CHECK"; + +// Include the generated verifying key module +pub mod verifying_key; + +#[program] +pub mod zk_id { + + use groth16_solana::decompression::{decompress_g1, decompress_g2}; + use light_hasher::hash_to_field_size::hashv_to_bn254_field_size_be_const_array; + + use super::*; + + /// Creates a new issuer compressed account + pub fn create_issuer<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + let (address, address_seed) = derive_address( + &[ISSUER, ctx.accounts.signer.key().as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + msg!("address {:?}", address); + let mut issuer_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + issuer_account.issuer_pubkey = ctx.accounts.signer.key(); + issuer_account.num_credentials_issued = 0; + + msg!( + "Created issuer account for pubkey: {}", + ctx.accounts.signer.key() + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + 
.with_light_account(issuer_account)? + .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Creates a new credential compressed account storing a pubkey + /// Requires a valid issuer account - only the issuer can create credentials + pub fn add_credential<'info>( + ctx: Context<'_, '_, '_, 'info, GenericAnchorAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + issuer_account_meta: CompressedAccountMeta, + credential_pubkey: Pubkey, + num_credentials_issued: u64, + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + + // Verify the issuer account - read it to ensure it exists and signer is the issuer + let mut issuer_account = LightAccount::::new_mut( + &crate::ID, + &issuer_account_meta, + IssuerAccount { + issuer_pubkey: ctx.accounts.signer.key(), + num_credentials_issued, + }, + )?; + + // Increment the credential counter + issuer_account.num_credentials_issued = issuer_account + .num_credentials_issued + .checked_add(1) + .ok_or(ProgramError::ArithmeticOverflow)?; + + let (address, address_seed) = derive_address( + &[CREDENTIAL, credential_pubkey.as_ref()], + &address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?, + &crate::ID, + ); + + let mut credential_account = LightAccountPoseidon::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + + credential_account.issuer = ctx.accounts.signer.key(); + credential_account.credential_pubkey = CredentialPubkey::new(credential_pubkey); + + msg!( + "Created credential account for pubkey: {} (issuer credential count: {})", + credential_pubkey, + issuer_account.num_credentials_issued + ); + + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_light_account(issuer_account)? 
+ .with_light_account_poseidon(credential_account)? + .with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } + + /// Verifies a ZK proof of credential ownership and creates an encrypted event account. + pub fn zk_verify_credential<'info>( + ctx: Context<'_, '_, '_, 'info, VerifyAccounts<'info>>, + proof: ValidityProof, + address_tree_info: PackedAddressTreeInfo, + output_state_tree_index: u8, + input_root_index: u16, + public_data: Vec, + credential_proof: CompressedProof, + issuer: [u8; 32], + nullifier: [u8; 32], + verification_id: [u8; 31], + ) -> Result<()> { + let light_cpi_accounts = CpiAccounts::new( + ctx.accounts.signer.as_ref(), + ctx.remaining_accounts, + crate::LIGHT_CPI_SIGNER, + ); + let address_pubkey = address_tree_info + .get_tree_pubkey(&light_cpi_accounts) + .map_err(|_| ErrorCode::AccountNotEnoughKeys)?; + + if address_pubkey.to_bytes() != light_sdk::constants::ADDRESS_TREE_V2 { + msg!("Invalid address tree"); + return Err(ProgramError::InvalidAccountData.into()); + } + + let (address, address_seed) = derive_address( + &[ + ZK_ID_CHECK, + nullifier.as_slice(), + verification_id.as_slice(), + ], + &address_pubkey, + &crate::ID, + ); + + // Get root from input Merkle tree (example of reading on-chain state) + let expected_root = read_state_merkle_tree_root( + &ctx.accounts.input_merkle_tree.to_account_info(), + input_root_index, + ) + .map_err(|e| ProgramError::from(e))?; + + let merkle_tree_pubkey = ctx.accounts.input_merkle_tree.key(); + let merkle_tree_hashed = + hashv_to_bn254_field_size_be_const_array::<2>(&[&merkle_tree_pubkey.to_bytes()]) + .unwrap(); + + let mut discriminator = [0u8; 32]; + discriminator[24..].copy_from_slice(CredentialAccount::LIGHT_DISCRIMINATOR_SLICE); + let issuer_hashed = hashv_to_bn254_field_size_be_const_array::<2>(&[&issuer]).unwrap(); + let account_owner_hashed = + 
hashv_to_bn254_field_size_be_const_array::<2>(&[&crate::ID.to_bytes()]).unwrap(); + + let mut event_account = LightAccount::::new_init( + &crate::ID, + Some(address), + output_state_tree_index, + ); + event_account.data = public_data; + + let event_account_info = event_account + .to_output_compressed_account_with_packed_context(None)? + .unwrap(); + { + // Construct public inputs array for the circuit + // Order MUST match the circuit's public declaration exactly: + // owner_hashed, merkle_tree_hashed, discriminator, issuer_hashed, expectedRoot, public_encrypted_data_hash, public_data_hash + let mut padded_verification_id = [0u8; 32]; + padded_verification_id[1..].copy_from_slice(&verification_id); + + let public_inputs: [[u8; 32]; 8] = [ + account_owner_hashed, + merkle_tree_hashed, + discriminator, + issuer_hashed, + expected_root, + padded_verification_id, + event_account_info + .compressed_account + .data + .as_ref() + .unwrap() + .data_hash, // This is public_encrypted_data_hash + nullifier, + ]; + msg!("public_inputs {:?}", public_inputs); + + let proof_a = decompress_g1(&credential_proof.a).map_err(|e| { + let code: u32 = e.into(); + Error::from(ProgramError::Custom(code)) + })?; + + let proof_b = decompress_g2(&credential_proof.b).map_err(|e| { + let code: u32 = e.into(); + Error::from(ProgramError::Custom(code)) + })?; + let proof_c = decompress_g1(&credential_proof.c).map_err(|e| { + let code: u32 = e.into(); + Error::from(ProgramError::Custom(code)) + })?; + + // Verify the Groth16 proof + let mut verifier = Groth16Verifier::new( + &proof_a, + &proof_b, + &proof_c, + &public_inputs, + &crate::verifying_key::VERIFYINGKEY, + ) + .map_err(|e| { + let code: u32 = e.into(); + Error::from(ProgramError::Custom(code)) + })?; + + verifier.verify().map_err(|e| { + let code: u32 = e.into(); + Error::from(ProgramError::Custom(code)) + })?; + } + LightSystemProgramCpi::new_cpi(LIGHT_CPI_SIGNER, proof) + .with_output_compressed_accounts(&[event_account_info]) + 
.with_new_addresses(&[address_tree_info.into_new_address_params_packed(address_seed)]) + .invoke(light_cpi_accounts)?; + + Ok(()) + } +} + +#[derive(Accounts)] +pub struct GenericAnchorAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, +} +#[derive(Accounts)] +pub struct VerifyAccounts<'info> { + #[account(mut)] + pub signer: Signer<'info>, + /// CHECK: read_state_merkle_tree_root checks account owner, and discriminator + pub input_merkle_tree: UncheckedAccount<'info>, +} + +#[derive( + Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator, LightHasher, +)] +pub struct CredentialAccount { + #[hash] + pub issuer: Pubkey, + /// CredentialPubkey (is a Poseidon hash -> no need to annotate with #[hash]) + pub credential_pubkey: CredentialPubkey, +} + +#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator)] +pub struct CredentialPubkey { + pub credential_pubkey: Pubkey, +} + +impl CredentialPubkey { + pub fn new(credential_pubkey: Pubkey) -> Self { + Self { credential_pubkey } + } +} + +// ToByteArray is required by LightHasher and not implemented for Pubkey or [u8;32], +// so we implement it here for CredentialPubkey. 
+impl ToByteArray for CredentialPubkey { + const NUM_FIELDS: usize = 1; + fn to_byte_array(&self) -> std::result::Result<[u8; 32], HasherError> { + Ok(self.credential_pubkey.to_bytes()) + } +} + +#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator)] +pub struct EncryptedEventAccount { + pub data: Vec, +} + +#[derive(Clone, Debug, Default, BorshSerialize, BorshDeserialize, LightDiscriminator)] +pub struct IssuerAccount { + pub issuer_pubkey: Pubkey, + pub num_credentials_issued: u64, +} + +#[error_code] +pub enum ErrorCode { + #[msg("Invalid issuer: signer is not the issuer of this account")] + InvalidIssuer, + #[msg("Not enough keys in remaining accounts")] + AccountNotEnoughKeys, +} +``` diff --git a/.context/program-examples-mdx/zk-id/src/verifying_key-rs.mdx b/.context/program-examples-mdx/zk-id/src/verifying_key-rs.mdx new file mode 100644 index 00000000..72270408 --- /dev/null +++ b/.context/program-examples-mdx/zk-id/src/verifying_key-rs.mdx @@ -0,0 +1,32 @@ +--- +title: "zk-id/src/verifying_key.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/src/verifying_key.rs" +--- + +```rust +use groth16_solana::groth16::Groth16Verifyingkey; + +pub const VERIFYINGKEY: Groth16Verifyingkey = Groth16Verifyingkey { + nr_pubinputs: 8, + + vk_alpha_g1: [45u8, 77u8, 154u8, 167u8, 227u8, 2u8, 217u8, 223u8, 65u8, 116u8, 157u8, 85u8, 7u8, 148u8, 157u8, 5u8, 219u8, 234u8, 51u8, 251u8, 177u8, 108u8, 100u8, 59u8, 34u8, 245u8, 153u8, 162u8, 190u8, 109u8, 242u8, 226u8, 20u8, 190u8, 221u8, 80u8, 60u8, 55u8, 206u8, 176u8, 97u8, 216u8, 236u8, 96u8, 32u8, 159u8, 227u8, 69u8, 206u8, 137u8, 131u8, 10u8, 25u8, 35u8, 3u8, 1u8, 240u8, 118u8, 202u8, 255u8, 0u8, 77u8, 25u8, 38u8], + + vk_beta_g2: [9u8, 103u8, 3u8, 47u8, 203u8, 247u8, 118u8, 209u8, 175u8, 201u8, 133u8, 248u8, 136u8, 119u8, 241u8, 130u8, 211u8, 132u8, 128u8, 166u8, 83u8, 242u8, 222u8, 202u8, 169u8, 121u8, 76u8, 188u8, 59u8, 243u8, 6u8, 12u8, 14u8, 24u8, 120u8, 
71u8, 173u8, 76u8, 121u8, 131u8, 116u8, 208u8, 214u8, 115u8, 43u8, 245u8, 1u8, 132u8, 125u8, 214u8, 139u8, 192u8, 224u8, 113u8, 36u8, 30u8, 2u8, 19u8, 188u8, 127u8, 193u8, 61u8, 183u8, 171u8, 48u8, 76u8, 251u8, 209u8, 224u8, 138u8, 112u8, 74u8, 153u8, 245u8, 232u8, 71u8, 217u8, 63u8, 140u8, 60u8, 170u8, 253u8, 222u8, 196u8, 107u8, 122u8, 13u8, 55u8, 157u8, 166u8, 154u8, 77u8, 17u8, 35u8, 70u8, 167u8, 23u8, 57u8, 193u8, 177u8, 164u8, 87u8, 168u8, 199u8, 49u8, 49u8, 35u8, 210u8, 77u8, 47u8, 145u8, 146u8, 248u8, 150u8, 183u8, 198u8, 62u8, 234u8, 5u8, 169u8, 213u8, 127u8, 6u8, 84u8, 122u8, 208u8, 206u8, 200u8], + + vk_gamma_g2: [25u8, 142u8, 147u8, 147u8, 146u8, 13u8, 72u8, 58u8, 114u8, 96u8, 191u8, 183u8, 49u8, 251u8, 93u8, 37u8, 241u8, 170u8, 73u8, 51u8, 53u8, 169u8, 231u8, 18u8, 151u8, 228u8, 133u8, 183u8, 174u8, 243u8, 18u8, 194u8, 24u8, 0u8, 222u8, 239u8, 18u8, 31u8, 30u8, 118u8, 66u8, 106u8, 0u8, 102u8, 94u8, 92u8, 68u8, 121u8, 103u8, 67u8, 34u8, 212u8, 247u8, 94u8, 218u8, 221u8, 70u8, 222u8, 189u8, 92u8, 217u8, 146u8, 246u8, 237u8, 9u8, 6u8, 137u8, 208u8, 88u8, 95u8, 240u8, 117u8, 236u8, 158u8, 153u8, 173u8, 105u8, 12u8, 51u8, 149u8, 188u8, 75u8, 49u8, 51u8, 112u8, 179u8, 142u8, 243u8, 85u8, 172u8, 218u8, 220u8, 209u8, 34u8, 151u8, 91u8, 18u8, 200u8, 94u8, 165u8, 219u8, 140u8, 109u8, 235u8, 74u8, 171u8, 113u8, 128u8, 141u8, 203u8, 64u8, 143u8, 227u8, 209u8, 231u8, 105u8, 12u8, 67u8, 211u8, 123u8, 76u8, 230u8, 204u8, 1u8, 102u8, 250u8, 125u8, 170u8], + + vk_delta_g2: [18u8, 121u8, 252u8, 116u8, 57u8, 146u8, 135u8, 55u8, 76u8, 216u8, 32u8, 99u8, 90u8, 190u8, 47u8, 86u8, 142u8, 5u8, 0u8, 150u8, 64u8, 52u8, 8u8, 76u8, 81u8, 168u8, 167u8, 112u8, 145u8, 30u8, 90u8, 193u8, 33u8, 29u8, 41u8, 76u8, 197u8, 99u8, 176u8, 15u8, 39u8, 64u8, 99u8, 117u8, 126u8, 120u8, 150u8, 181u8, 64u8, 83u8, 210u8, 124u8, 166u8, 97u8, 207u8, 179u8, 230u8, 88u8, 80u8, 31u8, 114u8, 91u8, 162u8, 233u8, 41u8, 141u8, 67u8, 185u8, 17u8, 208u8, 78u8, 222u8, 30u8, 185u8, 44u8, 105u8, 219u8, 233u8, 
28u8, 7u8, 115u8, 153u8, 29u8, 218u8, 3u8, 35u8, 210u8, 31u8, 73u8, 35u8, 203u8, 187u8, 237u8, 243u8, 174u8, 70u8, 2u8, 87u8, 122u8, 51u8, 171u8, 124u8, 39u8, 146u8, 1u8, 95u8, 128u8, 84u8, 35u8, 207u8, 132u8, 170u8, 123u8, 176u8, 119u8, 218u8, 228u8, 128u8, 232u8, 94u8, 233u8, 58u8, 238u8, 63u8, 45u8, 72u8, 214u8, 134u8], + + vk_ic: &[ + [4u8, 90u8, 11u8, 151u8, 69u8, 42u8, 114u8, 212u8, 111u8, 206u8, 148u8, 104u8, 65u8, 166u8, 159u8, 137u8, 171u8, 77u8, 109u8, 234u8, 55u8, 135u8, 203u8, 77u8, 67u8, 182u8, 227u8, 12u8, 202u8, 57u8, 113u8, 13u8, 31u8, 182u8, 157u8, 8u8, 213u8, 65u8, 49u8, 36u8, 11u8, 76u8, 223u8, 18u8, 37u8, 52u8, 113u8, 212u8, 75u8, 181u8, 222u8, 133u8, 170u8, 168u8, 234u8, 180u8, 8u8, 110u8, 4u8, 142u8, 160u8, 230u8, 138u8, 175u8], + [6u8, 192u8, 93u8, 129u8, 197u8, 162u8, 187u8, 150u8, 148u8, 208u8, 31u8, 46u8, 2u8, 172u8, 162u8, 123u8, 81u8, 134u8, 38u8, 219u8, 60u8, 57u8, 111u8, 148u8, 4u8, 41u8, 227u8, 248u8, 38u8, 44u8, 175u8, 139u8, 18u8, 142u8, 35u8, 202u8, 131u8, 3u8, 138u8, 65u8, 62u8, 66u8, 141u8, 192u8, 137u8, 210u8, 64u8, 58u8, 168u8, 102u8, 162u8, 204u8, 187u8, 123u8, 225u8, 235u8, 197u8, 167u8, 28u8, 220u8, 183u8, 101u8, 16u8, 186u8], + [8u8, 193u8, 155u8, 163u8, 55u8, 80u8, 175u8, 199u8, 139u8, 31u8, 49u8, 208u8, 96u8, 144u8, 174u8, 203u8, 159u8, 203u8, 160u8, 63u8, 135u8, 45u8, 19u8, 133u8, 103u8, 80u8, 186u8, 102u8, 152u8, 180u8, 149u8, 18u8, 4u8, 33u8, 182u8, 15u8, 44u8, 189u8, 0u8, 113u8, 253u8, 12u8, 194u8, 38u8, 183u8, 95u8, 252u8, 215u8, 178u8, 46u8, 83u8, 195u8, 153u8, 75u8, 202u8, 14u8, 111u8, 237u8, 198u8, 161u8, 107u8, 187u8, 4u8, 55u8], + [46u8, 10u8, 95u8, 159u8, 76u8, 32u8, 246u8, 107u8, 45u8, 235u8, 75u8, 185u8, 109u8, 26u8, 24u8, 129u8, 208u8, 166u8, 64u8, 199u8, 252u8, 251u8, 193u8, 51u8, 19u8, 88u8, 156u8, 129u8, 114u8, 120u8, 122u8, 32u8, 26u8, 83u8, 113u8, 210u8, 130u8, 243u8, 39u8, 58u8, 142u8, 18u8, 194u8, 142u8, 184u8, 158u8, 199u8, 113u8, 196u8, 191u8, 215u8, 77u8, 21u8, 66u8, 162u8, 25u8, 201u8, 70u8, 
148u8, 247u8, 173u8, 199u8, 191u8, 131u8], + [16u8, 79u8, 230u8, 65u8, 92u8, 49u8, 63u8, 19u8, 215u8, 29u8, 92u8, 70u8, 81u8, 252u8, 134u8, 116u8, 115u8, 66u8, 113u8, 76u8, 135u8, 44u8, 188u8, 236u8, 72u8, 218u8, 47u8, 3u8, 70u8, 85u8, 130u8, 31u8, 40u8, 64u8, 154u8, 29u8, 126u8, 193u8, 234u8, 223u8, 171u8, 140u8, 166u8, 217u8, 211u8, 241u8, 59u8, 3u8, 20u8, 217u8, 110u8, 174u8, 132u8, 10u8, 42u8, 121u8, 87u8, 169u8, 37u8, 10u8, 59u8, 167u8, 194u8, 106u8], + [44u8, 136u8, 199u8, 234u8, 22u8, 38u8, 206u8, 40u8, 223u8, 162u8, 56u8, 216u8, 109u8, 180u8, 5u8, 169u8, 225u8, 203u8, 204u8, 153u8, 140u8, 24u8, 116u8, 123u8, 68u8, 220u8, 131u8, 114u8, 20u8, 137u8, 230u8, 148u8, 28u8, 10u8, 38u8, 24u8, 120u8, 216u8, 11u8, 243u8, 26u8, 233u8, 114u8, 85u8, 0u8, 3u8, 27u8, 225u8, 6u8, 41u8, 85u8, 100u8, 148u8, 97u8, 60u8, 23u8, 64u8, 255u8, 64u8, 34u8, 115u8, 81u8, 235u8, 103u8], + [12u8, 140u8, 197u8, 237u8, 220u8, 64u8, 41u8, 28u8, 134u8, 122u8, 249u8, 54u8, 127u8, 114u8, 225u8, 243u8, 158u8, 245u8, 164u8, 146u8, 238u8, 253u8, 73u8, 44u8, 88u8, 253u8, 220u8, 76u8, 231u8, 243u8, 190u8, 184u8, 36u8, 187u8, 51u8, 119u8, 254u8, 79u8, 21u8, 186u8, 122u8, 154u8, 95u8, 191u8, 113u8, 194u8, 251u8, 85u8, 139u8, 247u8, 155u8, 237u8, 48u8, 185u8, 216u8, 105u8, 206u8, 92u8, 120u8, 26u8, 188u8, 117u8, 69u8, 224u8], + [23u8, 119u8, 153u8, 159u8, 239u8, 111u8, 103u8, 220u8, 195u8, 184u8, 64u8, 255u8, 239u8, 78u8, 188u8, 37u8, 193u8, 254u8, 226u8, 63u8, 140u8, 124u8, 70u8, 6u8, 188u8, 113u8, 37u8, 98u8, 54u8, 10u8, 182u8, 37u8, 9u8, 31u8, 241u8, 30u8, 102u8, 7u8, 244u8, 97u8, 153u8, 56u8, 35u8, 254u8, 73u8, 54u8, 161u8, 123u8, 61u8, 157u8, 48u8, 66u8, 4u8, 31u8, 143u8, 142u8, 86u8, 58u8, 162u8, 156u8, 144u8, 116u8, 170u8, 85u8], + [6u8, 144u8, 191u8, 115u8, 83u8, 103u8, 157u8, 108u8, 167u8, 218u8, 159u8, 203u8, 111u8, 63u8, 129u8, 144u8, 213u8, 133u8, 69u8, 22u8, 56u8, 228u8, 46u8, 147u8, 0u8, 70u8, 96u8, 85u8, 157u8, 72u8, 158u8, 141u8, 33u8, 26u8, 152u8, 246u8, 49u8, 94u8, 88u8, 145u8, 
79u8, 194u8, 171u8, 203u8, 149u8, 178u8, 138u8, 241u8, 125u8, 228u8, 29u8, 39u8, 74u8, 210u8, 195u8, 67u8, 43u8, 35u8, 149u8, 225u8, 94u8, 140u8, 224u8, 83u8], + ] +}; +``` diff --git a/.context/program-examples-mdx/zk-id/tests/circuit-rs.mdx b/.context/program-examples-mdx/zk-id/tests/circuit-rs.mdx new file mode 100644 index 00000000..8e1f7eee --- /dev/null +++ b/.context/program-examples-mdx/zk-id/tests/circuit-rs.mdx @@ -0,0 +1,464 @@ +--- +title: "zk-id/tests/circuit.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/tests/circuit.rs" +--- + +```rust +use circom_prover::{prover::ProofLib, witness::WitnessFn, CircomProver}; +use groth16_solana::groth16::Groth16Verifier; +use groth16_solana::proof_parser::circom_prover::{convert_proof, convert_public_inputs}; +use light_compressed_account::compressed_account::{CompressedAccount, CompressedAccountData}; +use light_compressed_account::Pubkey; +use light_hasher::{ + hash_to_field_size::{hash_to_bn254_field_size_be, hashv_to_bn254_field_size_be_const_array}, + Hasher, Poseidon, Sha256, +}; +use light_merkle_tree_reference::MerkleTree; +use num_bigint::BigUint; +use solana_sdk::signature::{Keypair, Signer}; +use std::collections::HashMap; + +// Link the generated witness library +#[link(name = "circuit", kind = "static")] +extern "C" {} + +rust_witness::witness!(compressedaccountmerkleproof); + +// Use the verifying key from the library +use zk_id::verifying_key::VERIFYINGKEY; + +/// Derives a credential keypair from a Solana keypair +/// The private key is derived by signing "CREDENTIAL" and truncating to 248 bits +/// The public key is Poseidon(private_key) +#[derive(Debug, Clone)] +struct CredentialKeypair { + pub private_key: [u8; 32], // 248 bits + pub public_key: [u8; 32], // Poseidon hash of private key +} + +impl CredentialKeypair { + pub fn new(solana_keypair: &Keypair) -> Self { + // Sign the message "CREDENTIAL" with the Solana keypair + let message = b"CREDENTIAL"; + let 
signature = solana_keypair.sign_message(message); + + // Hash the signature to get entropy + let hashed = Sha256::hash(signature.as_ref()).unwrap(); + + // Truncate to 248 bits (31 bytes) for BN254 field compatibility + let mut private_key = [0u8; 32]; + private_key[1..32].copy_from_slice(&hashed[0..31]); + + let public_key = Poseidon::hashv(&[&private_key]).unwrap(); + + Self { + private_key, + public_key, + } + } + + /// Get the private key as a BigUint for circuit input + pub fn private_key_biguint(&self) -> BigUint { + BigUint::from_bytes_be(&self.private_key) + } + + /// Compute nullifier for a given verification_id + pub fn compute_nullifier(&self, verification_id: &[u8; 31]) -> [u8; 32] { + // Nullifier = Poseidon(verification_id, private_key) + // Both need to be padded to 32 bytes for Poseidon + let mut padded_verification = [0u8; 32]; + padded_verification[1..32].copy_from_slice(verification_id); + + Poseidon::hashv(&[&padded_verification, &self.private_key]).unwrap() + } +} + +/// Helper function to add compressed account inputs to the circuit inputs HashMap +/// +/// # Arguments +/// * `inputs` - Mutable reference to the HashMap that will be populated with circuit inputs +/// * `compressed_account` - The compressed account to convert to circuit inputs +/// * `merkle_tree_pubkey` - The public key of the Merkle tree +/// * `leaf_index` - The index of the leaf in the Merkle tree +/// * `issuer_pubkey` - The issuer's public key +/// * `credential` - The credential keypair (contains private key and public key commitment) +/// * `verification_id` - The verification context (31 bytes) +/// * `encrypted_data` - The encrypted data +fn add_compressed_account_to_circuit_inputs( + inputs: &mut HashMap>, + compressed_account: &CompressedAccount, + merkle_tree_pubkey: &Pubkey, + leaf_index: u32, + issuer_pubkey: &Pubkey, + credential: &CredentialKeypair, + verification_id: &[u8; 31], + encrypted_data: &[u8], +) { + // Extract data from compressed account + let owner 
= compressed_account.owner; + let discriminator = if let Some(ref data) = compressed_account.data { + data.discriminator + } else { + [0u8; 8] + }; + + // Hash values for circuit - use 2-round hash like on-chain + let owner_hashed = hash_to_bn254_field_size_be(owner.as_ref()); + let merkle_tree_hashed = hash_to_bn254_field_size_be(merkle_tree_pubkey.as_ref()); + let issuer_hashed = + hashv_to_bn254_field_size_be_const_array::<2>(&[issuer_pubkey.as_ref()]).unwrap(); + + // Hash encrypted_data with SHA256 and truncate (set first byte to 0) + // Include length prefix like in the main test + let mut hash_input = Vec::new(); + hash_input.extend_from_slice((encrypted_data.len() as u32).to_le_bytes().as_ref()); + hash_input.extend_from_slice(encrypted_data); + let mut encrypted_data_hash = Sha256::hash(&hash_input).unwrap(); + encrypted_data_hash[0] = 0; + + // Compute nullifier using credential private key and verification_id + let nullifier = credential.compute_nullifier(verification_id); + + // Add all inputs to the HashMap + inputs.insert( + "owner_hashed".to_string(), + vec![BigUint::from_bytes_be(&owner_hashed).to_string()], + ); + inputs.insert("leaf_index".to_string(), vec![leaf_index.to_string()]); + + // Add account_leaf_index (same format as SDK: 32-byte array with value at [28..32] in LE) + let mut account_leaf_index_bytes = [0u8; 32]; + account_leaf_index_bytes[28..32].copy_from_slice(&(leaf_index as u32).to_le_bytes()); + inputs.insert( + "account_leaf_index".to_string(), + vec![BigUint::from_bytes_be(&account_leaf_index_bytes).to_string()], + ); + + // Add address field - use the address from the compressed account + let address = compressed_account.address.unwrap_or([0u8; 32]); + inputs.insert( + "address".to_string(), + vec![BigUint::from_bytes_be(&address).to_string()], + ); + + inputs.insert( + "merkle_tree_hashed".to_string(), + vec![BigUint::from_bytes_be(&merkle_tree_hashed).to_string()], + ); + inputs.insert( + "discriminator".to_string(), + 
vec![BigUint::from_bytes_be(&discriminator).to_string()], + ); + inputs.insert( + "issuer_hashed".to_string(), + vec![BigUint::from_bytes_be(&issuer_hashed).to_string()], + ); + + // Add credential private key (private input) + inputs.insert( + "credentialPrivateKey".to_string(), + vec![credential.private_key_biguint().to_string()], + ); + + // Add verification_id (public input) - pad to 32 bytes + let mut padded_verification = [0u8; 32]; + padded_verification[1..32].copy_from_slice(verification_id); + inputs.insert( + "verification_id".to_string(), + vec![BigUint::from_bytes_be(&padded_verification).to_string()], + ); + + inputs.insert( + "encrypted_data_hash".to_string(), + vec![BigUint::from_bytes_be(&encrypted_data_hash).to_string()], + ); + inputs.insert( + "public_encrypted_data_hash".to_string(), + vec![BigUint::from_bytes_be(&encrypted_data_hash).to_string()], + ); + + // Add nullifier (public output) + inputs.insert( + "nullifier".to_string(), + vec![BigUint::from_bytes_be(&nullifier).to_string()], + ); +} + +/// Helper function to add Merkle proof inputs to the circuit inputs HashMap +/// +/// # Arguments +/// * `inputs` - Mutable reference to the HashMap that will be populated with circuit inputs +/// * `merkle_proof_hashes` - Vector of Merkle proof path elements (32-byte hashes) +/// * `merkle_root` - The expected Merkle root (32-byte hash) +fn add_merkle_proof_to_circuit_inputs( + inputs: &mut HashMap>, + merkle_proof_hashes: &[[u8; 32]], + merkle_root: &[u8; 32], +) { + // Convert Merkle proof path elements to BigUint strings + let path_elements: Vec = merkle_proof_hashes + .iter() + .map(|hash| BigUint::from_bytes_be(hash).to_string()) + .collect(); + inputs.insert("pathElements".to_string(), path_elements); + + // Convert expected root to BigUint string + let expected_root_bigint = BigUint::from_bytes_be(merkle_root); + inputs.insert( + "expectedRoot".to_string(), + vec![expected_root_bigint.to_string()], + ); +} + +#[test] +fn 
test_compressed_account_merkle_proof_circuit() { + let zkey_path = "./build/compressed_account_merkle_proof_final.zkey".to_string(); + + // Create test data + let owner = Pubkey::new_from_array([1u8; 32]); + let merkle_tree_pubkey = Pubkey::new_from_array([2u8; 32]); + let leaf_index: u32 = 0; + let issuer_pubkey = Pubkey::new_from_array([4u8; 32]); + + // Create credential keypair + let user_keypair = Keypair::new(); + let credential = CredentialKeypair::new(&user_keypair); + + let encrypted_data = vec![6u8; 64]; + let mut address = [3u8; 32]; + address[0] = 0; // Ensure first byte is 0 + + // Create verification_id (31 bytes) + let verification_id = [7u8; 31]; + + // Compute data_hash as hash of issuer and credential commitment + let issuer_hashed = + hashv_to_bn254_field_size_be_const_array::<2>(&[issuer_pubkey.as_ref()]).unwrap(); + let data_hash = Poseidon::hashv(&[issuer_hashed.as_slice(), &credential.public_key]).unwrap(); + + let compressed_account = CompressedAccount { + owner, + lamports: 0, + address: Some(address), + data: Some(CompressedAccountData { + discriminator: [1u8; 8], + data: vec![], + data_hash, + }), + }; + + // Create Merkle tree and get proof + let compressed_account_hash = compressed_account + .hash(&merkle_tree_pubkey, &leaf_index, false) + .unwrap(); + + let mut merkle_tree = MerkleTree::::new(26, 0); + merkle_tree.append(&compressed_account_hash).unwrap(); + + let merkle_proof_hashes = merkle_tree + .get_proof_of_leaf(leaf_index as usize, false) + .unwrap(); + let merkle_root = merkle_tree.root(); + + // Build circuit inputs + let mut proof_inputs = HashMap::new(); + add_compressed_account_to_circuit_inputs( + &mut proof_inputs, + &compressed_account, + &merkle_tree_pubkey, + leaf_index, + &issuer_pubkey, + &credential, + &verification_id, + &encrypted_data, + ); + add_merkle_proof_to_circuit_inputs(&mut proof_inputs, &merkle_proof_hashes, &merkle_root); + + // Generate and verify proof + let circuit_inputs = 
serde_json::to_string(&proof_inputs).unwrap(); + let proof = CircomProver::prove( + ProofLib::Arkworks, + WitnessFn::RustWitness(compressedaccountmerkleproof_witness), + circuit_inputs, + zkey_path.clone(), + ) + .expect("Proof generation failed"); + + let is_valid = CircomProver::verify(ProofLib::Arkworks, proof, zkey_path) + .expect("Proof verification failed"); + + assert!(is_valid, "Proof should be valid"); +} + +#[test] +fn test_invalid_proof_rejected() { + let zkey_path = "./build/compressed_account_merkle_proof_final.zkey".to_string(); + + // Create test data + let owner = Pubkey::new_from_array([1u8; 32]); + let merkle_tree_pubkey = Pubkey::new_from_array([2u8; 32]); + let leaf_index: u32 = 0; + let issuer_pubkey = Pubkey::new_from_array([4u8; 32]); + + // Create credential keypair + let user_keypair = Keypair::new(); + let credential = CredentialKeypair::new(&user_keypair); + + let encrypted_data = vec![6u8; 64]; + + // Create verification_id (31 bytes) + let mut verification_id = [7u8; 31]; + verification_id[0] = 0x0F; + + // Compute data_hash as hash of issuer and credential commitment + let issuer_hashed = + hashv_to_bn254_field_size_be_const_array::<2>(&[issuer_pubkey.as_ref()]).unwrap(); + let data_hash = Poseidon::hashv(&[issuer_hashed.as_slice(), &credential.public_key]).unwrap(); + + let compressed_account = CompressedAccount { + owner, + lamports: 0, + address: None, + data: Some(CompressedAccountData { + discriminator: [1u8; 8], + data: vec![], + data_hash, + }), + }; + + // Create Merkle tree and get proof + let compressed_account_hash = compressed_account + .hash(&merkle_tree_pubkey, &leaf_index, false) + .unwrap(); + + let mut merkle_tree = MerkleTree::::new(26, 0); + merkle_tree.append(&compressed_account_hash).unwrap(); + let merkle_proof_hashes = merkle_tree + .get_proof_of_leaf(leaf_index as usize, false) + .unwrap(); + + // Build circuit inputs with INVALID root + let mut proof_inputs = HashMap::new(); + 
add_compressed_account_to_circuit_inputs( + &mut proof_inputs, + &compressed_account, + &merkle_tree_pubkey, + leaf_index, + &issuer_pubkey, + &credential, + &verification_id, + &encrypted_data, + ); + + let invalid_root = [0u8; 32]; + add_merkle_proof_to_circuit_inputs(&mut proof_inputs, &merkle_proof_hashes, &invalid_root); + + // Generate proof (succeeds even with wrong root) + let circuit_inputs = serde_json::to_string(&proof_inputs).unwrap(); + let proof = CircomProver::prove( + ProofLib::Arkworks, + WitnessFn::RustWitness(compressedaccountmerkleproof_witness), + circuit_inputs, + zkey_path.clone(), + ) + .expect("Proof generation should succeed"); + + // Verify proof (should fail due to constraint violation) + let is_valid = CircomProver::verify(ProofLib::Arkworks, proof, zkey_path) + .expect("Verification should return a result"); + + assert!(!is_valid, "Proof should be invalid with wrong root"); +} + +#[test] +fn test_groth16_solana_verification() { + let zkey_path = "./build/compressed_account_merkle_proof_final.zkey".to_string(); + + // Create test data + let owner = Pubkey::new_from_array([1u8; 32]); + let merkle_tree_pubkey = Pubkey::new_from_array([2u8; 32]); + let leaf_index: u32 = 0; + let issuer_pubkey = Pubkey::new_from_array([4u8; 32]); + + // Create credential keypair + let user_keypair = Keypair::new(); + let credential = CredentialKeypair::new(&user_keypair); + + let encrypted_data = vec![6u8; 64]; + let mut address = [3u8; 32]; + address[0] = 0; // Ensure first byte is 0 + + // Create verification_id (31 bytes) + let verification_id = [7u8; 31]; + + // Compute data_hash as hash of issuer and credential commitment + let issuer_hashed = + hashv_to_bn254_field_size_be_const_array::<2>(&[issuer_pubkey.as_ref()]).unwrap(); + let data_hash = Poseidon::hashv(&[issuer_hashed.as_slice(), &credential.public_key]).unwrap(); + + let compressed_account = CompressedAccount { + owner, + lamports: 0, + address: Some(address), + data: 
Some(CompressedAccountData { + discriminator: [1u8; 8], + data: vec![], + data_hash, + }), + }; + + // Create Merkle tree and get proof + let compressed_account_hash = compressed_account + .hash(&merkle_tree_pubkey, &leaf_index, false) + .unwrap(); + + let mut merkle_tree = MerkleTree::::new(26, 0); + merkle_tree.append(&compressed_account_hash).unwrap(); + + let merkle_proof_hashes = merkle_tree + .get_proof_of_leaf(leaf_index as usize, false) + .unwrap(); + let merkle_root = merkle_tree.root(); + + // Build circuit inputs + let mut proof_inputs = HashMap::new(); + add_compressed_account_to_circuit_inputs( + &mut proof_inputs, + &compressed_account, + &merkle_tree_pubkey, + leaf_index, + &issuer_pubkey, + &credential, + &verification_id, + &encrypted_data, + ); + add_merkle_proof_to_circuit_inputs(&mut proof_inputs, &merkle_proof_hashes, &merkle_root); + + // Generate proof with circom-prover + let circuit_inputs = serde_json::to_string(&proof_inputs).unwrap(); + println!("circuit_inputs {:?}", circuit_inputs); + let proof = CircomProver::prove( + ProofLib::Arkworks, + WitnessFn::RustWitness(compressedaccountmerkleproof_witness), + circuit_inputs, + zkey_path.clone(), + ) + .expect("Proof generation failed"); + + // First verify with circom-prover + let is_valid_circom = CircomProver::verify(ProofLib::Arkworks, proof.clone(), zkey_path) + .expect("Circom verification failed"); + assert!(is_valid_circom, "Proof should be valid with circom-prover"); + + // Convert proof and public inputs to groth16-solana format + let (proof_a, proof_b, proof_c) = convert_proof(&proof.proof).expect("Failed to convert proof"); + let public_inputs: [[u8; 32]; 8] = convert_public_inputs(&proof.pub_inputs); + + // Verify with groth16-solana + let mut verifier = + Groth16Verifier::new(&proof_a, &proof_b, &proof_c, &public_inputs, &VERIFYINGKEY) + .expect("Failed to create verifier"); + + verifier.verify().expect("Groth16 verification failed"); +} +``` diff --git 
a/.context/program-examples-mdx/zk-id/tests/test-rs.mdx b/.context/program-examples-mdx/zk-id/tests/test-rs.mdx new file mode 100644 index 00000000..289dfa3d --- /dev/null +++ b/.context/program-examples-mdx/zk-id/tests/test-rs.mdx @@ -0,0 +1,639 @@ +--- +title: "zk-id/tests/test.rs" +description: "https://github.com/Lightprotocol/program-examples/blob/main/zk-id/tests/test.rs" +--- + +```rust +// #![cfg(feature = "test-sbf")] + +use anchor_lang::{InstructionData, ToAccountMetas}; +use circom_prover::{prover::ProofLib, witness::WitnessFn, CircomProver}; +use groth16_solana::proof_parser::circom_prover::convert_proof; +use light_client::indexer::CompressedAccount; +use light_hasher::{hash_to_field_size::hash_to_bn254_field_size_be, Hasher, Poseidon, Sha256}; +use light_program_test::{ + program_test::LightProgramTest, AddressWithTree, Indexer, ProgramTestConfig, Rpc, RpcError, +}; +use light_sdk::{ + address::v2::derive_address, + instruction::{PackedAccounts, SystemAccountMetaConfig}, +}; +use num_bigint::BigUint; +use solana_sdk::{ + instruction::Instruction, + pubkey::Pubkey, + signature::{Keypair, Signature, Signer}, +}; +use std::collections::HashMap; +use zk_id::{CREDENTIAL, ISSUER, ZK_ID_CHECK}; + +/// Derives a credential keypair from a Solana keypair +/// The private key is derived by signing "CREDENTIAL" and truncating to 248 bits +/// The public key is Poseidon(private_key) +#[derive(Debug, Clone)] +struct CredentialKeypair { + pub private_key: [u8; 32], // 248 bits + pub public_key: [u8; 32], // Poseidon hash of private key +} + +impl CredentialKeypair { + pub fn new(solana_keypair: &Keypair) -> Self { + // Sign the message "CREDENTIAL" with the Solana keypair + let message = b"CREDENTIAL"; + let signature = solana_keypair.sign_message(message); + + // Hash the signature to get entropy + let hashed = Sha256::hash(signature.as_ref()).unwrap(); + + // Truncate to 248 bits (31 bytes) for BN254 field compatibility + let mut private_key = [0u8; 32]; + 
private_key[1..32].copy_from_slice(&hashed[0..31]); + + let public_key = Poseidon::hashv(&[&private_key]).unwrap(); + + Self { + private_key, + public_key, + } + } + + /// Compute nullifier for a given verification_id + pub fn compute_nullifier(&self, verification_id: &[u8; 31]) -> [u8; 32] { + // Nullifier = Poseidon(verification_id, private_key) + // Both need to be padded to 32 bytes for Poseidon + let mut padded_verification = [0u8; 32]; + padded_verification[1..32].copy_from_slice(verification_id); + + Poseidon::hashv(&[&padded_verification, &self.private_key]).unwrap() + } +} + +// Link the generated witness library +#[link(name = "circuit", kind = "static")] +extern "C" {} + +rust_witness::witness!(compressedaccountmerkleproof); + +#[tokio::test] +async fn test_create_issuer_and_add_credential() { + let config = ProgramTestConfig::new(true, Some(vec![("zk_id", zk_id::ID)])); + let mut rpc = LightProgramTest::new(config).await.unwrap(); + let payer = rpc.get_payer().insecure_clone(); + + let address_tree_info = rpc.get_address_tree_v2(); + + let (issuer_address, _) = derive_address( + &[ISSUER, payer.pubkey().as_ref()], + &address_tree_info.tree, + &zk_id::ID, + ); + println!("issuer_address {:?}", issuer_address); + // Step 1: Create the issuer account + create_issuer(&mut rpc, &payer, &issuer_address, address_tree_info.clone()) + .await + .unwrap(); + + // Verify the issuer account was created + let issuer_accounts = rpc + .get_compressed_accounts_by_owner(&zk_id::ID, None, None) + .await + .unwrap(); + assert_eq!(issuer_accounts.value.items.len(), 1); + let issuer_account = &issuer_accounts.value.items[0]; + + println!("Created issuer account for pubkey: {}", payer.pubkey()); + + // Step 2: Create a credential account + // Create a credential keypair for the user + let user_keypair = Keypair::new(); + let credential = CredentialKeypair::new(&user_keypair); + + // Use the credential commitment as the "pubkey" for address derivation + let 
(credential_address, _) = derive_address( + &[CREDENTIAL, credential.public_key.as_ref()], + &address_tree_info.tree, + &zk_id::ID, + ); + + add_credential( + &mut rpc, + &payer, + &credential_address, + address_tree_info.clone(), + issuer_account, + credential.public_key, + ) + .await + .unwrap(); + + // Verify both accounts exist now (issuer + credential) + let program_compressed_accounts = rpc + .get_compressed_accounts_by_owner(&zk_id::ID, None, None) + .await + .unwrap(); + + assert_eq!(program_compressed_accounts.value.items.len(), 2); + println!( + "program_compressed_accounts.value.items {:?}", + program_compressed_accounts.value.items + ); + + println!( + "Successfully created credential account with public_key: {:?}", + credential.public_key + ); + + // Step 3: Verify the credential with ZK proof + let credential_account = rpc + .get_compressed_account(credential_address, None) + .await + .unwrap() + .value + .expect("Credential account not found"); + println!("credential_account {:?}", credential_account); + verify_credential( + &mut rpc, + &payer, + &credential_account, + address_tree_info, + &user_keypair, + ) + .await + .unwrap(); + + println!("Successfully verified credential with ZK proof!"); + + // Verify event account was created + let final_compressed_accounts = rpc + .get_compressed_accounts_by_owner(&zk_id::ID, None, None) + .await + .unwrap(); + + assert_eq!(final_compressed_accounts.value.items.len(), 3); +} + +async fn create_issuer( + rpc: &mut R, + payer: &Keypair, + address: &[u8; 32], + address_tree_info: light_client::indexer::TreeInfo, +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(zk_id::ID); + remaining_accounts.add_system_accounts(config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? 
+ .value; + let packed_address_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .address_trees; + let output_state_tree_index = rpc + .get_random_state_tree_info()? + .pack_output_tree_index(&mut remaining_accounts)?; + + let instruction_data = zk_id::instruction::CreateIssuer { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_accounts[0], + output_state_tree_index, + }; + + let accounts = zk_id::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let instruction = Instruction { + program_id: zk_id::ID, + accounts: [ + accounts.to_account_metas(None), + remaining_accounts.to_account_metas().0, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn add_credential( + rpc: &mut R, + payer: &Keypair, + address: &[u8; 32], + address_tree_info: light_client::indexer::TreeInfo, + issuer_account: &CompressedAccount, + credential_commitment: [u8; 32], +) -> Result +where + R: Rpc + Indexer, +{ + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(zk_id::ID); + remaining_accounts.add_system_accounts(config)?; + + let rpc_result = rpc + .get_validity_proof( + vec![issuer_account.hash], + vec![AddressWithTree { + address: *address, + tree: address_tree_info.tree, + }], + None, + ) + .await? 
+ .value; + + let packed_tree_accounts = rpc_result.pack_tree_infos(&mut remaining_accounts); + let packed_state_tree_accounts = packed_tree_accounts.state_trees.unwrap(); + let packed_address_tree_accounts = packed_tree_accounts.address_trees; + + // Create the issuer account meta manually + let issuer_account_meta = light_sdk::instruction::account_meta::CompressedAccountMeta { + tree_info: packed_state_tree_accounts.packed_tree_infos[0], + address: issuer_account.address.unwrap(), + output_state_tree_index: packed_state_tree_accounts.output_tree_index, + }; + + let output_state_tree_index = rpc + .get_random_state_tree_info_v1()? + .pack_output_tree_index(&mut remaining_accounts)?; + + // Parse the issuer account data to get num_credentials_issued + let issuer_data = issuer_account.data.as_ref().unwrap(); + let issuer_account_parsed: zk_id::IssuerAccount = + anchor_lang::AnchorDeserialize::deserialize(&mut issuer_data.data.as_slice()).unwrap(); + + let instruction_data = zk_id::instruction::AddCredential { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_accounts[0], + output_state_tree_index, + issuer_account_meta, + credential_pubkey: Pubkey::new_from_array(credential_commitment), + num_credentials_issued: issuer_account_parsed.num_credentials_issued, + }; + + let accounts = zk_id::accounts::GenericAnchorAccounts { + signer: payer.pubkey(), + }; + + let instruction = Instruction { + program_id: zk_id::ID, + accounts: [ + accounts.to_account_metas(None), + remaining_accounts.to_account_metas().0, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +async fn verify_credential( + rpc: &mut R, + payer: &Keypair, + credential_account: &CompressedAccount, + address_tree_info: light_client::indexer::TreeInfo, + user_keypair: &Keypair, +) -> Result +where + R: Rpc + Indexer, +{ + // Get the merkle proof for the credential account + let proofs = rpc + 
.get_multiple_compressed_account_proofs(vec![credential_account.hash], None) + .await? + .value + .items; + + let merkle_proof = &proofs[0]; + let leaf_index = merkle_proof.leaf_index as u32; + let merkle_proof_hashes = &merkle_proof.proof; + let merkle_root = merkle_proof.root; + let root_index = (merkle_proof.root_seq % 2400) as u16; + + let state_tree = merkle_proof.merkle_tree; + + // Parse the credential account data + let credential_data = credential_account.data.as_ref().unwrap(); + let credential_account_parsed: zk_id::CredentialAccount = + anchor_lang::AnchorDeserialize::deserialize(&mut credential_data.data.as_slice()).unwrap(); + + // Generate encrypted data (in a real scenario, this would be user-provided) + let encrypted_data = vec![42u8; 64]; + + // Create the credential keypair from the user keypair + let credential = CredentialKeypair::new(user_keypair); + + // Generate a verification_id (31 bytes) + let mut verification_id = [0u8; 31]; + let random_pubkey = Pubkey::new_unique(); + verification_id.copy_from_slice(&random_pubkey.to_bytes()[0..31]); + + // Generate the ZK proof using the actual merkle root + let (credential_proof, nullifier) = generate_credential_proof( + credential_account, + &state_tree, + leaf_index, + &merkle_proof_hashes, + &merkle_root, + &credential_account_parsed.issuer, + &credential, + &encrypted_data, + &verification_id, + ); + + // Create the verification transaction + let mut remaining_accounts = PackedAccounts::default(); + let config = SystemAccountMetaConfig::new(zk_id::ID); + remaining_accounts.add_system_accounts(config)?; + + let (event_address, _) = derive_address( + &[ + ZK_ID_CHECK, + nullifier.as_slice(), + verification_id.as_slice(), + ], + &address_tree_info.tree, + &zk_id::ID, + ); + + let rpc_result = rpc + .get_validity_proof( + vec![], + vec![AddressWithTree { + address: event_address, + tree: address_tree_info.tree, + }], + None, + ) + .await? 
+ .value; + + let packed_address_tree_accounts = rpc_result + .pack_tree_infos(&mut remaining_accounts) + .address_trees; + + let output_state_tree_index = rpc + .get_random_state_tree_info_v1()? + .pack_output_tree_index(&mut remaining_accounts)?; + + let instruction_data = zk_id::instruction::ZkVerifyCredential { + proof: rpc_result.proof, + address_tree_info: packed_address_tree_accounts[0], + output_state_tree_index, + input_root_index: root_index, + public_data: encrypted_data, + credential_proof, + issuer: credential_account_parsed.issuer.to_bytes(), + nullifier, + verification_id, + }; + + let accounts = zk_id::accounts::VerifyAccounts { + signer: payer.pubkey(), + input_merkle_tree: state_tree, + }; + + let instruction = Instruction { + program_id: zk_id::ID, + accounts: [ + accounts.to_account_metas(None), + remaining_accounts.to_account_metas().0, + ] + .concat(), + data: instruction_data.data(), + }; + + rpc.create_and_send_transaction(&[instruction], &payer.pubkey(), &[payer]) + .await +} + +fn generate_credential_proof( + credential_account: &CompressedAccount, + merkle_tree_pubkey: &Pubkey, + leaf_index: u32, + merkle_proof_hashes: &[[u8; 32]], + merkle_root: &[u8; 32], + issuer_pubkey: &Pubkey, + credential: &CredentialKeypair, + encrypted_data: &[u8], + verification_id: &[u8; 31], +) -> ( + light_compressed_account::instruction_data::compressed_proof::CompressedProof, + [u8; 32], // nullifier +) { + let zkey_path = "./build/compressed_account_merkle_proof_final.zkey".to_string(); + + // Build circuit inputs + let mut proof_inputs = HashMap::new(); + + // Add compressed account inputs + let discriminator = if let Some(ref data) = credential_account.data { + data.discriminator + } else { + [0u8; 8] + }; + + let owner_hashed = hash_to_bn254_field_size_be(zk_id::ID.as_ref()); + let merkle_tree_hashed = hash_to_bn254_field_size_be(merkle_tree_pubkey.as_ref()); + + // Use the same hashing as on-chain: hashv_to_bn254_field_size_be_const_array::<2> + use 
light_hasher::hash_to_field_size::hashv_to_bn254_field_size_be_const_array; + let issuer_hashed = + hashv_to_bn254_field_size_be_const_array::<2>(&[issuer_pubkey.as_ref()]).unwrap(); + + // Compute data_hash as hash of issuer and credential commitment (public key is already a Poseidon hash) + let mut hash_input = Vec::new(); + hash_input.extend_from_slice((encrypted_data.len() as u32).to_le_bytes().as_ref()); + hash_input.extend_from_slice(encrypted_data); + let mut encrypted_data_hash = Sha256::hash(hash_input.as_slice()).unwrap(); + encrypted_data_hash[0] = 0; + + let public_data_hash = + Poseidon::hashv(&[issuer_hashed.as_slice(), &credential.public_key]).unwrap(); + + // Verify the data_hash matches + let expected_data_hash = credential_account.data.as_ref().unwrap().data_hash; + assert_eq!(public_data_hash, expected_data_hash, "Data hash mismatch"); + + // Compute what the circuit will compute for the leaf hash + // The circuit adds 36893488147419103232 (0x2000000000000000) to discriminator + // This effectively puts a 2 prefix at byte 23 (counting from right in BE) + + // SDK format: 32-byte array with leaf_index in LE at [28..32] + let mut leaf_index_bytes = [0u8; 32]; + leaf_index_bytes[28..32].copy_from_slice(&(leaf_index as u32).to_le_bytes()); + + // SDK format: 32-byte array with discriminator at [24..32] and prefix 2 at [23] + let mut discriminator_bytes = [0u8; 32]; + discriminator_bytes[24..32].copy_from_slice(&discriminator); + discriminator_bytes[23] = 2; + + let computed_leaf_hash = Poseidon::hashv(&[ + owner_hashed.as_slice(), + leaf_index_bytes.as_slice(), + merkle_tree_hashed.as_slice(), + credential_account.address.as_ref().unwrap().as_ref(), + discriminator_bytes.as_slice(), + public_data_hash.as_slice(), + ]) + .unwrap(); + + assert_eq!( + computed_leaf_hash, credential_account.hash, + "Leaf hash mismatch - circuit cannot recreate account hash" + ); + + proof_inputs.insert( + "owner_hashed".to_string(), + 
vec![BigUint::from_bytes_be(&owner_hashed).to_string()], + ); + proof_inputs.insert("leaf_index".to_string(), vec![leaf_index.to_string()]); + + // account_leaf_index needs to be in the same format as SDK: 32-byte array with value at [28..32] in LE + let mut account_leaf_index_bytes = [0u8; 32]; + account_leaf_index_bytes[28..32] + .copy_from_slice(&(credential_account.leaf_index as u32).to_le_bytes()); + proof_inputs.insert( + "account_leaf_index".to_string(), + vec![BigUint::from_bytes_be(&account_leaf_index_bytes).to_string()], + ); + + // Add address field - credential account has an address + let address = credential_account.address.unwrap_or([0u8; 32]); + proof_inputs.insert( + "address".to_string(), + vec![BigUint::from_bytes_be(&address).to_string()], + ); + + proof_inputs.insert( + "merkle_tree_hashed".to_string(), + vec![BigUint::from_bytes_be(&merkle_tree_hashed).to_string()], + ); + proof_inputs.insert( + "discriminator".to_string(), + vec![BigUint::from_bytes_be(&discriminator).to_string()], + ); + proof_inputs.insert( + "issuer_hashed".to_string(), + vec![BigUint::from_bytes_be(&issuer_hashed).to_string()], + ); + + // Add credential private key (private input) - already padded to 32 bytes + proof_inputs.insert( + "credentialPrivateKey".to_string(), + vec![BigUint::from_bytes_be(&credential.private_key).to_string()], + ); + + proof_inputs.insert( + "encrypted_data_hash".to_string(), + vec![BigUint::from_bytes_be(&encrypted_data_hash).to_string()], + ); + proof_inputs.insert( + "public_encrypted_data_hash".to_string(), + vec![BigUint::from_bytes_be(&encrypted_data_hash).to_string()], + ); + + // Add verification_id (public input) - pad to 32 bytes + let mut padded_verification = [0u8; 32]; + padded_verification[1..32].copy_from_slice(verification_id); + proof_inputs.insert( + "verification_id".to_string(), + vec![BigUint::from_bytes_be(&padded_verification).to_string()], + ); + + // Compute nullifier + let nullifier = 
credential.compute_nullifier(verification_id); + proof_inputs.insert( + "nullifier".to_string(), + vec![BigUint::from_bytes_be(&nullifier).to_string()], + ); + + // Add merkle proof inputs + let path_elements: Vec = merkle_proof_hashes + .iter() + .map(|hash| BigUint::from_bytes_be(hash).to_string()) + .collect(); + proof_inputs.insert("pathElements".to_string(), path_elements); + + // Use the actual merkle root from the indexer + let expected_root_bigint = BigUint::from_bytes_be(merkle_root); + proof_inputs.insert( + "expectedRoot".to_string(), + vec![expected_root_bigint.to_string()], + ); + + // Generate proof + let circuit_inputs = serde_json::to_string(&proof_inputs).unwrap(); + let proof = CircomProver::prove( + ProofLib::Arkworks, + WitnessFn::RustWitness(compressedaccountmerkleproof_witness), + circuit_inputs, + zkey_path.clone(), + ) + .expect("Proof generation failed"); + + // Verify proof locally + let is_valid = CircomProver::verify(ProofLib::Arkworks, proof.clone(), zkey_path.clone()) + .expect("Proof verification failed"); + assert!(is_valid, "Local circom proof verification should pass"); + + // Convert to groth16-solana format and compress + let (proof_a_uncompressed, proof_b_uncompressed, proof_c_uncompressed) = + convert_proof(&proof.proof).expect("Failed to convert proof"); + + use groth16_solana::proof_parser::circom_prover::convert_proof_to_compressed; + let (proof_a, proof_b, proof_c) = convert_proof_to_compressed( + &proof_a_uncompressed, + &proof_b_uncompressed, + &proof_c_uncompressed, + ) + .expect("Failed to compress proof"); + + // Verify with groth16-solana locally (same as on-chain) + { + use groth16_solana::groth16::Groth16Verifier; + use groth16_solana::proof_parser::circom_prover::convert_public_inputs; + + // Convert public inputs from the circom proof (8 public inputs in circuit) + let public_inputs_converted: [[u8; 32]; 8] = convert_public_inputs(&proof.pub_inputs); + println!("public_inputs_converted {:?}", 
public_inputs_converted); + // Create verifier using the uncompressed proofs (which have proof_a negated) + let mut verifier = Groth16Verifier::new( + &proof_a_uncompressed, + &proof_b_uncompressed, + &proof_c_uncompressed, + &public_inputs_converted, + &zk_id::verifying_key::VERIFYINGKEY, + ) + .expect("Failed to create verifier"); + + // Verify + verifier + .verify() + .expect("Local groth16-solana verification failed"); + } + + let compressed_proof = + light_compressed_account::instruction_data::compressed_proof::CompressedProof { + a: proof_a, + b: proof_b, + c: proof_c, + }; + + (compressed_proof, nullifier) +} +``` From 0b5171ab5a122cae20964369221f15f5207e1241 Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:15:45 +0000 Subject: [PATCH 06/19] Add hidden navigation for program examples source code Move program examples MDX files from .context/ to compressed-pdas/ and configure hidden group in docs.json with nested structure. - Add PackedAccounts section to client guide with offset explanation - Update avoid.md with address derivation and pre accounts patterns - Remove deprecated client-library.mdx file - Add program-examples-mdx as hidden group with 6 nested sections - Clarify tree account requirements in client guide --- .claude/avoid.md | 2 + .claude/commands/improve.md | 117 ++++++ client-library/client-guide.mdx | 369 ++++++++---------- compressed-pdas/client-library.mdx | 4 - .../program-examples-mdx/README.mdx | 0 .../account-comparison/Anchor-toml.mdx | 0 .../account-comparison/Cargo-toml.mdx | 0 .../account-comparison/package-json.mdx | 0 .../account-comparison/Cargo-toml.mdx | 0 .../account-comparison/Xargo-toml.mdx | 0 .../account-comparison/src/lib-rs.mdx | 0 .../tests/test_compressed_account-rs.mdx | 0 .../tests/test_solana_account-rs.mdx | 0 .../account-comparison/tsconfig-json.mdx | 0 .../basic-operations/anchor/README.mdx | 0 .../anchor/burn/Anchor-toml.mdx | 0 .../anchor/burn/Cargo-toml.mdx | 0 .../anchor/burn/package-json.mdx | 
0 .../anchor/burn/programs/burn/Cargo-toml.mdx | 0 .../anchor/burn/programs/burn/Xargo-toml.mdx | 0 .../anchor/burn/programs/burn/src/lib-rs.mdx | 0 .../burn/programs/burn/tests/test-rs.mdx | 0 .../anchor/burn/tests/burn-ts.mdx | 0 .../anchor/burn/tsconfig-json.mdx | 0 .../anchor/close/Anchor-toml.mdx | 0 .../anchor/close/Cargo-toml.mdx | 0 .../anchor/close/package-json.mdx | 0 .../close/programs/close/Cargo-toml.mdx | 0 .../close/programs/close/Xargo-toml.mdx | 0 .../close/programs/close/src/lib-rs.mdx | 0 .../close/programs/close/tests/test-rs.mdx | 0 .../anchor/close/tests/close-ts.mdx | 0 .../anchor/close/tsconfig-json.mdx | 0 .../anchor/create/Anchor-toml.mdx | 0 .../anchor/create/Cargo-toml.mdx | 0 .../anchor/create/package-json.mdx | 0 .../create/programs/create/Cargo-toml.mdx | 0 .../create/programs/create/Xargo-toml.mdx | 0 .../create/programs/create/src/lib-rs.mdx | 0 .../create/programs/create/tests/test-rs.mdx | 0 .../anchor/create/tests/create-ts.mdx | 0 .../anchor/create/tsconfig-json.mdx | 0 .../basic-operations/anchor/package-json.mdx | 0 .../anchor/reinit/Anchor-toml.mdx | 0 .../anchor/reinit/Cargo-toml.mdx | 0 .../anchor/reinit/package-json.mdx | 0 .../reinit/programs/reinit/Cargo-toml.mdx | 0 .../reinit/programs/reinit/Xargo-toml.mdx | 0 .../reinit/programs/reinit/src/lib-rs.mdx | 0 .../reinit/programs/reinit/tests/test-rs.mdx | 0 .../anchor/reinit/tests/reinit-ts.mdx | 0 .../anchor/reinit/tsconfig-json.mdx | 0 .../anchor/update/Anchor-toml.mdx | 0 .../anchor/update/Cargo-toml.mdx | 0 .../anchor/update/package-json.mdx | 0 .../update/programs/update/Cargo-toml.mdx | 0 .../update/programs/update/Xargo-toml.mdx | 0 .../update/programs/update/src/lib-rs.mdx | 0 .../update/programs/update/tests/test-rs.mdx | 0 .../anchor/update/tests/update-ts.mdx | 0 .../anchor/update/tsconfig-json.mdx | 0 .../basic-operations/native/Cargo-toml.mdx | 0 .../basic-operations/native/README.mdx | 0 .../basic-operations/native/package-json.mdx | 0 
.../native/programs/burn/Cargo-toml.mdx | 0 .../native/programs/burn/src/lib-rs.mdx | 0 .../programs/burn/src/test_helpers-rs.mdx | 0 .../native/programs/burn/tests/test-rs.mdx | 0 .../native/programs/close/Cargo-toml.mdx | 0 .../native/programs/close/src/lib-rs.mdx | 0 .../programs/close/src/test_helpers-rs.mdx | 0 .../native/programs/close/tests/test-rs.mdx | 0 .../native/programs/create/Cargo-toml.mdx | 0 .../native/programs/create/Xargo-toml.mdx | 0 .../native/programs/create/src/lib-rs.mdx | 0 .../programs/create/src/test_helpers-rs.mdx | 0 .../native/programs/create/tests/test-rs.mdx | 0 .../native/programs/reinit/Cargo-toml.mdx | 0 .../native/programs/reinit/src/lib-rs.mdx | 0 .../programs/reinit/src/test_helpers-rs.mdx | 0 .../native/programs/reinit/tests/test-rs.mdx | 0 .../native/programs/update/Cargo-toml.mdx | 0 .../native/programs/update/Xargo-toml.mdx | 0 .../native/programs/update/src/lib-rs.mdx | 0 .../programs/update/src/test_helpers-rs.mdx | 0 .../native/programs/update/tests/test-rs.mdx | 0 .../basic-operations/native/tsconfig-json.mdx | 0 .../counter/anchor/Anchor-toml.mdx | 0 .../counter/anchor/Cargo-toml.mdx | 0 .../counter/anchor/README.mdx | 0 .../counter/anchor/package-json.mdx | 0 .../anchor/programs/counter/Cargo-toml.mdx | 0 .../anchor/programs/counter/Xargo-toml.mdx | 0 .../anchor/programs/counter/src/lib-rs.mdx | 0 .../anchor/programs/counter/tests/test-rs.mdx | 0 .../counter/anchor/tests/test-ts.mdx | 0 .../counter/anchor/tsconfig-json.mdx | 0 .../counter/native/Cargo-toml.mdx | 0 .../counter/native/Xargo-toml.mdx | 0 .../counter/native/src/lib-rs.mdx | 0 .../counter/native/tests/test-rs.mdx | 0 .../counter/pinocchio/Cargo-toml.mdx | 0 .../counter/pinocchio/Xargo-toml.mdx | 0 .../counter/pinocchio/src/lib-rs.mdx | 0 .../counter/pinocchio/tests/test-rs.mdx | 0 .../create-and-update/Anchor-toml.mdx | 0 .../create-and-update/Cargo-toml.mdx | 0 .../create-and-update/README.mdx | 0 .../create-and-update/package-json.mdx | 0 
.../programs/create-and-update/Cargo-toml.mdx | 0 .../programs/create-and-update/Xargo-toml.mdx | 0 .../programs/create-and-update/src/lib-rs.mdx | 0 .../create-and-update/tests/test-rs.mdx | 0 .../tests/test_create_two_accounts-rs.mdx | 0 .../tests/create_and_update-ts.mdx | 0 .../create-and-update/tsconfig-json.mdx | 0 .../read-only/Cargo-toml.mdx | 0 .../program-examples-mdx/read-only/README.mdx | 0 .../read-only/Xargo-toml.mdx | 0 .../read-only/src/lib-rs.mdx | 0 .../read-only/tests/test-rs.mdx | 0 .../program-examples-mdx/zk-id/Cargo-toml.mdx | 0 .../program-examples-mdx/zk-id/README.mdx | 0 .../program-examples-mdx/zk-id/Xargo-toml.mdx | 0 .../program-examples-mdx/zk-id/build-rs.mdx | 0 .../zk-id/circuits/README.mdx | 0 .../zk-id/package-json.mdx | 0 .../program-examples-mdx/zk-id/src/lib-rs.mdx | 0 .../zk-id/src/verifying_key-rs.mdx | 0 .../zk-id/tests/circuit-rs.mdx | 0 .../zk-id/tests/test-rs.mdx | 0 compressed-pdas/program-examples.mdx | 10 +- docs.json | 238 +++++++++++ 133 files changed, 515 insertions(+), 225 deletions(-) create mode 100644 .claude/commands/improve.md delete mode 100644 compressed-pdas/client-library.mdx rename {.context => compressed-pdas}/program-examples-mdx/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx (100%) rename {.context => 
compressed-pdas}/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/account-comparison/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx (100%) rename {.context 
=> compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx (100%) 
rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx (100%) rename {.context => 
compressed-pdas}/program-examples-mdx/basic-operations/native/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx (100%) 
rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/basic-operations/native/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx (100%) rename {.context => 
compressed-pdas}/program-examples-mdx/counter/anchor/tests/test-ts.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/anchor/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/native/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/native/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/native/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/native/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/Anchor-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx (100%) 
rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/create-and-update/tsconfig-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/read-only/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/read-only/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/read-only/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/read-only/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/read-only/tests/test-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/Cargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/Xargo-toml.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/build-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/circuits/README.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/package-json.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/src/lib-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/src/verifying_key-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/tests/circuit-rs.mdx (100%) rename {.context => compressed-pdas}/program-examples-mdx/zk-id/tests/test-rs.mdx (100%) diff --git a/.claude/avoid.md b/.claude/avoid.md index 00979448..ca40ddf6 100644 --- a/.claude/avoid.md +++ b/.claude/avoid.md @@ -5,3 +5,5 @@ Reference this doc to ensure clear, direct technical writing with proper informa | Don't | Do | |-------|-----| | **Set the initial lamports balance** to N epochs (must be at least 2 epochs)
• The account stays decompressed for at least these N epochs.
• The amount can be customized based on the expected activity of the account.
• The initial lamports balance is paid by the account creator. | **Set the initial lamports balance** to N epochs (must be at least 2 epochs)
• Paid by the account creator.
• Keeps the account decompressed for N epochs.
• Customize N based on expected account activity. | +| **Address derivation:**
You only need to derive an address when you create a compressed account.

*Issue: Implies derivation is creation-only* | **Address derivation:**
You derive addresses in two scenarios:
• **At account creation** - derive the address to create the account's persistent identifier, then pass it to `getValidityProofV0()` in the address array
• **Before building instructions** - derive the address to fetch existing accounts using `rpc.getCompressedAccount()`, then reference them in your transaction

*Shows complete flow for both use cases* | +| **Pre Accounts:**
Pre accounts are added at the beginning of `PackedAccounts` and the length of pre accounts varies based on the number of accounts added.

The on-chain program uses offsets to locate accounts it needs to access in the instruction's accounts array.

`PackedAccounts` calculates positions of the account:
• `system_accounts_start_offset` = length of pre accounts
• `packed_accounts_start_offset` = length of pre accounts + system accounts

*Issues: Vague "uses offsets", doesn't specify what pre accounts are, missing where/how offsets are passed* | **Pre Accounts:**
Pre accounts are your program-specific accounts (signers, PDAs for CPIs) added to the beginning of `PackedAccounts` at known positions.

Since the number of pre accounts varies per instruction, the on-chain program receives offsets in the instruction data to locate Light System accounts and tree accounts:
• `system_accounts_start_offset` (u8) - where Light System accounts begin in `remaining_accounts`
• `packed_accounts_start_offset` (u8) - where tree accounts begin in `remaining_accounts`

`PackedAccounts.to_account_metas()` calculates these offsets on the client-side and passes them to the program.

*Specifies what pre accounts are, where offsets are received (instruction data), exact function name, what the offsets locate* | diff --git a/.claude/commands/improve.md b/.claude/commands/improve.md new file mode 100644 index 00000000..668b19c7 --- /dev/null +++ b/.claude/commands/improve.md @@ -0,0 +1,117 @@ +--- +description: Improves ZK Compression documentation by replacing vague statements with precise technical details verified against light-protocol codebase. Use when documentation lacks function names, data types, or specific mechanisms. +argument-hint: +allowed-tools: [Read, Edit, Glob, Grep, mcp__deepwiki__read_wiki_structure, mcp__deepwiki__read_wiki_contents, mcp__deepwiki__ask_question] +--- + +# /improve + +Improve: $ARGUMENTS + +**WHY:** Developers need exact function names, data types, and mechanisms—not vague verbs like "handles" or "uses". Precision prevents implementation confusion. + +## MANDATORY: Before ANY task execution + +### First, output your understanding and plan + +- State which section/file you'll improve +- Identify specific vague statements (quote them) +- List technical questions for DeepWiki (HOW? WHERE? WHAT function?) + +### Then assess if clarification is needed + +- Which specific section if multiple exist? +- Focus on all vague statements or specific areas? +- Preserve structure or reorganize? + +## Step 1: Read Context and Identify Vague Statements + +Read the full documentation file to understand page flow and audience. + +**Flag vague statements using `/home/tilo/.claude/context/terminology-reference.md` rules:** + +- Vague verbs: "handles", "manages", "processes", "uses" → needs HOW +- Missing function names: "calculates offsets" → WHICH function? +- Unclear data flow: "passes data" → WHERE? (instruction data vs accounts) +- Missing types: "offset value" → WHAT type? (u8, u16?) 
+ +**For each vague statement, document:** + +```text +"[exact quote]" → Missing: [function name / data type / mechanism] +``` + +## Step 2: Query DeepWiki for Missing Details + +For each flagged statement, query `Lightprotocol/light-protocol`: + +**Query pattern:** + +- Browse: `mcp__deepwiki__read_wiki_structure("Lightprotocol/light-protocol")` +- Read: `mcp__deepwiki__read_wiki_contents("Lightprotocol/light-protocol")` +- Ask: `mcp__deepwiki__ask_question("Lightprotocol/light-protocol", "[precise question]")` + +**Questions to ask:** + +- "Which function [performs operation]? What is its signature?" +- "What data type is [field/parameter]?" +- "Where is [value] passed—instruction data or accounts array?" +- "How does the on-chain program [mechanism]? What steps occur?" + +## Step 3: Rewrite with Precision + +Apply precision rules (reference `/home/tilo/.claude/context/terminology-reference.md`): + +**AVOID:** "handles", "manages", "processes", "operations", "enables" + +**USE:** Exact function names (`PackedAccounts.to_account_metas()`), data types ((u8)), specific operations ("verifies proof against state root") + +**For each vague statement:** + +Original: `"[quote]"` + +Improved: `"[rewrite with function names, data types, and mechanism]"` + +Changes: + +- Added: `function_name()`, data type ([type]) +- Replaced: "[vague verb]" → "[specific operation]" +- Clarified: [WHERE/HOW detail] + +## Step 4: Apply Edits and Validate + +Use Edit tool to replace statements. Preserve structure and formatting. + +**Post-edit validation:** + +- [ ] All vague verbs replaced with specific operations +- [ ] Function names and data types included +- [ ] Mechanisms explain HOW, not just WHAT +- [ ] Data flow clarified (WHERE) + +--- + +## Example: Pre Accounts Accordion Improvement + +**BEFORE (vague):** +> "Pre accounts are added at the beginning. The on-chain program uses offsets to locate accounts." + +**Issues:** "are added" (WHAT are they?), "uses offsets" (HOW? 
WHERE received?), missing function/types + +**DeepWiki queries:** + +- "How does the on-chain program receive system_accounts_start_offset?" +- "Which function calculates these offsets?" + +**Answers:** Offsets in instruction data as u8, calculated by `PackedAccounts.to_account_metas()`, used via `ctx.remaining_accounts.split_at()` + +**AFTER (precise):** + +> "Pre accounts are your program-specific accounts (signers, PDAs for CPIs) added at known positions. +> +> The on-chain program receives offsets in the instruction data to locate Light System accounts and tree accounts: +> +> - `system_accounts_start_offset` (u8) - where Light System accounts begin +> - `packed_accounts_start_offset` (u8) - where tree accounts begin +> +> `PackedAccounts.to_account_metas()` calculates these offsets client-side." \ No newline at end of file diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 604d64ae..aa9cf843 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -5,6 +5,8 @@ description: >- implementation and full code examples. --- +import SystemAccountsList from '/snippets/compressed-pdas-system-accounts-list.mdx'; + ZK Compression provides Rust and Typescript clients to interact with compressed accounts and tokens on Solana. @@ -194,19 +196,18 @@ light test-validator -## Address (Create only) - - -You only need to derive an address when you create a compressed account. - +## Address - - Derive a persistent address as a unique identifier for your compressed account, similar to [program-derived addresses (PDAs)](https://solana.com/docs/core/pda). -* Like PDAs, compressed account addresses don't belong to a private key; rather, they're derived from the program that owns them. -* The key difference to PDAs is that compressed accounts require an **address tree** parameter. -* An address tree is a Merkle tree that stores the compressed account addresses. 
+You derive addresses in two scenarios: +* **At account creation** - derive the address to create the account's persistent identifier, then pass it to `getValidityProofV0()` in the address array (see Step 3 "Validity Proof") +* **Before building instructions** - derive the address to fetch existing accounts using `rpc.getCompressedAccount()`, then reference them in your transaction + +### Derivation +Like PDAs, compressed account addresses don't belong to a private key; rather, they're derived from the program that owns them. +* The key difference to PDAs is that compressed accounts you include an **address tree** parameter. +* An address tree is a Merkle tree that stores the compressed account address. The protocol maintains Merkle trees. You don't need to initialize custom trees. Find the [pubkeys for Merkle trees here](https://www.zkcompression.com/resources/addresses-and-urls). @@ -303,12 +304,6 @@ let (address, _) = derive_address( * `&program_id`: Specify your program ID. - - -**Use the same address** tree for all subsequent instructions **in client and program**. - - - @@ -320,7 +315,7 @@ Transactions with compressed accounts must include a validity proof: * You can **combine multiple addresses and hashes in one proof** to optimize compute cost and instruction data. -You fetch a validity proof from your RPC provider that supports ZK Compression (Helius, Triton, ...). +You fetch a validity proof from your RPC provider that supports ZK Compression, such as Helius or Triton. 
@@ -447,19 +442,10 @@ Specify the **account hash**, `tree` and `queue` pubkeys from the compressed acc ### Optimize with Combined Proofs -A single proof can contain: +Depending on the **Merkle tree version** (V1 or V2) you are using, you can prove **in a single proof**: * multiple addresses, * multiple account hashes, or -* a combination of addresses and account hashes - - -**Advantages of combined proofs**: -* You only add **one 128 byte validity proof** to your instruction data **for multiple instructions**. -* This can **optimize** your **transaction's size** to stay inside the 1232 byte limit. -* **Compute unit consumption is reduced by at least 100k CU**, since combined proofs are verified in a single CPI by the Light System Program. - - -Depending on the **Merkle tree version** (V1 or V2) you are using, you can prove the following **in a single proof**: +* a combination of addresses and account hashes. @@ -480,9 +466,12 @@ Depending on the **Merkle tree version** (V1 or V2) you are using, you can prove - -View the [source code for the proof combinations here](https://github.com/Lightprotocol/light-protocol/tree/871215642b4b5b69d2bcd7eca22542346d0e2cfa/program-libs/verifier/src/verifying_keys). - + +**Advantages of combined proofs**: +* You only add **one 128 byte validity proof** to your instruction data **for multiple instructions**. +* This can **optimize** your **transaction's size** to stay inside the 1232 byte limit. +* **Compute unit consumption is reduced by at least 100k CU**, since combined proofs are verified in a single CPI by the Light System Program. 
+
 
 ### Example Create Address & Update Account in one Proof
 
@@ -554,189 +543,46 @@ See the full [create-and-update program example for this proof combination with
 
 ## Accounts
 
+In your instruction data, you reference in which [Merkle tree](/learn/core-concepts/merkle-trees-validity-proofs#state-trees) compressed accounts are or will be stored and other accounts required to interact with the Light System Program.
 
-Transactions with compressed accounts reference **custom accounts and `PackedAccounts`** in their instruction data in the accounts array:
-
-```
-    PackedAccounts
-  ┌----------------------------------------------┐
-  [custom accounts] [pre accounts] [system accounts] [tree accounts]
-     ↑                ↑               ↑
-  Signers,           Light System    State trees,
-  fee payer          accounts        address trees,
-```
-
-The `PackedAccounts` helper allows you to **reference accounts by u8 indices instead of 32-byte pubkeys**.
-
-
-Program-specific (custom) accounts can be added to `PackedAccounts` using e.g. `add_pre_accounts_signer` or `add_pre_accounts_meta`.
-These are typically accounts that need to be at a specific, known position in the instruction's account list.
+* The SDKs include a `PackedAccounts` helper to optimize instruction data by adding the accounts' pubkeys to the account array.
+* The instruction data references these accounts by u8 indices instead of 32-byte pubkeys.
 
+ 
 **Light System accounts** are 8 required accounts for proof verification and CPI calls to update state and address trees.
-
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#AccountDescription
1Light System ProgramVerifies validity proofs, compressed account ownership checks, CPIs the account compression program to update tree accounts
2CPI SignerPDA to sign CPI calls from your program to Light System Program. Verified by Light System Program during CPI. Derived from your program ID
3Registered Program PDAAccess control to the Account Compression Program
4Noop ProgramLogs compressed account state to Solana ledger. Only used in v1. Indexers parse transaction logs to reconstruct compressed account state
5Account Compression AuthoritySigns CPI calls from Light System Program to Account Compression Program
6Account Compression ProgramWrites to state and address tree accounts. Client and the account compression program do not interact directly
7Invoking ProgramYour program's ID, used by Light System Program to derive the CPI Signer PDA, verify the CPI Signer matches your program ID, and set the owner of created compressed accounts
8System ProgramSolana System Program to transfer lamports
+ +``` + PackedAccounts + ┌--------------------------------------------┐ +[custom accounts] [pre accounts][system accounts][tree accounts] + ↑ ↑ ↑ + Signers, Light System State trees, + fee payer accounts address trees, +``` +Custom accounts are program-specific accounts you pass manually in your instruction, typically through Anchor's account struct. - -**Merkle tree accounts** are state trees, address trees, and their associated queues to store compressed accounts and addresses. - -Depending on your instruction you must include indices for different tree and queue accounts. -* For other instructions than create, **use the state tree of the existing compressed account** as output state tree. -* The pubkey is automatically deduplicated when you pack accounts. - - -When creating or updating multiple accounts in a single transaction, use one output state tree. - - - -V2 is on Devnet and reduces compute unit consumption by up to 70%. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
InstructionAddress TreeState TreeNullifier QueueOutput State Tree
Create--
Update / Close / Reinit-
Burn--
- -* **Address tree**: only used to derive and store a new address. -* **State tree**: used to reference the existing compressed account hash. Therefore not used by create. -* **Nullifier queue**: used to nullify the existing compressed account hash to prevent double spending. Therefore not used by create. -* **Output State tree**: used to store the new or updated compressed account hash. Burn does not produce output state. - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
InstructionAddress TreeState Tree (includes nullifier queue)Output Queue
Create-
Update / Close / Reinit-
Burn--
+* We recommend to append `PackedAccounts` after your program specific accounts and in anchor in `remaining_accounts`. +* Custom accounts can be added to `PackedAccounts` using `add_pre_accounts_signer`, `add_pre_accounts_signer_mut` or `add_pre_accounts_meta`. -* **Address tree**: only used to derive and store a new address. -* **State tree**: used to reference the existing compressed account hash. Therefore not used by create. V2 combines the state tree and nullifier queue into a single account. -* **Output State tree**: used to store the new or updated compressed account hash. Burn does not produce output state. -* **Output Queue**: used to store compressed account hashes. A forester node updates the Merkle tree asynchronously. + +Pre accounts are your program-specific accounts (signers, PDAs for CPIs) and other accounts added to the beginning of `PackedAccounts` at known positions. -
-
+Since the number of pre accounts varies per instruction, the on-chain program receives offsets in the instruction data to locate Light System accounts and tree accounts: +* `system_accounts_start_offset` (u8) - where Light System accounts begin in `remaining_accounts` +* `packed_accounts_start_offset` (u8) - where tree accounts begin in `remaining_accounts` +`PackedAccounts.to_account_metas()` calculates these offsets on the client-side and passes them to the program.
- -In your instruction, -1. Put your program-specific accounts first -2. Append Packed accounts to end of the vector (recommended due to variable length) - + +Depending on your instruction you include different tree and queue accounts. +You will learn about this after the code example of `PackedAccounts`. + @@ -810,13 +656,6 @@ const { remainingAccounts } ``` - - -* Create uses address tree and address queue from the new address derivation -* Update/Close/Reinit/Burn use state tree and nullifier queue from the existing compressed account's TreeInfo -* Create derives a new address (no existing account) -* Update/Close/Reinit/Burn reference the existing compressed account - @@ -872,14 +711,111 @@ let (remaining_accounts_metas, _, _) - -* Create packs address tree for the new address, then adds output state tree separately -* Update/Close/Reinit/Burn pack state tree that includes output tree automatically - +**Tree Accounts Explained:** + +Depending on your instruction you must include different tree and queue accounts. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
InstructionAddress TreeState TreeNullifier QueueOutput State Tree
Create--
Update / Close / Reinit-
Burn--
+ +* The **Address tree** is used to derive and store a new address (create-only) +* The **State tree** is used to reference the existing compressed account hash. Therefore not used by create. +* The **Nullifier queue** is used to nullify the existing compressed account hash to prevent double spending. Therefore not used by create. +* The **Output State tree** is used to store the new or updated compressed account hash. + * **Create only** - Choose any available state tree, or use a pre-selected tree to store the new compressed account. + * **Update/Close/Reinit** - Use the state tree of the existing compressed account as output state tree. + * **Mixed instructions (create + update in same tx)** - Use the state tree from the existing account as output state tree. + * **Burn** - Burn does not produce output state and does not need an output state tree. + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
InstructionAddress TreeState Tree (includes nullifier queue)Output Queue
Create-
Update / Close / Reinit-
Burn--
+ +* **Address tree**: only used to derive and store a new address. +* **State tree**: used to reference the existing compressed account hash. Therefore not used by create. V2 combines the state tree and nullifier queue into a single account. +* **Output Queue**: used to store compressed account hashes. A forester node updates the state tree asynchronously. + * **Create only** - Choose any available queue, or use a pre-selected queue to store the new compressed account. + * **Update/Close/Reinit** - Use the queue of the existing compressed account as output queue. + * **Mixed instructions (create + update in same tx)** - Use the queue from the existing account as output queue. + * **Burn** - Do not include an output queue. +
+
+ + +V2 is on Devnet and reduces compute unit consumption by up to 70%. + + @@ -1143,11 +1079,12 @@ let instruction_data = burn::instruction::BurnAccount { Build the instruction with your `program_id`, `accounts`, and `data` from Step 5. - -In `accounts`, -1. Put your program-specific accounts first -2. Append Packed accounts to end of the vector (recommended due to variable length) - +In `accounts`, put your program-specific accounts first and append `PackedAccounts` to end of the vector (recommended due to variable length) +``` + Accounts +┌---------------------------------┐ +[custom accounts] [PackedAccounts] +``` ```rust let instruction = Instruction { diff --git a/compressed-pdas/client-library.mdx b/compressed-pdas/client-library.mdx deleted file mode 100644 index 8244cc27..00000000 --- a/compressed-pdas/client-library.mdx +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: Client Library -description: Overview to Rust and Typescript client guides. Guides include step-by-step implementation and full code examples. 
---- diff --git a/.context/program-examples-mdx/README.mdx b/compressed-pdas/program-examples-mdx/README.mdx similarity index 100% rename from .context/program-examples-mdx/README.mdx rename to compressed-pdas/program-examples-mdx/README.mdx diff --git a/.context/program-examples-mdx/account-comparison/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/account-comparison/Anchor-toml.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/account-comparison/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/account-comparison/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/account-comparison/package-json.mdx b/compressed-pdas/program-examples-mdx/account-comparison/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/package-json.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/package-json.mdx diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx similarity index 100% rename from 
.context/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx b/compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs.mdx diff --git a/.context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx b/compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx rename to compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs.mdx diff --git a/.context/program-examples-mdx/account-comparison/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/account-comparison/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/account-comparison/tsconfig-json.mdx rename to 
compressed-pdas/program-examples-mdx/account-comparison/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/README.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/README.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/README.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/README.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/package-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml.mdx diff --git 
a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx rename to 
compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/package-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/package-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/package-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx rename to 
compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/tests/close-ts.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/close/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx similarity index 100% rename from 
.context/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/package-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/package-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/package-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx 
b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/tests/create-ts.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/create/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/package-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/package-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/package-json.mdx diff --git 
a/.context/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/package-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx rename to 
compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx similarity index 100% rename from 
.context/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/package-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/package-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/package-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx 
b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/tests/update-ts.mdx diff --git a/.context/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/anchor/update/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/Cargo-toml.mdx diff --git 
a/.context/program-examples-mdx/basic-operations/native/README.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/README.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/README.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/README.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/package-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/package-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/package-json.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs.mdx diff --git 
a/.context/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx rename to 
compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx similarity index 
100% rename from .context/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs.mdx diff --git 
a/.context/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx 
rename to compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/basic-operations/native/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/basic-operations/native/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/basic-operations/native/tsconfig-json.mdx rename to compressed-pdas/program-examples-mdx/basic-operations/native/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/counter/anchor/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/Anchor-toml.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/counter/anchor/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/counter/anchor/README.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/README.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/README.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/README.mdx diff --git a/.context/program-examples-mdx/counter/anchor/package-json.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/package-json.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/package-json.mdx diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx similarity index 100% rename from 
.context/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/counter/anchor/tests/test-ts.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/tests/test-ts.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/tests/test-ts.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/tests/test-ts.mdx diff --git a/.context/program-examples-mdx/counter/anchor/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/counter/anchor/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/counter/anchor/tsconfig-json.mdx rename to compressed-pdas/program-examples-mdx/counter/anchor/tsconfig-json.mdx diff --git 
a/.context/program-examples-mdx/counter/native/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/counter/native/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/counter/native/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/native/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/counter/native/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/counter/native/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/counter/native/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/native/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/counter/native/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/counter/native/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/counter/native/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/counter/native/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/counter/native/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/counter/native/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/counter/native/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/counter/native/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/pinocchio/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/counter/pinocchio/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx 
b/compressed-pdas/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/counter/pinocchio/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/counter/pinocchio/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/create-and-update/Anchor-toml.mdx b/compressed-pdas/program-examples-mdx/create-and-update/Anchor-toml.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/Anchor-toml.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/Anchor-toml.mdx diff --git a/.context/program-examples-mdx/create-and-update/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/create-and-update/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/create-and-update/README.mdx b/compressed-pdas/program-examples-mdx/create-and-update/README.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/README.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/README.mdx diff --git a/.context/program-examples-mdx/create-and-update/package-json.mdx b/compressed-pdas/program-examples-mdx/create-and-update/package-json.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/package-json.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/package-json.mdx diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx 
b/compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx b/compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx similarity index 100% rename from 
.context/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs.mdx diff --git a/.context/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx b/compressed-pdas/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/tests/create_and_update-ts.mdx diff --git a/.context/program-examples-mdx/create-and-update/tsconfig-json.mdx b/compressed-pdas/program-examples-mdx/create-and-update/tsconfig-json.mdx similarity index 100% rename from .context/program-examples-mdx/create-and-update/tsconfig-json.mdx rename to compressed-pdas/program-examples-mdx/create-and-update/tsconfig-json.mdx diff --git a/.context/program-examples-mdx/read-only/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/read-only/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/read-only/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/read-only/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/read-only/README.mdx b/compressed-pdas/program-examples-mdx/read-only/README.mdx similarity index 100% rename from .context/program-examples-mdx/read-only/README.mdx rename to compressed-pdas/program-examples-mdx/read-only/README.mdx diff --git a/.context/program-examples-mdx/read-only/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/read-only/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/read-only/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/read-only/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/read-only/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/read-only/src/lib-rs.mdx similarity index 100% rename from 
.context/program-examples-mdx/read-only/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/read-only/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/read-only/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/read-only/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/read-only/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/read-only/tests/test-rs.mdx diff --git a/.context/program-examples-mdx/zk-id/Cargo-toml.mdx b/compressed-pdas/program-examples-mdx/zk-id/Cargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/Cargo-toml.mdx rename to compressed-pdas/program-examples-mdx/zk-id/Cargo-toml.mdx diff --git a/.context/program-examples-mdx/zk-id/README.mdx b/compressed-pdas/program-examples-mdx/zk-id/README.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/README.mdx rename to compressed-pdas/program-examples-mdx/zk-id/README.mdx diff --git a/.context/program-examples-mdx/zk-id/Xargo-toml.mdx b/compressed-pdas/program-examples-mdx/zk-id/Xargo-toml.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/Xargo-toml.mdx rename to compressed-pdas/program-examples-mdx/zk-id/Xargo-toml.mdx diff --git a/.context/program-examples-mdx/zk-id/build-rs.mdx b/compressed-pdas/program-examples-mdx/zk-id/build-rs.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/build-rs.mdx rename to compressed-pdas/program-examples-mdx/zk-id/build-rs.mdx diff --git a/.context/program-examples-mdx/zk-id/circuits/README.mdx b/compressed-pdas/program-examples-mdx/zk-id/circuits/README.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/circuits/README.mdx rename to compressed-pdas/program-examples-mdx/zk-id/circuits/README.mdx diff --git a/.context/program-examples-mdx/zk-id/package-json.mdx b/compressed-pdas/program-examples-mdx/zk-id/package-json.mdx similarity index 100% rename from 
.context/program-examples-mdx/zk-id/package-json.mdx rename to compressed-pdas/program-examples-mdx/zk-id/package-json.mdx diff --git a/.context/program-examples-mdx/zk-id/src/lib-rs.mdx b/compressed-pdas/program-examples-mdx/zk-id/src/lib-rs.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/src/lib-rs.mdx rename to compressed-pdas/program-examples-mdx/zk-id/src/lib-rs.mdx diff --git a/.context/program-examples-mdx/zk-id/src/verifying_key-rs.mdx b/compressed-pdas/program-examples-mdx/zk-id/src/verifying_key-rs.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/src/verifying_key-rs.mdx rename to compressed-pdas/program-examples-mdx/zk-id/src/verifying_key-rs.mdx diff --git a/.context/program-examples-mdx/zk-id/tests/circuit-rs.mdx b/compressed-pdas/program-examples-mdx/zk-id/tests/circuit-rs.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/tests/circuit-rs.mdx rename to compressed-pdas/program-examples-mdx/zk-id/tests/circuit-rs.mdx diff --git a/.context/program-examples-mdx/zk-id/tests/test-rs.mdx b/compressed-pdas/program-examples-mdx/zk-id/tests/test-rs.mdx similarity index 100% rename from .context/program-examples-mdx/zk-id/tests/test-rs.mdx rename to compressed-pdas/program-examples-mdx/zk-id/tests/test-rs.mdx diff --git a/compressed-pdas/program-examples.mdx b/compressed-pdas/program-examples.mdx index 624e9a50..d627a789 100644 --- a/compressed-pdas/program-examples.mdx +++ b/compressed-pdas/program-examples.mdx @@ -10,11 +10,11 @@ description: Program example repository for compressed accounts with tests. Basic Operations include: -- **create** - Initialize a new compressed account. -- **update** - Modify data in an existing compressed account. -- **close** - Clear account data and preserve its address. -- **reinit** - Reinitialize a closed account with the same address. -- **burn** - Permanently delete a compressed account. 
+- **create** - Initialize a new compressed account +- **update** - Modify data of an existing compressed account +- **close** - Close a compressed account (it can be initialized again). +- **reinit** - Reinitialize a closed account +- **burn** - Permanently delete a compressed account (it cannot be initialized again). ## Counter Program diff --git a/docs.json b/docs.json index 488e9223..f19bc54b 100644 --- a/docs.json +++ b/docs.json @@ -85,6 +85,244 @@ ] }, "compressed-pdas/program-examples", + { + "group": "Program Examples (Source)", + "hidden": true, + "pages": [ + "compressed-pdas/program-examples-mdx/README", + { + "group": "Account Comparison", + "pages": [ + "compressed-pdas/program-examples-mdx/account-comparison/Anchor-toml", + "compressed-pdas/program-examples-mdx/account-comparison/Cargo-toml", + "compressed-pdas/program-examples-mdx/account-comparison/package-json", + "compressed-pdas/program-examples-mdx/account-comparison/tsconfig-json", + "compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/Cargo-toml", + "compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/Xargo-toml", + "compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/src/lib-rs", + "compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_compressed_account-rs", + "compressed-pdas/program-examples-mdx/account-comparison/programs/account-comparison/tests/test_solana_account-rs" + ] + }, + { + "group": "Basic Operations", + "pages": [ + { + "group": "Anchor", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/anchor/README", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/package-json", + { + "group": "Burn", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/Anchor-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/Cargo-toml", + 
"compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/package-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/tsconfig-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/Xargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/programs/burn/tests/test-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/burn/tests/burn-ts" + ] + }, + { + "group": "Close", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/Anchor-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/package-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/tsconfig-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/Xargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/programs/close/tests/test-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/close/tests/close-ts" + ] + }, + { + "group": "Create", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/Anchor-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/package-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/tsconfig-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/Cargo-toml", + 
"compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/Xargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/programs/create/tests/test-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/create/tests/create-ts" + ] + }, + { + "group": "Reinit", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/Anchor-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/package-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/tsconfig-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/Xargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/programs/reinit/tests/test-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/reinit/tests/reinit-ts" + ] + }, + { + "group": "Update", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/Anchor-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/package-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/tsconfig-json", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/Xargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/src/lib-rs", + 
"compressed-pdas/program-examples-mdx/basic-operations/anchor/update/programs/update/tests/test-rs", + "compressed-pdas/program-examples-mdx/basic-operations/anchor/update/tests/update-ts" + ] + } + ] + }, + { + "group": "Native", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/native/README", + "compressed-pdas/program-examples-mdx/basic-operations/native/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/package-json", + "compressed-pdas/program-examples-mdx/basic-operations/native/tsconfig-json", + { + "group": "Burn", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/src/test_helpers-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/burn/tests/test-rs" + ] + }, + { + "group": "Close", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/src/test_helpers-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/close/tests/test-rs" + ] + }, + { + "group": "Create", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/Xargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/src/test_helpers-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/create/tests/test-rs" + ] + }, + { + "group": "Reinit", + "pages": [ + 
"compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/src/test_helpers-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/reinit/tests/test-rs" + ] + }, + { + "group": "Update", + "pages": [ + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/Cargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/Xargo-toml", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/src/lib-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/src/test_helpers-rs", + "compressed-pdas/program-examples-mdx/basic-operations/native/programs/update/tests/test-rs" + ] + } + ] + } + ] + }, + { + "group": "Counter", + "pages": [ + { + "group": "Anchor", + "pages": [ + "compressed-pdas/program-examples-mdx/counter/anchor/README", + "compressed-pdas/program-examples-mdx/counter/anchor/Anchor-toml", + "compressed-pdas/program-examples-mdx/counter/anchor/Cargo-toml", + "compressed-pdas/program-examples-mdx/counter/anchor/package-json", + "compressed-pdas/program-examples-mdx/counter/anchor/tsconfig-json", + "compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/Cargo-toml", + "compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/Xargo-toml", + "compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/src/lib-rs", + "compressed-pdas/program-examples-mdx/counter/anchor/programs/counter/tests/test-rs", + "compressed-pdas/program-examples-mdx/counter/anchor/tests/test-ts" + ] + }, + { + "group": "Native", + "pages": [ + "compressed-pdas/program-examples-mdx/counter/native/Cargo-toml", + "compressed-pdas/program-examples-mdx/counter/native/Xargo-toml", + 
"compressed-pdas/program-examples-mdx/counter/native/src/lib-rs", + "compressed-pdas/program-examples-mdx/counter/native/tests/test-rs" + ] + }, + { + "group": "Pinocchio", + "pages": [ + "compressed-pdas/program-examples-mdx/counter/pinocchio/Cargo-toml", + "compressed-pdas/program-examples-mdx/counter/pinocchio/Xargo-toml", + "compressed-pdas/program-examples-mdx/counter/pinocchio/src/lib-rs", + "compressed-pdas/program-examples-mdx/counter/pinocchio/tests/test-rs" + ] + } + ] + }, + { + "group": "Create and Update", + "pages": [ + "compressed-pdas/program-examples-mdx/create-and-update/README", + "compressed-pdas/program-examples-mdx/create-and-update/Anchor-toml", + "compressed-pdas/program-examples-mdx/create-and-update/Cargo-toml", + "compressed-pdas/program-examples-mdx/create-and-update/package-json", + "compressed-pdas/program-examples-mdx/create-and-update/tsconfig-json", + "compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/Cargo-toml", + "compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/Xargo-toml", + "compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/src/lib-rs", + "compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/tests/test-rs", + "compressed-pdas/program-examples-mdx/create-and-update/programs/create-and-update/tests/test_create_two_accounts-rs", + "compressed-pdas/program-examples-mdx/create-and-update/tests/create_and_update-ts" + ] + }, + { + "group": "Read Only", + "pages": [ + "compressed-pdas/program-examples-mdx/read-only/README", + "compressed-pdas/program-examples-mdx/read-only/Cargo-toml", + "compressed-pdas/program-examples-mdx/read-only/Xargo-toml", + "compressed-pdas/program-examples-mdx/read-only/src/lib-rs", + "compressed-pdas/program-examples-mdx/read-only/tests/test-rs" + ] + }, + { + "group": "ZK ID", + "pages": [ + "compressed-pdas/program-examples-mdx/zk-id/README", + 
"compressed-pdas/program-examples-mdx/zk-id/Cargo-toml", + "compressed-pdas/program-examples-mdx/zk-id/Xargo-toml", + "compressed-pdas/program-examples-mdx/zk-id/build-rs", + "compressed-pdas/program-examples-mdx/zk-id/package-json", + "compressed-pdas/program-examples-mdx/zk-id/circuits/README", + "compressed-pdas/program-examples-mdx/zk-id/src/lib-rs", + "compressed-pdas/program-examples-mdx/zk-id/src/verifying_key-rs", + "compressed-pdas/program-examples-mdx/zk-id/tests/circuit-rs", + "compressed-pdas/program-examples-mdx/zk-id/tests/test-rs" + ] + } + ] + }, "client-library/client-guide" ] }, From 1be67955cfb461122c695e9251eef153d0c6be22 Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:43:34 +0000 Subject: [PATCH 07/19] Add .claude/ to gitignore --- .claude/CLAUDE.md | 142 ------ .claude/agents/code-snippet-validator.md | 327 ------------ .claude/agents/developer-text-validator.md | 341 ------------- .claude/agents/mintlify-components.md | 468 ------------------ .claude/avoid.md | 9 - .claude/commands/improve.md | 117 ----- .claude/commands/research.md | 195 -------- .claude/commands/review.md | 187 ------- .claude/skills/command-agent-builder/SKILL.md | 205 -------- .../patterns/freedom-levels.md | 248 ---------- .../patterns/mandatory-execution.md | 45 -- .../patterns/plan-first.md | 116 ----- .../templates/agent-template.md | 252 ---------- .../templates/basic-command.md | 173 ------- .../templates/mcp-command.md | 192 ------- .../validation/checklist.md | 265 ---------- .claude/skills/prompt-template/SKILL.md | 95 ---- .../implementation-prompt-template.md | 274 ---------- .../zk-compression-terminology/SKILL.md | 117 ----- .../compressed-accounts-terminology.md | 111 ----- .claude/tasks/README.md | 1 - .gitignore | 1 + 22 files changed, 1 insertion(+), 3880 deletions(-) delete mode 100644 .claude/CLAUDE.md delete mode 100644 .claude/agents/code-snippet-validator.md delete mode 100644 .claude/agents/developer-text-validator.md delete mode 
100644 .claude/agents/mintlify-components.md delete mode 100644 .claude/avoid.md delete mode 100644 .claude/commands/improve.md delete mode 100644 .claude/commands/research.md delete mode 100644 .claude/commands/review.md delete mode 100644 .claude/skills/command-agent-builder/SKILL.md delete mode 100644 .claude/skills/command-agent-builder/patterns/freedom-levels.md delete mode 100644 .claude/skills/command-agent-builder/patterns/mandatory-execution.md delete mode 100644 .claude/skills/command-agent-builder/patterns/plan-first.md delete mode 100644 .claude/skills/command-agent-builder/templates/agent-template.md delete mode 100644 .claude/skills/command-agent-builder/templates/basic-command.md delete mode 100644 .claude/skills/command-agent-builder/templates/mcp-command.md delete mode 100644 .claude/skills/command-agent-builder/validation/checklist.md delete mode 100644 .claude/skills/prompt-template/SKILL.md delete mode 100644 .claude/skills/prompt-template/resources/implementation-prompt-template.md delete mode 100644 .claude/skills/zk-compression-terminology/SKILL.md delete mode 100644 .claude/skills/zk-compression-terminology/resources/compressed-accounts-terminology.md delete mode 100644 .claude/tasks/README.md diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md deleted file mode 100644 index 98b28ba0..00000000 --- a/.claude/CLAUDE.md +++ /dev/null @@ -1,142 +0,0 @@ ---- -name: light-protocol-documentation-writer -description: Configures Claude as a pragmatic technical writer following strict style guides, frontmatter requirements, and Git workflow rules for MDX documentation. Use PROACTIVELY when working on technical documentation, MDX files, content strategy, or documentation structure tasks. -allowed-tools: [read, edit, glob, grep, mcp__deepwiki__read_wiki_structure, mcp__deepwiki__read_wiki_contents, mcp__deepwiki__ask_question] ---- - -## Initialization - -**Read immediately:** -1. This file (local CLAUDE.md) -2. 
[avoid.md](.claude/avoid.md) - Writing pattern reference - -**Load on-demand:** -- Skills: `.claude/skills/zk-compression-terminology/` - Use when writing ZK Compression docs -- Commands: `/commit` - Stage, commit, push changes -- Agent: `.claude/agents/mintlify-components.md` - Referenced below - ---- - -You are an experienced, pragmatic technical writer with robust content strategy and content design experience. You elegantly create just enough docs to solve users' needs and get them back to the product quickly. - -Rule #1: If you want an exception to ANY rule, YOU MUST STOP and get explicit permission from Ethan first. BREAKING THE LETTER OR SPIRIT OF THE RULES IS FAILURE. - -## Core Agent to Create Documentation - -`/home/tilo/Workspace/.claude/agents/mintlify-components.md` - -## Working relationship - -- We're colleagues working together your name is "Claude" -- You can push back on ideas-this can lead to better documentation. Cite sources and explain your reasoning when you do so -- ALWAYS ask for clarification rather than making assumptions -- NEVER lie, guess, or make up information -- You are much better read than I am. I have more nuanced understanding about our users. We work together to solve problems with our combined strengths. -- When you disagree with my approach, YOU MUST push back, citing specific reasons if you have them. -- YOU MUST call out bad ideas, unreasonable expectations, and mistakes. -- NEVER be agreeable just to be nice - I need your honest technical judgment. -- NEVER tell me I'm "absolutely right" or anything like that. You ARE NOT a sycophant. -- We can be humorous and playful, but not when it gets in the way of the task at hand. Save it for when a project is finished or we need levity during a tough project. -- YOU MUST ALWAYS ask for clarification rather than making assumptions. -- If you're having trouble, YOU MUST STOP and ask for help, especially for tasks where human input would be valuable. 
-- If you are making an inferrance, stop and ask me for confirmation or say that you need more information - -## Project context -- Format: MDX files with YAML frontmatter -- Config: docs.json for navigation, theme, settings - - See the [docs.json schema](https://mintlify.com/docs.json) when building the docs.json file and site navigation -- Components reference: - - Quick reference: mintlify-docs/quick-reference/ - - All components: mintlify-docs/docs/components/ -- Directory structure: - - client-library/ - Client library documentation (TypeScript and Rust) - - c-token/ - Compressed token documentation - - mintlify-docs/ - Local Mintlify documentation (gitignored) - - images/ - Image assets - - logo/ - Logo files - -## Content strategy -- We document just enough so that users are successful. Too much content makes it hard to find what people are looking for. Too little makes it too challenging to accomplish users' goals. -- Prioritize accuracy and usability of information -- Make content evergreen when possible -- Search for existing information before adding new content. 
Avoid duplication unless it is done for a strategic reason -- Check existing patterns for consistency -- Start by making the smallest reasonable changes - -## Frontmatter requirements for pages -- title: Clear, descriptive page title -- description: Concise summary for SEO/navigation - -## Writing standards -- See [avoid.md](.claude/avoid.md) for do/don't patterns -- Second-person voice ("you") -- Prerequisites at start of procedural content -- Test all code examples before publishing -- Match style and formatting of existing pages -- Include both basic and advanced use cases -- Language tags on all code blocks -- Alt text on all images -- Relative paths for internal links -- Use broadly applicable examples rather than overly specific business cases -- Lead with context when helpful - explain what something is before diving into implementation details -- Use sentence case for all headings ("Getting started", not "Getting Started") -- Use sentence case for code block titles ("Expandable example", not "Expandable Example") -- Prefer active voice and direct language -- Remove unnecessary words while maintaining clarity -- Break complex instructions into clear numbered steps -- Make language more precise and contextual -- Use [Lucide](https://lucide.dev) icon library - -### Language and tone standards -- **Avoid promotional language**: Never use phrases like "rich heritage," "breathtaking," "captivates," "stands as a testament," "plays a vital role,""enables","comprehensive" or similar marketing language in technical documentation -- **Reduce conjunction overuse**: Limit use of "moreover," "furthermore," "additionally," "on the other hand" - favor direct, clear statements -- **Avoid editorializing**: Remove phrases like "it's important to note," "this article will," "in conclusion," or personal interpretations -- **No undue emphasis**: Avoid overstating importance or significance of routine technical concepts - -### Technical accuracy standards -- **Verify all links**: 
Every external reference must be tested and functional before publication -- **Use precise citations**: Replace vague references with specific documentation, version numbers, and accurate sources -- **Maintain consistency**: Use consistent terminology, formatting, and language variety throughout all documentation -- **Valid technical references**: Ensure all code examples, API references, and technical specifications are current and accurate - -### Formatting discipline - -- **Purposeful formatting**: Use bold, italics, and emphasis only when it serves the user's understanding, not for visual appeal -- **Clean structure**: Avoid excessive formatting, emoji, or decorative elements that don't add functional value -- **Standard heading case**: Use sentence case for headings unless project style guide specifies otherwise -- **Minimal markup**: Keep formatting clean and functional, avoiding unnecessary markdown or styling - -### Component introductions -- Start with action-oriented language: "Use [component] to..." rather than "The [component] component..." 
-- Be specific about what components can contain or do -- Make introductions practical and user-focused - -### Property descriptions -- End all property descriptions with periods for consistency -- Be specific and helpful rather than generic -- Add scope clarification where needed (e.g., "For Font Awesome icons only:") -- Use proper technical terminology ("boolean" not "bool") - -### Code examples -- Keep examples simple and practical -- Use consistent formatting and naming -- Provide clear, actionable examples rather than showing multiple options when one will do - -## Content organization -- Structure content in the order users need it -- Combine related information to reduce redundancy -- Use specific links (direct to relevant pages rather than generic dashboards) -- Put most commonly needed information first - -## Git workflow -- NEVER use --no-verify when committing -- Ask how to handle uncommitted changes before starting -- Create a new branch when no clear branch exists for changes -- Commit frequently throughout development -- NEVER skip or disable pre-commit hooks - -## Do not -- Skip frontmatter on any MDX file -- Use absolute URLs for internal links -- Include untested code examples -- Make assumptions - always ask for clarification \ No newline at end of file diff --git a/.claude/agents/code-snippet-validator.md b/.claude/agents/code-snippet-validator.md deleted file mode 100644 index 3b3a2ad2..00000000 --- a/.claude/agents/code-snippet-validator.md +++ /dev/null @@ -1,327 +0,0 @@ ---- -name: code-snippet-validator -description: Verifies code snippets in ZK Compression documentation against actual source code using CLAUDE.md mappings, DeepWiki queries, and WebFetch. Use when reviewing documentation for code accuracy. 
-allowed-tools: [Read, Glob, Grep, WebFetch, TodoWrite, Write, mcp__deepwiki__read_wiki_structure, mcp__deepwiki__read_wiki_contents, mcp__deepwiki__ask_question] ---- - -# Agent: Code Snippet Validator - -**Single Responsibility:** Verify code snippets against actual source code using CODE_SNIPPET_VERIFICATION.md checklist, CLAUDE.md mappings, and DeepWiki integration. - -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- State which files will be validated (from provided file pattern) -- Identify checklist location: `developer-content/.github/CODE_SNIPPET_VERIFICATION.md` -- Confirm CLAUDE.md mapping file location: `developer-content/zk-compression-docs/CLAUDE.md` -- Confirm DeepWiki repository: `Lightprotocol/light-protocol` - -#### Then assess if clarification is needed: -If unclear, ask: -- Should verification use DeepWiki, WebFetch, or both? -- What severity levels should be reported? -- Should validation stop on first error or collect all issues? 
- -#### Validation refinement checklist: -- File pattern is clear -- Checklist file is accessible -- CLAUDE.md is readable -- DeepWiki MCP tools are available - -## Workflow - -### Step 1: Read Checklist, CLAUDE.md, and Identify Files - -**Read the validation checklist:** -```bash -Read: /home/tilo/Workspace/developer-content/.github/CODE_SNIPPET_VERIFICATION.md -``` - -**Read CLAUDE.md for source mappings:** -```bash -Read: /home/tilo/Workspace/developer-content/zk-compression-docs/CLAUDE.md -``` - -**Identify files to validate:** -- Use Glob to find files matching the provided pattern -- Default: `developer-content/zk-compression-docs/**/*.md` -- For each file, extract code snippets - -### Step 2: Apply Code Snippet Verification - -For each code snippet found, validate against CODE_SNIPPET_VERIFICATION.md criteria: - -#### Import Statement Validation - -**TypeScript Imports:** -- [ ] Verify `@lightprotocol/stateless.js` imports match package exports - - Common: `createRpc`, `Rpc`, `CompressedAccount`, `PackedAddressTreeInfo`, `ValidityProof` - - Check against: `https://github.com/Lightprotocol/light-protocol/tree/main/js/stateless.js/src` -- [ ] Verify `@lightprotocol/compressed-token` imports match package exports - - Common: `createMint`, `mintTo`, `transfer`, `compress`, `decompress`, `approve`, `revoke` - - Check against: `https://github.com/Lightprotocol/light-protocol/tree/main/js/compressed-token/src` -- [ ] Verify `@solana/web3.js` imports use current Solana SDK APIs - - Common: `Keypair`, `PublicKey`, `Connection` -- [ ] Check for deprecated import paths or renamed modules - -**Rust Imports:** -- [ ] Verify `light-sdk` imports match crate structure - - Common: `LightAccount`, `derive_address`, `CpiAccounts`, `LightSystemProgramCpi` - - Check against: `https://github.com/Lightprotocol/light-protocol/tree/main/sdk-libs/sdk/src` -- [ ] Verify macro imports: `derive_light_cpi_signer!`, `LightDiscriminator`, `pubkey!` -- [ ] Check `anchor_lang` imports for 
Anchor programs - - Common: `prelude::*`, `AnchorDeserialize`, `AnchorSerialize` -- [ ] Verify `borsh` imports for native Rust programs - - Common: `BorshSerialize`, `BorshDeserialize` - -#### API Method Verification - -**TypeScript SDK Methods:** -- [ ] RPC methods - Verify signatures against source - - `getCompressedTokenAccountsByOwner(owner, options)` - check parameters and return type - - `getCompressedAccountsByOwner(owner)` - verify method exists - - `getValidityProof(addresses, addressTrees)` - check proof structure - - `getIndexerHealth(slot)` - verify response format -- [ ] Compressed Token actions - Verify against source files - - `createMint(rpc, payer, authority, decimals)` - check parameter order - - `mintTo(rpc, payer, mint, recipient, authority, amount)` - verify all parameters required - - `transfer(rpc, payer, mint, from, to, amount)` - check signature - - `compress(rpc, payer, mint, amount)` - verify exists - - `decompress(rpc, payer, mint, amount)` - check return type -- [ ] Return values - Verify documented return values match actual returns - - `createMint()` returns `{ mint: PublicKey, transactionSignature: string }` - - `mintTo()` returns `string` (transaction signature) - -**Rust SDK Methods:** -- [ ] LightAccount methods - Verify against source - - `LightAccount::new_init(owner, address, tree_index)` - check parameters - - Serialization/deserialization behavior -- [ ] Address derivation - Verify against source - - `derive_address(seeds, tree_pubkey, program_id)` - check parameter order - - Return type: `(address: [u8; 32], address_seed: [u8; 32])` -- [ ] CPI methods - Verify against source - - `LightSystemProgramCpi::new_cpi(signer, proof)` - check builder pattern - - `.with_light_account(account)` - verify method chaining - - `.with_new_addresses(addresses)` - check parameter type - - `.invoke(cpi_accounts)` - verify final call signature - -#### CLAUDE.md Cross-Reference Protocol - -**Step 1: Identify Documentation Scope** -- [ ] 
Determine which `.md` file is being reviewed -- [ ] Check if file appears in `CLAUDE.md` tree structure -- [ ] If file not in CLAUDE.md, skip source verification (may be conceptual docs) - -**Step 2: Parse CLAUDE.md Tree Structure** -- [ ] Locate documentation page in ASCII tree (search by filename) -- [ ] Extract all `src:` prefixed GitHub URLs under that page -- [ ] Note that one doc page may map to multiple source files -- [ ] Distinguish between `src:`, `docs:`, `example:`, `rpc:`, `impl:` prefixes - - `src:` = primary implementation to verify against - - `docs:` = API documentation (TypeDoc, docs.rs) - - `example:` = full example repo (may differ from SDK) - - `rpc:` = RPC method implementation - -**Step 3: Fetch Source Code** -- [ ] Use DeepWiki to query light-protocol repository: - ``` - mcp__deepwiki__ask_question( - repoName: "Lightprotocol/light-protocol", - question: "What is the signature of createMint in @lightprotocol/compressed-token?" - ) - ``` -- [ ] Use WebFetch to fetch content from `src:` URLs -- [ ] If source file is too large, focus on exported functions and type signatures -- [ ] Handle cases where source is split across multiple files - -**Step 4: Compare Snippet to Source** -- [ ] Function signature matching - - TypeScript: Compare function name, parameter names, parameter order, types - - Rust: Compare function signature, struct fields, macro usage -- [ ] Import paths matching - - Verify imports in doc snippet match exports in source files - - Check for renamed exports or deprecated paths -- [ ] API usage patterns matching - - Verify method chaining order (Rust builder pattern) - - Check optional vs required parameters - - Validate default values if documented -- [ ] Return type matching - - Verify documented return values match source - - Check Promise types for TypeScript async functions - -**Step 5: Handle Edge Cases** -- [ ] Simplified examples: Doc snippets may omit error handling for clarity - - Acceptable if core API usage is 
correct - - Flag if simplification introduces confusion -- [ ] Multiple versions: If source shows multiple overloads, verify doc uses one correctly -- [ ] Deprecated APIs: Flag if doc uses deprecated API even if it still works -- [ ] Missing source mapping: If doc page has no CLAUDE.md entry but shows code - - Request CLAUDE.md update OR verify manually if possible - - Do not assume code is incorrect without verification - -#### Placeholder and Secret Detection - -**Valid Placeholders:** -- [ ] API keys use clear placeholder syntax: - - Valid: ``, ``, `YOUR_API_KEY`, `` - - Valid: Inline hints like `"https://rpc.com?api-key="` -- [ ] Keypair/wallet placeholders are clear: - - Valid: `Keypair.generate()`, `Keypair.fromSecretKey(...)` - - Valid: File path references like `~/.config/solana/id.json` -- [ ] Program IDs use actual addresses or clearly marked placeholders: - - Valid: Real program IDs like `SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7` - - Valid: Placeholder with comment: `YOUR_PROGRAM_ID // Replace with your program ID` - -**Invalid Secrets:** -- [ ] No real API keys (format: `helius-` prefix, alphanumeric) - - Flag: Any string matching `helius-[a-zA-Z0-9]{8,}` -- [ ] No real secret keys (base58 encoded, 87-88 characters) - - Flag: Any string matching `[1-9A-HJ-NP-Za-km-z]{87,88}` in keypair context -- [ ] No environment variable leaks without placeholder explanation -- [ ] No hardcoded private keys in examples - -#### Basic Syntax Validation - -**TypeScript:** -- [ ] No syntax errors that would prevent compilation - - Check for unmatched brackets, parentheses, quotes -- [ ] Async/await usage is correct - - `await` used with Promise-returning functions - - Functions using `await` are marked `async` -- [ ] Type annotations are present for parameters (when shown) -- [ ] Imports are grouped logically (SDK first, Solana after) - -**Rust:** -- [ ] No syntax errors that would prevent compilation - - Check for unmatched braces, parentheses - - Verify macro 
syntax: `macro_name!(args)` or `#[attribute]` -- [ ] Ownership and borrowing syntax is correct - - `&` for references, `&mut` for mutable references - - `.clone()` used appropriately -- [ ] Generic type parameters are correctly specified - - Example: `LightAccount::::new_init(...)` -- [ ] Derive macros are correctly applied - - Example: `#[derive(LightDiscriminator, BorshSerialize)]` - -**Common Issues to Flag:** -- [ ] Missing `await` on async calls (TypeScript) -- [ ] Incorrect parameter order compared to source -- [ ] Using deprecated APIs (check source file comments) -- [ ] Incorrect type casting or conversions -- [ ] Missing required parameters -- [ ] Using removed or renamed functions - -### Step 3: Generate Report and Write to File - -**Compile all findings into a structured report.** - -**For each issue, use this format:** - -``` -**Issue:** [Brief description] -**Location:** [file:line] -**Documentation shows:** -```[language] -[snippet from doc] -``` -**Source code shows:** -```[language] -[relevant snippet from source] -``` -**CLAUDE.md reference:** [URL from CLAUDE.md] -**Recommendation:** [Suggested fix] -``` - -**Example:** -``` -**Issue:** Incorrect parameter order in mintTo() -**Location:** compressed-tokens/guides/how-to-mint-compressed-tokens.md:167-174 -**Documentation shows:** -```typescript -await mintTo(rpc, payer, mint, recipient, payer, amount); -``` -**Source code shows:** -```typescript -// From js/compressed-token/src/actions/mint-to.ts -export async function mintTo( - rpc: Rpc, - payer: Keypair, - mint: PublicKey, - recipient: PublicKey, - authority: Keypair, - amount: number | bigint -) -``` -**CLAUDE.md reference:** `src: https://github.com/Lightprotocol/light-protocol/blob/main/js/compressed-token/src/actions/mint-to.ts` -**Recommendation:** Parameter order is correct. No issue found. 
-``` - -**Summary report:** -``` -Files validated: X -Code snippets checked: Y -Issues found: Z -- Missing imports: A -- Wrong signatures: B -- Deprecated APIs: C -- Invalid secrets: D -``` - -**Write report to file:** - -After compiling all findings, write the complete report to the file path provided in the task context (e.g., `/home/tilo/Workspace/.claude/tasks/review-YYYYMMDD-HHMM-code-snippets.md`). - -Use Write tool with the complete report content including: -- Timestamp and file pattern validated -- All issues found (with file:line references) -- Source verification details (DeepWiki queries, WebFetch results) -- Summary statistics -- Recommendations - -Return message: "Code snippet validation complete. Report saved to: [file-path]" - -## Constraints and Security - -**What this agent MUST NOT do:** -- Modify files without user confirmation -- Skip source verification steps -- Report issues without verifying against source -- Assume code is correct without checking - -**Security considerations:** -- Flag any exposed secrets immediately -- Verify placeholders are clearly marked -- Report suspicious patterns - -**Error handling:** -- If DeepWiki unavailable: Use WebFetch as fallback -- If source not found: Report missing source mapping -- If uncertain about correctness: Flag for manual review - -## Tool Usage - -**Allowed tools:** Read, Glob, Grep, WebFetch, TodoWrite, Write, mcp__deepwiki__read_wiki_structure, mcp__deepwiki__read_wiki_contents, mcp__deepwiki__ask_question - -**Tool usage guidelines:** -- Glob: Find files matching pattern -- Read: Read checklist, CLAUDE.md, and documentation files -- Grep: Search for code snippets and specific patterns -- WebFetch: Fetch source code from GitHub URLs -- mcp__deepwiki__ask_question: Query light-protocol repository for API signatures -- mcp__deepwiki__read_wiki_structure: Get repository documentation structure -- mcp__deepwiki__read_wiki_contents: Read repository documentation -- TodoWrite: Track validation 
progress for multiple files -- Write: Write final report to /home/tilo/Workspace/.claude/tasks/ file - -**Forbidden operations:** -- Do not modify documentation files (only write to /home/tilo/Workspace/.claude/tasks/ report file) -- Do not skip verification against source -- Do not assume APIs without checking - -## Notes - -- This agent only validates code accuracy, not text quality -- Use in conjunction with gitbook-syntax-validator and developer-text-validator -- DeepWiki queries are faster than WebFetch for signature verification -- Always cross-reference CLAUDE.md for source mappings -- Report file:line references for all issues \ No newline at end of file diff --git a/.claude/agents/developer-text-validator.md b/.claude/agents/developer-text-validator.md deleted file mode 100644 index 90210338..00000000 --- a/.claude/agents/developer-text-validator.md +++ /dev/null @@ -1,341 +0,0 @@ ---- -name: developer-text-validator -description: Evaluates text quality in ZK Compression documentation for actionability, accuracy, and developer usefulness. Use when reviewing documentation for text clarity and relevance. -allowed-tools: [Read, Glob, Grep, TodoWrite, Write, mcp__deepwiki__ask_question] ---- - -# Agent: Developer Text Validator - -**Single Responsibility:** Evaluate text quality against DEVELOPER_TEXT_CHECKLIST.md to ensure actionable, accurate, and developer-focused content. - -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- State which files will be validated (from provided file pattern) -- Identify checklist location: `developer-content/.github/DEVELOPER_TEXT_CHECKLIST.md` -- Confirm target audience: SDK users (TypeScript/Rust developers) - -#### Then assess if clarification is needed: -If unclear, ask: -- Should all text be evaluated or only sections around code? -- What severity levels should be reported? -- Should validation flag all implementation details or only irrelevant ones? 
- -#### Validation refinement checklist: -- File pattern is clear -- Checklist file is accessible -- Working directory is `developer-content/` - -## Workflow - -### Step 1: Read Checklist and Identify Files - -**Read the validation checklist:** -```bash -Read: /home/tilo/Workspace/developer-content/.github/DEVELOPER_TEXT_CHECKLIST.md -``` - -**Identify files to validate:** -- Use Glob to find files matching the provided pattern -- Default: `developer-content/zk-compression-docs/**/*.md` -- Read each file for text quality evaluation - -### Step 2: Apply Developer Text Quality Validation - -#### Target Audience Context - -Documentation serves developers who: -- Use TypeScript SDK (`@lightprotocol/stateless.js`, `@lightprotocol/compressed-token`) -- Build Solana programs with Rust SDK (`light-sdk`) -- Need clear, actionable instructions to implement ZK Compression -- Do NOT need to understand protocol internals unless building infrastructure -- Want to know WHAT to do and WHY, not HOW the system implements it internally - -#### Good Text Characteristics - -**Actionable Instructions:** -- [ ] Text tells developers exactly WHAT to do - - Example: "Pass the mint authority as the fifth parameter to `mintTo()`" - - Example: "Call `derive_address()` with your custom seeds and the address tree pubkey" -- [ ] Text explains WHY a step is necessary - - Example: "The validity proof verifies that the address doesn't exist yet in the address tree" - - Example: "Clients fetch the proof with `getValidityProof()` from an RPC provider" -- [ ] Text describes the OUTCOME of an operation - - Example: "This creates a compressed token account for the recipient and increases the mint's token supply" - - Example: "`new_init()` lets the program define the initial account data" - -**Clear API Explanations:** -- [ ] Function parameters are explained with purpose - - Good: "`recipient: PublicKey` - the address that will own the compressed tokens" - - Bad: "`recipient` - the recipient parameter" 
-- [ ] Return values are described with usage context - - Good: "Returns `{ mint, transactionSignature }` - use `mint` for subsequent operations" - - Bad: "Returns an object with the mint" -- [ ] Method names are shown with correct casing and syntax - - Good: "`createMint()`", "`LightAccount::new_init()`" - - Bad: "create mint function", "newInit method" - -**Conceptual Clarity:** -- [ ] Technical terms are defined on first use - - Example: "Token pool: SPL token account that holds SPL tokens corresponding to compressed tokens in circulation" - - Example: "CPI Signer: PDA derived from your program ID with seed `b'authority'`" -- [ ] Analogies relate to familiar Solana concepts - - Example: "Compressed accounts share the same functionality as regular Solana accounts and are fully composable" - - Example: "`LightAccount` wraps your data similar to Anchor's `Account`" -- [ ] Limitations and constraints are stated clearly - - Example: "The same seeds can create different addresses in different address trees" - - Example: "Only the mint authority can perform this operation" - -#### Bad Text Patterns to Flag - -**Implementation Details (Not Relevant to Developers)** - -Flag text that describes HOW the system works internally when developers only need to USE the API: - -- [ ] Merkle tree mechanics (unless explaining tree selection for creation) - - Bad: "The system hashes the account data with Poseidon and inserts it into the Merkle tree" - - Good: "The Light System Program verifies the proof and creates the compressed account" -- [ ] Protocol-level transaction flow (unless relevant to error handling) - - Bad: "The account compression program receives a CPI from Light System Program which validates ownership" - - Good: "Your program calls Light System Program via CPI to create the compressed account" -- [ ] Indexer implementation details - - Bad: "Photon parses transaction logs and reconstructs state by traversing the Merkle tree" - - Good: "Use 
`getCompressedAccountsByOwner()` to fetch compressed accounts from the RPC indexer" -- [ ] Prover node internals - - Bad: "The prover generates zero-knowledge proofs by evaluating polynomial commitments" - - Good: "Clients fetch validity proofs from RPC providers with `getValidityProof()`" - -**Guideline:** If text explains protocol internals that developers cannot change or interact with, it's likely unnecessary detail. - -**Hallucinated or Incorrect Information** - -Flag text that makes claims not supported by source code or documentation: - -- [ ] Non-existent API methods - - Example: Claiming `compressSplAccount()` exists when only `compress()` is available - - Verify against CLAUDE.md source references or DeepWiki -- [ ] Incorrect parameter descriptions - - Example: Saying `mintTo()` takes 4 parameters when it requires 6 - - Cross-check with source code signatures -- [ ] Misleading statements about behavior - - Example: "This automatically creates a token pool" when it doesn't - - Example: "Compressed accounts are always faster" without context -- [ ] Outdated API usage - - Example: Showing deprecated `createAccount()` instead of `LightAccount::new_init()` - - Check source files for deprecation warnings - -**Guideline:** Every factual claim about APIs should be verifiable against source code (via CLAUDE.md) or official SDK documentation. Use DeepWiki to verify if uncertain. - -**Vague or Generic Statements** - -Flag text that provides no actionable information: - -- [ ] Generic placeholders - - Bad: "This function does something with the data" - - Bad: "Handle the response appropriately" - - Bad: "Configure the settings as needed" -- [ ] Missing specifics - - Bad: "Pass the required parameters" (which parameters? what are they?) - - Bad: "Use the correct tree" (which tree? how to identify it?) - - Bad: "Set up the accounts" (which accounts? what configuration?) 
-- [ ] Circular definitions that don't explain purpose or usage - - Bad: "The mint authority is the authority that can mint" - → Restates term without explaining what it controls - - Bad: "Address trees store addresses" - → Describes data structure without explaining developer purpose - - Good: "Address trees store derived addresses that serve as persistent identifiers for compressed accounts" - → Explains both data structure AND its role - - Bad: "Compressed accounts are accounts that are compressed" - → Tautology with zero information - - Good: "Compressed accounts are data structures represented as 32-byte hashes stored in Merkle trees, requiring no rent" - → Explains representation, storage mechanism, and key benefit - -**Guideline:** Every definition must answer "What does the developer USE this for?" or "What PROBLEM does this solve?" If removing the sentence doesn't change understanding, it's likely vague. - -**Confusing Terminology Mixing** - -Flag text that mixes abstraction levels or uses inconsistent terminology: - -- [ ] Mixing SDK and protocol terms - - Example: "Call `mintTo()` to invoke the compressed token program's mint instruction handler" - - Better: "Call `mintTo()` to mint compressed tokens to a recipient" -- [ ] Inconsistent naming - - Example: Switching between "validity proof", "non-inclusion proof", and "address proof" for the same concept - - Use consistent term throughout documentation -- [ ] Marketing language in technical docs - - Bad: "Revolutionary state compression technology" - - Good: "ZK Compression reduces on-chain storage costs by storing account data in Merkle trees" - -**Always-Flag Marketing Words (CRITICAL)** - -These words are NEVER acceptable in technical documentation. 
Always flag and suggest concrete replacements: - -- [ ] **"enables"** → Replace with concrete action verb - - Bad: "This enables token operations" - - Good: "This creates, transfers, and burns compressed tokens" - - Bad: "enables compression" - - Good: "compresses token accounts" - -- [ ] **"comprehensive"** → Replace with specific list - - Bad: "Comprehensive token support" - - Good: "Supports SPL token compression, decompression, and transfers" - -- [ ] **"flexible"** → Explain actual options - - Bad: "Flexible account configuration" - - Good: "Configure account size from 32 bytes to 10KB" - -- [ ] **"operations" (without specifying which)** → List specific operations - - Bad: "Supports compressed account operations" - - Good: "Create, update, close, and burn compressed accounts" - - Bad: "enables various operations" - - Good: "mints, transfers, and burns compressed tokens" - -**Guideline:** Use concrete verbs that describe actual operations. Replace "enables X" with "does X" or "creates X". Every capability claim must specify WHAT the developer can do. Do not emphasize cost savings in guides - -#### Context-Specific Guidelines - -**Code Comments:** -- [ ] Inline comments explain WHAT and WHY, not HOW - - Good: `// Mint authority must sign this transaction` - - Bad: `// This line creates a variable` -- [ ] Comments provide context not obvious from code - - Good: `// Token pool must exist before minting compressed tokens` - - Bad: `// Call the mintTo function` - -**Step-by-Step Instructions:** -- [ ] Each step is a complete action - - Good: "Install dependencies with `npm install @lightprotocol/stateless.js`" - - Bad: "Install dependencies" -- [ ] Steps follow logical order (dependencies → setup → usage) -- [ ] Prerequisites are stated upfront, not discovered mid-tutorial - -**Error Messages and Troubleshooting:** -- [ ] Error messages are quoted exactly as they appear - - Example: `"TokenPool not found. 
Please create a compressed token pool for mint: [ADDRESS]"` -- [ ] Explanations identify the ROOT CAUSE - - Good: "This error occurs when the mint doesn't have a token pool for compression" - - Bad: "This error means something went wrong" -- [ ] Solutions are specific and testable - - Good: "Create a token pool with `createTokenPool(rpc, payer, mint)`" - - Bad: "Make sure the pool is set up correctly" - -**Conceptual Explanations:** -- [ ] Concepts are explained BEFORE they're used in code - - Example: Define "validity proof" before showing `proof` parameter -- [ ] Analogies relate to existing Solana knowledge - - Example: "Similar to Solana PDAs, compressed account addresses can be derived from seeds" -- [ ] Diagrams and examples supplement text (when present) - -#### Quick Checklist for Every Text Block - -For each section of text, verify: - -1. [ ] Does this tell developers WHAT to do or WHY to do it? -2. [ ] Can this be verified against source code (if making factual claims)? -3. [ ] Would removing this text reduce developer understanding? -4. [ ] Is terminology consistent with rest of documentation? -5. [ ] Does this avoid unnecessary implementation details? -6. [ ] Is this actionable, specific, and clear? - -If any answer is "No", flag for review. 
- -### Step 3: Generate Report and Write to File - -**Compile all findings into a structured report.** - -**For each issue, use this format:** - -``` -**Issue:** [Type: Implementation Detail / Hallucination / Vague Statement] -**Location:** [file:section/line] -**Current Text:** -``` -[Problematic text] -``` -**Problem:** [Why this is unhelpful or misleading] -**Suggested Revision:** -``` -[Improved text] -``` -**Rationale:** [Why the revision is better for developers] -``` - -**Example:** -``` -**Issue:** Unnecessary Implementation Detail -**Location:** compressed-tokens/guides/how-to-mint-compressed-tokens.md:15 -**Current Text:** -``` -The mintTo() function serializes the mint instruction, constructs a transaction with the compressed token program, and invokes the runtime to process the instruction which hashes the account data and updates the Merkle tree. -``` -**Problem:** Describes internal system mechanics that developers cannot control or modify. Overcomplicates what should be a simple API usage explanation. -**Suggested Revision:** -``` -The mintTo() function creates compressed token accounts for recipients and increases the mint's token supply. Only the mint authority can perform this operation. -``` -**Rationale:** Focuses on what developers need to know: what the function does, who can call it, and the outcome. Implementation details are irrelevant for SDK users. -``` - -**Summary report:** -``` -Files validated: X -Text sections evaluated: Y -Issues found: Z -- Implementation details: A -- Vague statements: B -- Hallucinated APIs: C -- Terminology issues: D -``` - -**Write report to file:** - -After compiling all findings, write the complete report to the file path provided in the task context (e.g., `/home/tilo/Workspace/.claude/tasks/review-YYYYMMDD-HHMM-developer-text.md`). 
- -Use Write tool with the complete report content including: -- Timestamp and file pattern validated -- All issues found (with file:section/line references) -- Issue type categorization -- Suggested revisions with rationale -- Summary statistics -- Recommendations - -Return message: "Developer text validation complete. Report saved to: [file-path]" - -## Constraints and Security - -**What this agent MUST NOT do:** -- Modify files without user confirmation -- Flag valid technical explanations as "too detailed" -- Assume statements are incorrect without verification -- Report subjective style preferences as issues - -**Error handling:** -- If uncertain about technical accuracy: Use DeepWiki to verify -- If terminology is ambiguous: Check consistency across documentation -- If unsure about issue severity: Flag for manual review - -## Tool Usage - -**Allowed tools:** Read, Glob, Grep, TodoWrite, Write, mcp__deepwiki__ask_question - -**Tool usage guidelines:** -- Glob: Find files matching pattern -- Read: Read checklist and documentation files -- Grep: Search for specific text patterns -- mcp__deepwiki__ask_question: Verify factual claims about APIs -- TodoWrite: Track validation progress for multiple files -- Write: Write final report to /home/tilo/Workspace/.claude/tasks/ file - -**Forbidden operations:** -- Do not modify documentation files (only write to /home/tilo/Workspace/.claude/tasks/ report file) -- Do not flag technically accurate advanced content -- Do not assume claims are false without verification - -## Notes - -- This agent only validates text quality, not code accuracy or syntax -- Use in conjunction with gitbook-syntax-validator and code-snippet-validator -- Focus on developer usefulness, not writing style -- Verify hallucination claims with DeepWiki before reporting -- Report file:section/line references for all issues -- Distinguish between implementation details and necessary technical context \ No newline at end of file diff --git 
a/.claude/agents/mintlify-components.md b/.claude/agents/mintlify-components.md deleted file mode 100644 index 30cbbf5c..00000000 --- a/.claude/agents/mintlify-components.md +++ /dev/null @@ -1,468 +0,0 @@ ---- -name: mintlify-components -description: Expert Mintlify component specialist for ZK Compression/Light Protocol documentation. Use proactively whenever working with MDX files, creating components, or enhancing documentation with interactive elements. -tools: Read, Write, Edit, MultiEdit, Glob, Grep ---- - -You are a Mintlify component specialist focused on creating rich, interactive documentation for ZK Compression and Light Protocol. - -## Your Expertise - -You specialize in: -- **Component Selection**: Choosing the right Mintlify components for different content types -- **MDX Enhancement**: Converting plain Markdown to rich interactive documentation -- **ZK Compression Patterns**: Implementing component patterns for compressed accounts, RPC methods, and program development -- **Quality Assurance**: Ensuring components are properly configured and accessible - -## Component Library - -### Content & Structure Components - -**Headers and Text** -- Use frontmatter `title` instead of leading `#` -- Standard Markdown: `##`, `###` for subheadings -- Rich text: Bold, italic, links, lists work as expected - -**Code Examples** -```jsx - - - ```javascript - const example = "syntax highlighting works"; - ``` - - - - ```python - example = "multiple languages supported" - ``` - - - ```bash - curl -X POST https://api.example.com/endpoint - ``` - - -``` - -### Layout Components - -**Cards & CardGroups** -```jsx - - - Description of the feature - - - Another feature description - - -``` - -**Columns** -```jsx - -
<Columns cols={3}>
  <div>Column 1 content</div>
  <div>Column 2 content</div>
  <div>Column 3 content</div>
</Columns>
- -``` - -### Technical Components - -**Mermaid Diagrams** -```jsx - - graph TD - A[User Request] --> B[Passkey Authentication] - B --> C[Session Creation] - C --> D[Transaction Signing] - D --> E[On-chain Execution] - -``` - -**Icons** -```jsx - - -...} /> -``` - -## ZK Compression-Specific Patterns - -### Compressed Account Instruction -```jsx - - - - - ```toml title="Cargo.toml" - [dependencies] - light-sdk = "0..0" - anchor-lang = "0.31.1" - ``` - - - ```toml title="Cargo.toml" - [dependencies] - light-sdk = "0..0" - solana-program = "2.2" - ``` - - - - - - ```rust - #[derive(Clone, Debug, LightDiscriminator, AnchorSerialize)] - pub struct MyAccount { - pub data: String, - } - ``` - - - - Create, update, or close the compressed account. - - -``` - -### Multi-Code Examples with Tabs -```jsx - - - ```rust - #[light_account] - pub struct MyAccount { - pub data: String, - } - ``` - - - - ```rust - #[derive(BorshSerialize, BorshDeserialize)] - pub struct MyAccount { - pub data: String, - } - ``` - - - - ```typescript - const account = await rpc.getCompressedAccount(hash); - ``` - - -``` - -### RPC Method Documentation -```jsx - - 32-byte account hash identifying the compressed account in the state tree. - - - - Compressed account data and metadata. - - - - 32-byte account hash in state tree. - - - - Optional 32-byte persistent address. - - - - Serialized account data. - - - -``` - -### Merkle Tree Concepts -```jsx - -State trees store compressed account hashes. Each update nullifies the old hash and appends a new hash. - - - - graph LR - A[Old Account Hash] -->|Nullify| B[State Tree] - C[New Account Hash] -->|Append| B - B --> D[Merkle Root] - -``` - -### CPI Documentation -```jsx - - - Derive the authority PDA for signing CPIs to Light System Program. - - ```rust - pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("YourProgramID"); - ``` - - - - Construct the CPI with account wrappers and invoke. 
- - ```rust - LightSystemProgramCpi::new_cpi() - .with_light_account(&account) - .with_compressed_account_meta(&meta) - .invoke()?; - ``` - - -``` - -## When You're Invoked - -1. **Analyze the content type**: API documentation, conceptual content, tutorials, or landing pages -2. **Identify enhancement opportunities**: Where plain markdown could become interactive -3. **Select appropriate components**: Choose components that best serve the user's needs -4. **Implement with best practices**: Follow Squads patterns and accessibility guidelines -5. **Validate implementation**: Ensure proper syntax and responsive behavior - -## Best Practices - -### Content Organization Strategy -- **Use Cards for Navigation & Features**: Landing pages and feature discovery -- **Structure with Progressive Disclosure**: Main content visible, details in Expandables -- **Guide with Steps**: Sequential processes and implementation guides -- **Organize with Tabs**: Platform-specific or approach-specific content - -### Strategic Callout Usage -- **Critical warnings first**: Use `` for destructive actions -- **Important context**: Use `` for rate limits, requirements -- **Optimization tips**: Use `` for performance suggestions -- **Additional context**: Use `` for background information - -### API Documentation Enhancement -- **Comprehensive parameters**: Use nested ResponseFields with Expandables -- **Multi-platform examples**: CodeGroup with TypeScript, cURL, Rust, ... 
-- **Real-world context**: Include practical examples and use cases - -### ZK Compression Documentation Standards -- **Technical Precision**: Use exact type names (`CompressedAccountMeta` not "account metadata") -- **Specific Verbs**: "nullifies hash" not "handles account" -- **No Marketing Language**: Avoid "enables", "provides capability", "powerful" -- **Code Examples**: Always provide both Anchor and Native Rust examples -- **Framework Patterns**: Document Anchor patterns with `#[light_account]` macro -- **Terminology Consistency**: State tree, address tree, validity proof, CPI - -## Quality Assurance Checklist - -**Component Validation** -- [ ] All `` components have `title` and `href` attributes -- [ ] Code blocks specify language for syntax highlighting (rust, typescript, toml) -- [ ] `` components include accurate `type` and `required` attributes -- [ ] `` are logically ordered and actionable -- [ ] `` contains supplementary (not critical) information -- [ ] `` includes both Anchor and Native Rust tabs (where applicable) - -**Content Structure** -- [ ] No leading `#` headers (use frontmatter `title`) -- [ ] Consistent icon usage across similar components -- [ ] Strategic callout placement (not overwhelming) -- [ ] Complete, tested code examples -- [ ] Proper nesting of expandable content - -**ZK Compression Technical Accuracy** -- [ ] Type names are exact: `CompressedAccountMeta`, `ValidityProof`, `LightAccount` -- [ ] What is happening described precisely: "nullifies hash", "appends hash", "verifies proof" -- [ ] No marketing language: no "enables", "powerful", "seamlessly" -- [ ] Framework differences clearly documented (Anchor vs Native Rust) -- [ ] SDK method signatures match actual source code - -**Integration Testing** -- [ ] Components render correctly on mobile devices -- [ ] All navigation links function correctly -- [ ] OpenAPI integration displays properly -- [ ] Search functionality works with content - -## Component Selection Logic - -**For 
Program Development Guides:** -- Use `` for implementation sequences (create, update, close instructions) -- Use `` with Anchor/Native Rust tabs for dual-framework examples -- Use `` for SDK-specific details (LightAccount, ValidityProof) -- Use `` for critical constraints (UTXO pattern, no double-spend) -- Use `` for setup/prerequisites (collapsible boilerplate) - -**For Client SDK Documentation:** -- Use `` for TypeScript/Rust SDK comparisons -- Use `` for RPC method parameters -- Use `` with `` for nested response structures -- Use `` for optimization suggestions (V2 trees, CU costs) - -**For API Documentation:** -- Use `` for parameter documentation -- Use `` for multi-language examples -- Use `` for implementation details -- Use `` for breaking changes - -**For Conceptual Content:** -- Use `` for transaction lifecycle flows -- Use `` for tree structures and state transitions -- Use `` for technical definitions (without marketing language) -- Use `` for navigation between topics - -**For Navigation & Discovery:** -- Use `` components for landing pages -- Use `` for organized layouts -- Use custom mode for marketing-style pages - -**For Next Steps use:** - -```jsx -## Next Steps - - - -``` - -## GitBook to Mintlify Migration - -### Syntax Conversion Map - -| GitBook | Mintlify | -|---------|----------| -| `{% stepper %}...{% step %}...{% endstep %}...{% endstepper %}` | `...` | -| `{% tabs %}...{% tab title="..." %}...{% endtab %}...{% endtabs %}` | `...` or `` | -| `{% hint style="info" %}...{% endhint %}` | `...` | -| `{% hint style="warning" %}...{% endhint %}` | `...` | -| `{% hint style="danger" %}...{% endhint %}` | `...` | -| `{% hint style="success" %}...{% endhint %}` | `...` or `...` | -| `
<details><summary>...</summary>...</details>
` | `<Accordion title="...">...</Accordion>` |
• The account stays decompressed for at least these N epochs.
• The amount can be customized based on the expected activity of the account.
• The initial lamports balance is paid by the account creator. | **Set the initial lamports balance** to N epochs (must be at least 2 epochs)
• Paid by the account creator.
• Keeps the account decompressed for N epochs.
• Customize N based on expected account activity. | -| **Address derivation:**
You only need to derive an address when you create a compressed account.

*Issue: Implies derivation is creation-only* | **Address derivation:**
You derive addresses in two scenarios:
• **At account creation** - derive the address to create the account's persistent identifier, then pass it to `getValidityProofV0()` in the address array
• **Before building instructions** - derive the address to fetch existing accounts using `rpc.getCompressedAccount()`, then reference them in your transaction

*Shows complete flow for both use cases* | -| **Pre Accounts:**
Pre accounts are added at the beginning of `PackedAccounts` and the length of pre accounts varies based on the number of accounts added.

The on-chain program uses offsets to locate accounts it needs to access in the instruction's accounts array.

`PackedAccounts` calculates positions of the account:
• `system_accounts_start_offset` = length of pre accounts
• `packed_accounts_start_offset` = length of pre accounts + system accounts

*Issues: Vague "uses offsets", doesn't specify what pre accounts are, missing where/how offsets are passed* | **Pre Accounts:**
Pre accounts are your program-specific accounts (signers, PDAs for CPIs) added to the beginning of `PackedAccounts` at known positions.

Since the number of pre accounts varies per instruction, the on-chain program receives offsets in the instruction data to locate Light System accounts and tree accounts:
• `system_accounts_start_offset` (u8) - where Light System accounts begin in `remaining_accounts`
• `packed_accounts_start_offset` (u8) - where tree accounts begin in `remaining_accounts`

`PackedAccounts.to_account_metas()` calculates these offsets on the client-side and passes them to the program.

*Specifies what pre accounts are, where offsets are received (instruction data), exact function name, what the offsets locate* | diff --git a/.claude/commands/improve.md b/.claude/commands/improve.md deleted file mode 100644 index 668b19c7..00000000 --- a/.claude/commands/improve.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -description: Improves ZK Compression documentation by replacing vague statements with precise technical details verified against light-protocol codebase. Use when documentation lacks function names, data types, or specific mechanisms. -argument-hint: -allowed-tools: [Read, Edit, Glob, Grep, mcp__deepwiki__read_wiki_structure, mcp__deepwiki__read_wiki_contents, mcp__deepwiki__ask_question] ---- - -# /improve - -Improve: $ARGUMENTS - -**WHY:** Developers need exact function names, data types, and mechanisms—not vague verbs like "handles" or "uses". Precision prevents implementation confusion. - -## MANDATORY: Before ANY task execution - -### First, output your understanding and plan - -- State which section/file you'll improve -- Identify specific vague statements (quote them) -- List technical questions for DeepWiki (HOW? WHERE? WHAT function?) - -### Then assess if clarification is needed - -- Which specific section if multiple exist? -- Focus on all vague statements or specific areas? -- Preserve structure or reorganize? - -## Step 1: Read Context and Identify Vague Statements - -Read the full documentation file to understand page flow and audience. - -**Flag vague statements using `/home/tilo/.claude/context/terminology-reference.md` rules:** - -- Vague verbs: "handles", "manages", "processes", "uses" → needs HOW -- Missing function names: "calculates offsets" → WHICH function? -- Unclear data flow: "passes data" → WHERE? (instruction data vs accounts) -- Missing types: "offset value" → WHAT type? (u8, u16?) 
- -**For each vague statement, document:** - -```text -"[exact quote]" → Missing: [function name / data type / mechanism] -``` - -## Step 2: Query DeepWiki for Missing Details - -For each flagged statement, query `Lightprotocol/light-protocol`: - -**Query pattern:** - -- Browse: `mcp__deepwiki__read_wiki_structure("Lightprotocol/light-protocol")` -- Read: `mcp__deepwiki__read_wiki_contents("Lightprotocol/light-protocol")` -- Ask: `mcp__deepwiki__ask_question("Lightprotocol/light-protocol", "[precise question]")` - -**Questions to ask:** - -- "Which function [performs operation]? What is its signature?" -- "What data type is [field/parameter]?" -- "Where is [value] passed—instruction data or accounts array?" -- "How does the on-chain program [mechanism]? What steps occur?" - -## Step 3: Rewrite with Precision - -Apply precision rules (reference `/home/tilo/.claude/context/terminology-reference.md`): - -**AVOID:** "handles", "manages", "processes", "operations", "enables" - -**USE:** Exact function names (`PackedAccounts.to_account_metas()`), data types ((u8)), specific operations ("verifies proof against state root") - -**For each vague statement:** - -Original: `"[quote]"` - -Improved: `"[rewrite with function names, data types, and mechanism]"` - -Changes: - -- Added: `function_name()`, data type ([type]) -- Replaced: "[vague verb]" → "[specific operation]" -- Clarified: [WHERE/HOW detail] - -## Step 4: Apply Edits and Validate - -Use Edit tool to replace statements. Preserve structure and formatting. - -**Post-edit validation:** - -- [ ] All vague verbs replaced with specific operations -- [ ] Function names and data types included -- [ ] Mechanisms explain HOW, not just WHAT -- [ ] Data flow clarified (WHERE) - ---- - -## Example: Pre Accounts Accordion Improvement - -**BEFORE (vague):** -> "Pre accounts are added at the beginning. The on-chain program uses offsets to locate accounts." - -**Issues:** "are added" (WHAT are they?), "uses offsets" (HOW? 
WHERE received?), missing function/types - -**DeepWiki queries:** - -- "How does the on-chain program receive system_accounts_start_offset?" -- "Which function calculates these offsets?" - -**Answers:** Offsets in instruction data as u8, calculated by `PackedAccounts.to_account_metas()`, used via `ctx.remaining_accounts.split_at()` - -**AFTER (precise):** - -> "Pre accounts are your program-specific accounts (signers, PDAs for CPIs) added at known positions. -> -> The on-chain program receives offsets in the instruction data to locate Light System accounts and tree accounts: -> -> - `system_accounts_start_offset` (u8) - where Light System accounts begin -> - `packed_accounts_start_offset` (u8) - where tree accounts begin -> -> `PackedAccounts.to_account_metas()` calculates these offsets client-side." \ No newline at end of file diff --git a/.claude/commands/research.md b/.claude/commands/research.md deleted file mode 100644 index 85d9c713..00000000 --- a/.claude/commands/research.md +++ /dev/null @@ -1,195 +0,0 @@ -# Research Command Template - -Use this template for multi-step information gathering and analysis tasks. - -## Template - -```markdown ---- -description: [WHAT research it conducts] AND [WHEN to use it for research tasks] -argument-hint: -allowed-tools: [tools needed for research - Read, Grep, WebFetch, WebSearch, MCP tools] ---- - -# /command-name - -Research: $ARGUMENTS - -[WHY: Explain why systematic research matters for this domain] - -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- Always make a plan before answering a prompt -- State how you understood the research question -- Identify research scope and boundaries -- Show refined research question with specific focus areas -- List sources you'll explore - -#### Then assess if clarification is needed: -If the question is vague, incomplete, or could have multiple interpretations, ask: -- What specific aspects should be prioritized? 
-- What is the intended use of this research? -- What depth of detail is needed (overview vs deep dive)? -- Are there time constraints? -- What existing knowledge should this build on? - -#### Research refinement checklist: -- Define clear scope boundaries (what's in/out) -- Identify specific questions to answer -- List authoritative sources to consult -- Determine success criteria - -### Step 1: Scope and Plan Research - -**Define scope:** -- Main question: [restatement of refined question] -- Sub-questions: [list specific questions to answer] -- Boundaries: [what's included/excluded] -- Sources: [where to look] - -**Research strategy:** -1. [First source/approach] -2. [Second source/approach] -3. [Third source/approach] - -### Step 2: Gather Information - -**From [Source Type 1]:** -- [What to search/read] -- [What information to extract] - -**From [Source Type 2]:** -- [What to query/fetch] -- [What information to extract] - -**From [Source Type 3]:** -- [What to analyze/check] -- [What information to extract] - -**As you research:** -- Follow promising leads -- Adjust approach based on findings -- Note conflicting information -- Prioritize authoritative sources -- Document sources for all findings - -### Step 3: Synthesize and Analyze - -**Synthesis:** -- Combine findings from multiple sources -- Identify patterns and themes -- Resolve conflicts or note discrepancies -- Draw connections between concepts - -**Analysis:** -- Answer the main question -- Address each sub-question -- Assess confidence level -- Identify knowledge gaps - -### Step 4: Format Findings - -Structure the research output: - -1. **Summary** - Key findings in 2-3 sentences - -2. **Detailed Findings** - Organized by topic/question - - [Finding 1] ([Source]) - - [Finding 2] ([Source]) - - [Finding 3] ([Source]) - -3. **[Domain-Specific Sections]** - - Examples / Implementations - - Best practices - - Common pitfalls - -4. 
**Sources** - All references consulted - - [Source 1 with URL/path] - - [Source 2 with URL/path] - -5. **Gaps and Follow-ups** - What remains unclear - - [Question/gap 1] - - [Question/gap 2] - -## Validation - -Before presenting findings: -- [ ] Main question answered -- [ ] All sub-questions addressed -- [ ] Multiple sources consulted -- [ ] Sources documented -- [ ] Conflicts resolved or noted -- [ ] Confidence level assessed - -## Notes - -- [Research methodology notes] -- [Domain-specific considerations] -- [Where to find additional information] -``` - ---- - -## Creating Effective Research Commands - -### Before Writing (Evaluation-Driven) - -1. **Test without the command first** - What does Claude miss? -2. **Identify 3 test scenarios** - Common, edge case, error case -3. **Write minimal instructions** - Address only the gaps - -### Research-Specific Setup - -**Define research domain and sources:** -- What topics will this command research? -- What sources are authoritative? -- What tools are needed? (Read, Grep, WebFetch, WebSearch, MCP tools) - -**Structure research methodology:** -- Logical flow (broad → specific, concept → implementation) -- Source hierarchy (official docs > implementation > discussions) -- Conflict resolution strategy - -### Anti-Patterns to Avoid - -❌ **Single-source research:** -```markdown -# Bad: Step 1: Search Google, Step 2: Use first result -# Good: Step 1: Official docs, Step 2: Source code, Step 3: Discussions, Step 4: Synthesize -``` - -❌ **Unstructured exploration:** -```markdown -# Bad: "Look around and see what you find" -# Good: "1. Official docs for concepts, 2. Source code for verification, 3. Examples for patterns" -``` - -❌ **No source attribution:** -```markdown -# Bad: "The system works by..." -# Good: "The system works by... 
(source: docs.example.com/api, src/core/system.ts:42)" -``` - -### Testing Your Research Command - -**Cross-model testing:** -- Test with Haiku (needs more explicit guidance) -- Test with Sonnet (balanced) -- Test with Opus (handles ambiguity better) - -**Scenario testing:** -- Well-documented topics (should find easily) -- Obscure topics (should identify gaps) -- Complex topics (should synthesize well) - -### Storage - -- **Project commands**: `.claude/commands/` (check into version control) -- **Personal commands**: `~/.claude/commands/` (user-specific) - ---- - -## Complete Example - -See `examples/research-zk-compression.md` for a fully implemented research command following this template. diff --git a/.claude/commands/review.md b/.claude/commands/review.md deleted file mode 100644 index 8071c02c..00000000 --- a/.claude/commands/review.md +++ /dev/null @@ -1,187 +0,0 @@ ---- -description: Validates ZK Compression documentation files against Mintlify syntax, code accuracy, and text quality checklists. Use before committing documentation changes. -argument-hint: [file-pattern] ---- - -# /review - -Validate documentation files: $ARGUMENTS (default: `developer-content/zk-compression-docs/**/*.md`) - -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- State which files will be validated (based on $ARGUMENTS or default) -- Confirm three validation agents will run in parallel -- Identify validation scope (GitBook syntax, code snippets, text quality) - -#### Then assess if clarification is needed: -If unclear, ask: -- Should all warnings block or only critical errors? -- Should validation stop on first error or collect all issues? -- Which severity levels matter (critical/warning/info)? 
- -#### Validation refinement checklist: -- File pattern matches intended scope -- All three checklists from `developer-content/.github/` will be applied -- DeepWiki is available for code verification - -### Step 1: Parse Arguments and Generate Timestamp - -Determine file pattern to validate: -- If $ARGUMENTS is provided: use it -- If $ARGUMENTS is empty: use default `developer-content/zk-compression-docs/**/*.md` - -Generate timestamp for report files: -```bash -TIMESTAMP=$(date +%Y%m%d-%H%M) -``` - -Display: "Validating files matching: [file-pattern]" -Display: "Reports will be saved to: /home/tilo/Workspace/.claude/tasks/review-$TIMESTAMP-*.md" - -### Step 2: Spawn Three Validation Agents in Parallel - -Use Task tool three times in a single message with `subagent_type: "general-purpose"`. - -**Agent 1: GitBook Syntax Validator** - -``` -Task( - subagent_type: "general-purpose", - description: "GitBook syntax validation", - prompt: "Execute the validation workflow defined in /home/tilo/Workspace/.claude/agents/gitbook-syntax-validator.md - -File pattern to validate: [file-pattern from Step 1] -Working directory: /home/tilo/Workspace/developer-content -Report file: /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-gitbook-syntax.md - -Read the agent workflow file and follow ALL steps defined there to validate GitBook syntax and Markdown structure. -Write your complete findings to the report file specified above. -Return the report file path in your final message." 
-) -``` - -**Agent 2: Code Snippet Validator** - -``` -Task( - subagent_type: "general-purpose", - description: "Code snippet verification", - prompt: "Execute the validation workflow defined in /home/tilo/Workspace/.claude/agents/code-snippet-validator.md - -File pattern to validate: [file-pattern from Step 1] -Working directory: /home/tilo/Workspace/developer-content -Report file: /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-code-snippets.md - -Read the agent workflow file and follow ALL steps defined there to verify code snippets using: -- CLAUDE.md source mappings at developer-content/zk-compression-docs/CLAUDE.md -- DeepWiki queries to Lightprotocol/light-protocol repository -- WebFetch for GitHub source code verification - -Write your complete findings to the report file specified above. -Return the report file path in your final message." -) -``` - -**Agent 3: Developer Text Validator** - -``` -Task( - subagent_type: "general-purpose", - description: "Developer text quality evaluation", - prompt: "Execute the validation workflow defined in /home/tilo/Workspace/.claude/agents/developer-text-validator.md - -File pattern to validate: [file-pattern from Step 1] -Working directory: /home/tilo/Workspace/developer-content -Report file: /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-developer-text.md - -Read the agent workflow file and follow ALL steps defined there to evaluate text quality. -Flag implementation details, vague statements, and inaccuracies. - -Write your complete findings to the report file specified above. -Return the report file path in your final message." 
-) -``` - -### Step 3: Aggregate and Display Results - -Wait for all three agents to complete, then display aggregated report: - -``` -═══════════════════════════════════════ - DOCUMENTATION VALIDATION REPORT -═══════════════════════════════════════ - -Timestamp: [TIMESTAMP] -Files validated: [file-pattern] - -─── GitBook Syntax Validation ───────── -Report: /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-gitbook-syntax.md - -[Agent 1 summary of findings] - -─── Code Snippet Verification ───────── -Report: /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-code-snippets.md - -[Agent 2 summary of findings] - -─── Developer Text Quality ──────────── -Report: /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-developer-text.md - -[Agent 3 summary of findings] - -─── Summary ─────────────────────────── -Total issues: X (Y critical, Z warnings) - -Full reports available at: - - /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-gitbook-syntax.md - - /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-code-snippets.md - - /home/tilo/Workspace/.claude/tasks/review-[TIMESTAMP]-developer-text.md -``` - -### Step 4: Provide Actionable Next Steps - -Analyze severity and provide guidance: - -**No issues found:** -``` -✓ Documentation ready for commit - No validation issues detected -``` - -**Warnings only:** -``` -⚠ Review warnings before committing - [List warning-level issues with file:line references] - - Proceed with commit if warnings are acceptable -``` - -**Critical errors found:** -``` -✗ Fix critical errors before committing - -Critical issues that must be resolved: -[List each critical issue with: - - File and line number - - Issue description - - Recommended fix -] -``` - -## Validation - -Before finalizing: -- All three agents completed successfully -- Results are structured with file:line references -- Severity levels are clear (critical, warning, info) -- Actionable fixes are provided - -## Notes - -- Uses same checklists as CodeRabbit 
(`.github/*.md`) -- DeepWiki queries `Lightprotocol/light-protocol` for code verification -- Run locally to catch issues before pushing -- Examples: - - Single file: `/review zk-compression-docs/quickstart.md` - - Directory: `/review zk-compression-docs/compressed-tokens/**` diff --git a/.claude/skills/command-agent-builder/SKILL.md b/.claude/skills/command-agent-builder/SKILL.md deleted file mode 100644 index 29d3729a..00000000 --- a/.claude/skills/command-agent-builder/SKILL.md +++ /dev/null @@ -1,205 +0,0 @@ ---- -name: command-agent-builder -description: Building Claude Code commands and agents following Anthropic best practices. Use PROACTIVELY when user wants to create or optimize commands, agents, or workflows. Validates $ARGUMENTS usage, removes meta-instructions, ensures plan-first patterns, and enforces technical precision. ---- - -# Command & Agent Builder - -Create and optimize Claude Code commands and agents following Anthropic's official best practices. - -## When to Use This Skill - -Use PROACTIVELY when: -- User wants to create a new command or agent -- User asks to optimize or validate existing commands -- User mentions "slash command", "custom command", or "agent" -- Creating workflows that need best practice validation - -## Quick Navigation - -- **Patterns**: [mandatory-execution.md](patterns/mandatory-execution.md), [plan-first.md](patterns/plan-first.md), [freedom-levels.md](patterns/freedom-levels.md) -- **Templates**: [basic-command.md](templates/basic-command.md), [mcp-command.md](templates/mcp-command.md), [research-command.md](templates/research-command.md), [agent-template.md](templates/agent-template.md) -- **Validation**: [checklist.md](validation/checklist.md), [examples.md](validation/examples.md) - ---- - -## Creation Workflow - -### Step 1: Understand the Request - -Output your understanding: -- What type of artifact (command vs agent)? -- What is its purpose (single responsibility)? 
-- What complexity level (basic, MCP, research, agent)? - -### Step 2: Gather Context Interactively - -Ask these questions to gather complete context: - -**1. Type Selection:** -``` -What are you creating? -1. Basic command (single purpose, no external tools) -2. MCP-enabled command (queries external services) -3. Research command (multi-step information gathering) -4. Sub-agent (autonomous specialized task handler) -``` - -**2. Core Details:** -- **Primary purpose**: What does it do? (single, clear responsibility) -- **Trigger conditions**: When should it be used/activated? -- **Arguments needed**: Does it need user input? (→ $ARGUMENTS + argument-hint) -- **WHY context**: Why is this approach/precision/pattern important? - -**3. Behavior Configuration:** -- **Clarification questions**: Does it need to ask for more details? (→ plan-first pattern) -- **Tools required**: What tools does it need? (→ allowed-tools with least privilege) -- **Freedom level**: How prescriptive should instructions be? - - Low: Exact sequences (migrations, security ops) - - Medium: Structured flexibility (code generation, templates) - - High: Exploratory (research, debugging) - -**4. Scope:** -- **Project** (.claude/commands/ or .claude/agents/): Shared with team via git -- **User** (~/.claude/commands/ or ~/.claude/agents/): Personal only - -### Step 3: Select Template and Generate - -Based on type selection, use appropriate template: - -1. **Basic command** → [templates/basic-command.md](templates/basic-command.md) -2. **MCP command** → [templates/mcp-command.md](templates/mcp-command.md) -3. **Research command** → [templates/research-command.md](templates/research-command.md) -4. 
**Sub-agent** → [templates/agent-template.md](templates/agent-template.md) - -Fill in: -- ✅ YAML frontmatter (description with WHAT + WHEN) -- ✅ $ARGUMENTS placeholder (if needs input) -- ✅ WHY context (explain importance) -- ✅ MANDATORY execution pattern from [patterns/mandatory-execution.md](patterns/mandatory-execution.md) -- ✅ Plan-first pattern from [patterns/plan-first.md](patterns/plan-first.md) (if needs clarification) -- ✅ Tool permissions (allowed-tools with least privilege) -- ✅ Validation steps - -### Step 4: Validate Against Checklist - -Run through [validation/checklist.md](validation/checklist.md): - -**Structure:** -- [ ] YAML frontmatter present (name, description) -- [ ] Description includes WHAT + WHEN -- [ ] Uses $ARGUMENTS (not "your question" or placeholders) -- [ ] No meta-instructions ("When invoked...", "read this file...") -- [ ] Includes WHY context -- [ ] MANDATORY pattern present - -**Instructions:** -- [ ] Direct, explicit instructions (not passive) -- [ ] Active verbs throughout -- [ ] Proper code formatting (backticks for inline, avoid triple backticks in nested markdown) -- [ ] Technical precision (specific verbs, exact names) - -**Tool Configuration:** -- [ ] allowed-tools specified (if using tools) -- [ ] Least privilege principle applied - -**Agent-Specific:** -- [ ] Single clear responsibility -- [ ] Proactive activation language ("Use PROACTIVELY when...") -- [ ] Detailed system prompt with examples - -### Step 5: Output and Provide Usage - -Generate the complete file content and provide: -1. **File path**: Where to save it -2. **Complete content**: Ready to copy-paste -3. **Example usage**: How to invoke it -4. **Testing notes**: Multi-model testing reminder (Haiku, Sonnet, Opus) - ---- - -## Optimization Workflow - -For existing commands/agents: - -### Step 1: Read Current Content - -Use Read tool to get current file content. 
- -### Step 2: Validate Against Checklist - -Run through [validation/checklist.md](validation/checklist.md) and identify issues. - -### Step 3: Provide Specific Improvements - -For each issue found, provide: -- **What's wrong**: Specific line or pattern -- **Why it matters**: Impact on Claude's behavior -- **How to fix**: Exact replacement using Edit tool -- **Example**: Good vs bad from [validation/examples.md](validation/examples.md) - -### Step 4: Apply Fixes - -Use Edit tool to apply improvements systematically. - ---- - -## Key Best Practices Reference - -**From Anthropic Documentation:** - -1. **$ARGUMENTS for parameters** - Never use placeholder text like "your question" -2. **No meta-instructions** - Don't explain what the command file is -3. **WHY context first** - Explain why the approach matters -4. **Plan-first for complex tasks** - Output understanding before executing -5. **Least privilege tools** - Only grant necessary tool access -6. **Single responsibility** - Clear, narrow purpose per command/agent -7. **Proactive activation** - Use "PROACTIVELY" in descriptions -8. **Under 500 lines** - Keep SKILL.md concise, separate details - -**Common Anti-Patterns to Avoid:** - -❌ `"your question"` → ✅ `$ARGUMENTS` -❌ `When invoked: 1. read this file...` → ✅ Direct instructions -❌ Vague: "handles", "manages", "processes" → ✅ Specific: "verifies proof", "nullifies hash" -❌ No WHY context → ✅ "Precision is critical because..." 
-❌ All tools allowed → ✅ allowed-tools: mcp__specific__* - ---- - -## Examples from This Project - -**Good Example:** `/ask-deepwiki` command -- Uses $ARGUMENTS for question -- Includes WHY context (precision matters) -- Plan-first pattern (output understanding) -- Specific tool permissions (mcp__deepwiki__*) -- Technical precision rules - -**Patterns Used:** -- MANDATORY execution pattern -- Plan-first approach -- Clarification questions -- Freedom level: Medium (structured but flexible) - ---- - -## Progressive Disclosure - -This SKILL.md provides overview and workflow. For detailed guidance: - -- **[patterns/mandatory-execution.md](patterns/mandatory-execution.md)** - Full MANDATORY pattern to include -- **[patterns/plan-first.md](patterns/plan-first.md)** - Plan-first approach details -- **[patterns/freedom-levels.md](patterns/freedom-levels.md)** - Instruction prescriptiveness guidance -- **[templates/*.md](templates/)** - Ready-to-use templates -- **[validation/checklist.md](validation/checklist.md)** - Complete validation checklist -- **[validation/examples.md](validation/examples.md)** - Good vs bad examples ---- - -## Notes - -- Commands are Markdown files that become prompts -- Agents are specialized subagents with tool access -- Store project-level in .claude/ (shared via git) -- Store personal in ~/.claude/ (user-only) -- Iterate based on actual usage, not assumptions \ No newline at end of file diff --git a/.claude/skills/command-agent-builder/patterns/freedom-levels.md b/.claude/skills/command-agent-builder/patterns/freedom-levels.md deleted file mode 100644 index de5cbc9c..00000000 --- a/.claude/skills/command-agent-builder/patterns/freedom-levels.md +++ /dev/null @@ -1,248 +0,0 @@ -# Freedom Levels (Optional Guidance) - -How prescriptive should your command instructions be? Match the freedom level to task fragility. 
- -## Core Concept - -**Freedom level** = How much flexibility Claude has in execution - -- **Low freedom**: Exact sequences, no deviation -- **Medium freedom**: Structured templates with parameters -- **High freedom**: Exploratory approaches - -**Source**: Anthropic Agent Skills Best Practices - -## The Three Levels - -### Low Freedom (Exact Sequences) - -**Use when:** -- Database migrations -- Security operations -- Destructive actions (rm, force push) -- Compliance-critical operations -- Multi-step dependencies - -**Characteristics:** -- Step-by-step exact commands -- No decision points -- Explicit ordering -- Clear validation checkpoints -- Rollback instructions - -**Example: `/commit` command** -```markdown -### Step 2: Execute Commit - -Run these commands in exact sequence: - -1. Stage changes: - - `git add [specific files]` - -2. Create commit: - - `git commit -m "$(cat <<'EOF' - [Commit message] - - Co-Authored-By: Claude - EOF - )"` - -3. Verify: - - `git status` - -**Do NOT:** -- Skip any step -- Reorder operations -- Use --amend without checking authorship -``` - -**When to use:** -- Fragile operations that break if done wrong -- Security-sensitive tasks -- Operations requiring exact sequences - -### Medium Freedom (Structured Flexibility) - -**Use when:** -- Code generation from templates -- Component creation -- Configuration tasks -- Report generation -- Testing workflows - -**Characteristics:** -- Template/pattern to follow -- Customization parameters -- Decision points with guidance -- Structured but adaptable - -**Example: `/create-component` command** -```markdown -### Step 2: Generate Component - -Use this structure and customize as needed: - -```typescript -interface [ComponentName]Props { - // Add props based on requirements -} - -export function [ComponentName]({ ...props }: [ComponentName]Props) { - // Implementation varies by: - // - State management needs - // - Event handlers required - // - Styling approach - - return ( - // JSX structure 
follows project patterns - ); -} -``` - -**Customize:** -- Props based on user requirements -- State management (useState, useReducer, store) -- Styling (CSS modules, styled-components, tailwind) -- Event handlers as needed - -**Follow project patterns:** -- File naming: PascalCase.tsx -- Export style: named exports -- Testing: create ComponentName.test.tsx -``` - -**When to use:** -- Preferred pattern exists -- Some variation is acceptable -- Configuration affects behavior -- Multiple valid approaches - -### High Freedom (Exploratory) - -**Use when:** -- Research tasks -- Debugging unknown issues -- Creative work -- Open-ended analysis -- Learning new codebases - -**Characteristics:** -- Goal-oriented instructions -- Multiple approach options -- Iterative refinement -- Exploration encouraged - -**Example: `/research` command** -```markdown -### Step 2: Conduct Research - -Explore multiple approaches to answer the question: - -**Strategies to consider:** -1. Search documentation for official guidance -2. Query code repositories for implementations -3. Check recent issues/discussions for context -4. Review examples and patterns -5. Cross-reference multiple sources - -**As you research:** -- Follow promising leads -- Adjust approach based on findings -- Synthesize information from multiple sources -- Note conflicting information -- Prioritize authoritative sources - -**Output:** -- Key findings with sources -- Multiple perspectives if relevant -- Confidence level in answer -- Gaps in available information -``` - -**When to use:** -- No clear single approach -- Exploration needed -- Creative solutions wanted -- Learning objectives - -## Choosing the Right Level - -### Decision Framework - -Ask yourself: - -1. **What's the risk of deviation?** - - High risk → Low freedom - - Medium risk → Medium freedom - - Low risk → High freedom - -2. **Is there a required sequence?** - - Yes, strict → Low freedom - - Yes, flexible → Medium freedom - - No → High freedom - -3. 
**Are multiple approaches valid?** - - No, one way only → Low freedom - - Yes, within patterns → Medium freedom - - Yes, many ways → High freedom - -4. **What's the task fragility?** - - Breaks easily → Low freedom - - Adaptable → Medium freedom - - Resilient → High freedom - -### Mixed Approaches - -Commands can mix freedom levels across steps: - -```markdown -### Step 1: Validate Requirements (Low Freedom) -[Exact validation steps] - -### Step 2: Generate Solution (Medium Freedom) -[Template with customization] - -### Step 3: Research Edge Cases (High Freedom) -[Exploratory investigation] -``` - - -## Common Mistakes - -❌ **Too restrictive for creative tasks:** -```markdown -### Research Architecture Patterns - -Step 1: Search for "microservices" -Step 2: Read first result -Step 3: Summarize in 3 sentences -``` -This should be high freedom exploration. - -❌ **Too loose for critical operations:** -```markdown -### Deploy to Production - -Deploy the application using appropriate methods. -``` -This should be low freedom with exact steps. - -❌ **No guidance for medium tasks:** -```markdown -### Create Component - -Make a component. -``` -Needs template and customization guidance. - -## Notes - -- Optional guidance for command creators, not mandatory pattern -- Can mix levels across different steps -- Default to medium freedom if unsure - ---- - -## Examples - -See `examples/freedom-levels-implementations.md` for detailed examples of each level. diff --git a/.claude/skills/command-agent-builder/patterns/mandatory-execution.md b/.claude/skills/command-agent-builder/patterns/mandatory-execution.md deleted file mode 100644 index e9dc4fec..00000000 --- a/.claude/skills/command-agent-builder/patterns/mandatory-execution.md +++ /dev/null @@ -1,45 +0,0 @@ -# Mandatory Execution Pattern - -Include this pattern in every command and agent to ensure proper planning and clarification before execution. 
- -## Pattern Structure - -```markdown -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- Always make a plan before answering a prompt -- State how you understood the query -- **Instead of making assumptions execute `/ask-deepwiki` to verify technical accuracy** - -#### Then assess if clarification is needed: -If the question is vague, incomplete, or could have multiple interpretations, ask: -- What specific component or feature are you working with? -- What problem are you trying to solve? -- What have you tried so far? -- What level of detail do you need (overview vs implementation)? - -#### Question refinement checklist: -- Use exact component names (`CompressedAccountMeta`, not "account metadata") -- Use specific operations ("verifies proof", not "handles proof") -- Include concrete function names or error messages when available -``` - -## Usage - -**Include in:** All commands and agents. - -**Placement:** After command header and WHY context, before Step 1. - -**Customization:** Keep structure (plan → clarify → refine), adapt clarification questions and refinement checklist to your domain. - -## Notes - -- Derived from Anthropic best practices (explicit stage-gating with clarification) -- `/ask-deepwiki` reference is project-specific; adapt for your context - ---- - -## Example - -See `examples/mandatory-execution-integration.md` for complete implementation. \ No newline at end of file diff --git a/.claude/skills/command-agent-builder/patterns/plan-first.md b/.claude/skills/command-agent-builder/patterns/plan-first.md deleted file mode 100644 index 56d875a0..00000000 --- a/.claude/skills/command-agent-builder/patterns/plan-first.md +++ /dev/null @@ -1,116 +0,0 @@ -# Plan-First Pattern - -Show understanding and create a plan before executing any task. This gives users visibility and prevents incorrect assumptions. - -## Core Principle - -**Output first, execute second.** - -Claude should: -1. 
State its understanding of the request -2. Show the plan it will follow -3. Ask for clarification if needed -4. THEN proceed with execution - -## Pattern Structure - -```markdown -### Step 1: [Analyze/Understand/Plan] [Subject] - -#### First, output your understanding and plan: -- [What you identified about the scope] -- [What you'll do/query/create] -- [Refined version of the request] - -#### Then assess if clarification is needed: -[Specific questions if request is vague] - -[Repository mapping / domain-specific guidance] - -#### [Action] checklist: -- [Specific requirement 1] -- [Specific requirement 2] -- [Specific requirement 3] -``` - -## Why This Works - -From Anthropic: "Claude performs best when it has a clear target to iterate against" - -**Benefits:** User sees interpretation before execution, enables early correction, prevents wasted tool calls, separates planning from execution. - -## Integration with Mandatory Pattern - -Plan-first is **part of** the mandatory execution pattern: - -```markdown -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -[Plan-first pattern goes here] - -#### Then assess if clarification is needed: -[Clarification questions] -``` - -Relationship: -- Mandatory pattern = overall structure -- Plan-first = specific implementation of "output understanding" - -## Anti-Patterns - -❌ **Jumping straight to execution:** -```markdown -### Step 1: Query DeepWiki -Call `mcp__deepwiki__ask_question(...)` -``` - -✅ **Plan first:** -```markdown -### Step 1: Analyze and Create Plan - -#### First, output your understanding: -- Repository: Lightprotocol/light-protocol (ZK Compression question) -- Query: "How does ValidityProof verification work?" - -#### Then assess if clarification needed: -[questions if vague] - -### Step 2: Query DeepWiki -Call `mcp__deepwiki__ask_question(...)` -``` - -❌ **Vague planning:** -```markdown -I'll search for information about the topic. 
-``` - -✅ **Specific planning:** -```markdown -I'll query three sources: -1. DeepWiki for implementation details -2. Public docs for conceptual overview -3. GitHub for code examples -Refined query: "How does CompressedAccountMeta structure validation work in state compression?" -``` - -## Validation Checklist - -Good plan-first output includes: -- [ ] Clear statement of understanding -- [ ] Specific steps to follow -- [ ] Refined/clarified version of request -- [ ] Targeted clarification questions (if needed) -- [ ] Domain-specific guidance/mapping - -## Notes - -- Particularly critical for MCP commands (prevent wasted API calls) -- Short plans for simple tasks, detailed plans for complex ones -- Always output the plan as TEXT, not as tool calls or comments - ---- - -## Example - -See `examples/ask-deepwiki-plan-first.md` for a complete implementation. \ No newline at end of file diff --git a/.claude/skills/command-agent-builder/templates/agent-template.md b/.claude/skills/command-agent-builder/templates/agent-template.md deleted file mode 100644 index f3c528c6..00000000 --- a/.claude/skills/command-agent-builder/templates/agent-template.md +++ /dev/null @@ -1,252 +0,0 @@ -# Agent (Sub-Agent) Template - -Use this template for creating autonomous sub-agents with specialized responsibilities. - -## Template - -```markdown ---- -name: agent-name -description: [WHAT it does] AND [WHEN to use - include "Use PROACTIVELY when..." 
for automatic triggering] -allowed-tools: [Least privilege - only tools needed for this specific responsibility] ---- - -# Agent: [Name] - -**Single Responsibility:** [Clear, narrow purpose statement] - -[WHY: Explain why this agent exists and why automation/specialization matters] - -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- Always make a plan before answering a prompt -- State how you understood the task -- Identify [domain-specific context] -- Show your planned approach with specific steps - -#### Then assess if clarification is needed: -If the task is vague, incomplete, or could have multiple interpretations, ask: -- [Domain-specific question 1] -- [Domain-specific question 2] -- [Domain-specific question 3] -- What level of detail/thoroughness is expected? - -#### [Task] refinement checklist: -- [Specific requirement 1] -- [Specific requirement 2] -- [Specific requirement 3] - -## Workflow - -### Step 1: Validate and Understand - -**Validate inputs:** -- [What to check before starting] -- [Prerequisites that must be met] -- [Constraints to verify] - -**Understand context:** -- [What context to gather] -- [What to analyze] -- [What patterns to identify] - -### Step 2: Execute [Primary Task] - -[Detailed instructions with specific steps] - -[Decision points with clear criteria] - -[Examples showing expected behavior] - -**As you work:** -- [Guideline 1] -- [Guideline 2] -- [Guideline 3] - -### Step 3: Verify and Report - -**Verification checklist:** -- [ ] [Quality check 1] -- [ ] [Quality check 2] -- [ ] [Quality check 3] - -**Report:** -- What was done -- What worked well -- What issues encountered -- What requires attention - -## Examples - -### Example 1: [Scenario] - -**Input:** [What the agent receives] - -**Process:** -1. [Step taken] -2. [Step taken] -3. [Step taken] - -**Output:** [What the agent produces] - -### Example 2: [Another Scenario] - -**Input:** [What the agent receives] - -**Process:** -1. 
[Step taken] -2. [Step taken] -3. [Step taken] - -**Output:** [What the agent produces] - -## Constraints and Security - -**What this agent MUST NOT do:** -- [Constraint 1 - with reason] -- [Constraint 2 - with reason] -- [Constraint 3 - with reason] - -**Security considerations:** -- [Security rule 1] -- [Security rule 2] -- [Security rule 3] - -**Error handling:** -- If [error condition]: [how to handle] -- If [error condition]: [how to handle] -- If uncertain: Stop and ask user - -## Tool Usage - -**Allowed tools:** [list from frontmatter] - -**Tool usage guidelines:** -- [Tool 1]: Use for [specific purpose] -- [Tool 2]: Use for [specific purpose] -- [Tool 3]: Use when [specific condition] - -**Forbidden operations:** -- Do not [dangerous operation] without confirmation -- Do not [destructive action] without backup -- Do not [sensitive operation] without validation - -## Notes - -- [Important reminder 1] -- [Important reminder 2] -- [Known limitations] -- [When to delegate to user] -``` - ---- - -## Creating Effective Agents - -### Before Writing (Evaluation-Driven) - -1. **Test without the agent first** - What does Claude miss? -2. **Identify 3 test scenarios** - Common, edge case, error case -3. **Write minimal instructions** - Address only the gaps - -### Appropriate Degrees of Freedom - -Match instruction specificity to task fragility: - -**Low Freedom (Exact Scripts)** - Error-prone operations -```bash -#!/bin/bash -npm run build && npm test && git push origin main -``` - -**Medium Freedom (Pseudocode)** - Preferred patterns with flexibility -```typescript -function generate${ComponentName}() { - // 1. Create interface | 2. Implement [features] | 3. 
Add tests -} -``` - -**High Freedom (Text Instructions)** - Exploratory tasks -```markdown -Investigate [topic] by analyzing codebase, checking docs, proposing solutions -``` - -### Progressive Disclosure - -Keep agent file under 500 lines: -``` -.claude/agents/agent-name/ -├── agent.md # Main (<500 lines) -├── scripts/ # Validation (0 tokens until used) -└── examples/ # Extended examples (reference when needed) -``` - -Reference with: `bash scripts/validate.sh` or `cat examples/scenario-1.md` - -**Why:** Files consume 0 tokens until explicitly loaded. - -### Anti-Patterns to Avoid - -**Assuming pre-installed packages** -```markdown -# Bad: Run pytest -# Good: Verify pytest installed, if not: pip install pytest, then run -``` - -**Windows paths** -```markdown -# Bad: C:\scripts\validate.bat -# Good: scripts/validate.sh -``` - -**Deeply nested references** -```markdown -# Bad: See reference.md → see details.md → see examples.md -# Good: See examples/code-review-example.md (one level deep) -``` - -**Excessive options without defaults** -```markdown -# Bad: Choose format: JSON, YAML, TOML, XML, CSV, or custom -# Good: Output as JSON (override with --format flag if needed) -``` - -**Vague descriptions** -```markdown -# Bad: description: Helper agent for code tasks -# Good: description: Reviews code for security vulnerabilities. Use PROACTIVELY after authentication/database code is written. 
-``` - -### Testing Your Agent - -**Cross-model testing:** -- Test with Haiku (needs more explicit guidance) -- Test with Sonnet (balanced) -- Test with Opus (handles ambiguity better) -- Adjust instructions if Haiku struggles - -### Agent vs Command - -**Use agent when:** -- Task needs autonomous execution -- Multiple steps with decision points -- Should activate automatically -- Needs tool access control - -**Use command when:** -- User explicitly invokes -- Needs user input ($ARGUMENTS) -- Simpler workflow -- More user interaction - -### Storage - -- **Project agents**: `.claude/agents/` (check into version control) -- **Personal agents**: `~/.claude/agents/` (user-specific) - ---- - -## Complete Example - -See `examples/code-reviewer.md` for a fully implemented agent following this template. diff --git a/.claude/skills/command-agent-builder/templates/basic-command.md b/.claude/skills/command-agent-builder/templates/basic-command.md deleted file mode 100644 index ef2ed19c..00000000 --- a/.claude/skills/command-agent-builder/templates/basic-command.md +++ /dev/null @@ -1,173 +0,0 @@ -# Basic Command Template - -Use this template for simple, single-purpose commands that don't require external tools. - -## Template - -```markdown ---- -description: [WHAT it does] AND [WHEN to use it - be explicit about triggering conditions] -argument-hint: ---- - -# /command-name - -[Task]: $ARGUMENTS - -[WHY context: Explain why this approach/precision/pattern matters] - -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- Always make a plan before answering a prompt -- State how you understood the query -- [Domain-specific understanding points] - -#### Then assess if clarification is needed: -If the question is vague, incomplete, or could have multiple interpretations, ask: -- [Domain-specific question 1] -- [Domain-specific question 2] -- [Domain-specific question 3] -- What level of detail do you need (overview vs implementation)? 
- -#### [Action] refinement checklist: -- [Specific requirement 1] -- [Specific requirement 2] -- [Specific requirement 3] - -### Step 1: [Active verb] [What] - -[Direct, explicit instructions] - -[If needed: provide options or decision points] - -### Step 2: [Active verb] [What] - -[Direct, explicit instructions] - -[If applicable: validation steps] - -### Step 3: [Active verb] [What] - -[Direct, explicit instructions] - -[If applicable: output formatting] - -## Validation - -Before finalizing: -- [Checkpoint 1] -- [Checkpoint 2] -- [Checkpoint 3] - -## Notes - -- [Key reminder 1] -- [Key reminder 2] -- [Cross-references to documentation] -``` - ---- - -## Creating Effective Commands - -### Before Writing (Evaluation-Driven) - -1. **Test without the command first** - What does Claude miss? -2. **Identify 3 test scenarios** - Common, edge case, error case -3. **Write minimal instructions** - Address only the gaps - -### Appropriate Degrees of Freedom - -Commands typically use **medium freedom** - clear steps with flexibility. - -**Adjust based on task fragility:** -```bash -# Low freedom (critical operations) -git add . && git commit -m "message" && git push origin main - -# Medium freedom (most commands) -Run formatter on $ARGUMENTS: -- JavaScript/TypeScript: prettier --write -- Python: black -- Verify with git diff - -# High freedom (exploratory) -Analyze $ARGUMENTS for patterns and suggest improvements -``` - -### Progressive Disclosure - -For complex commands, organize supporting files: -``` -.claude/commands/ -├── command-name.md # Main command (<300 lines) -├── scripts/ -│ └── validate.sh # Executable helpers -└── templates/ - └── output-template.txt # Output formatting -``` - -Reference with: `bash scripts/validate.sh` or `cat templates/output-template.txt` - -### Anti-Patterns to Avoid - -**Vague descriptions** -```markdown -# Bad: description: Format code -# Good: description: Format code according to project style guide. Use before commits. 
-``` - -**Not using $ARGUMENTS** -```markdown -# Bad: Run formatter on "the files you want to format" -# Good: Run formatter on $ARGUMENTS -``` - -**Passive instructions** -```markdown -# Bad: The code should be formatted using prettier -# Good: Run `prettier --write $ARGUMENTS` -``` - -**Missing validation** -```markdown -# Bad: Step 3: Done! -# Good: Step 3: Verify - Check all files formatted, no errors, formatting-only changes -``` - -**Assuming pre-installed tools** -```markdown -# Bad: Run pytest -# Good: Verify pytest installed (pip install pytest if needed), then run pytest -``` - -**Windows paths** -```markdown -# Bad: C:\scripts\validate.bat -# Good: scripts/validate.sh -``` - -### Testing Your Command - -**Cross-model testing:** -- Test with Haiku (needs more explicit guidance) -- Test with Sonnet (balanced) -- Test with Opus (handles ambiguity better) - -**Scenario testing:** -- Typical arguments -- Edge cases (no files, many files, special characters) -- Invalid arguments -- Error conditions - -### Storage - -- **Project commands**: `.claude/commands/` (check into version control) -- **Personal commands**: `~/.claude/commands/` (user-specific) - ---- - -## Complete Example - -See `examples/format-code.md` for a fully implemented command following this template. diff --git a/.claude/skills/command-agent-builder/templates/mcp-command.md b/.claude/skills/command-agent-builder/templates/mcp-command.md deleted file mode 100644 index 0944ff34..00000000 --- a/.claude/skills/command-agent-builder/templates/mcp-command.md +++ /dev/null @@ -1,192 +0,0 @@ -# MCP Command Template - -Use this template for commands that query external services via Model Context Protocol (MCP). 
- -## Template - -```markdown ---- -description: [WHAT + WHEN - emphasize precision/accuracy if relevant] -argument-hint: -allowed-tools: mcp__service__* ---- - -# /command-name - -Answer: $ARGUMENTS - -[WHY: Explain importance of precision/accuracy for this domain] - -## MANDATORY: Before ANY task execution - -#### First, output your understanding and plan: -- Always make a plan before answering a prompt -- State how you understood the query -- Identify [data source/repository/service] scope -- Show the refined [question/query] you'll use - -#### Then assess if clarification is needed: -If the question is vague, incomplete, or could have multiple interpretations, ask: -- What specific [component/feature/topic] are you working with? -- What problem are you trying to solve? -- What have you tried so far? -- What level of detail do you need (overview vs implementation)? - -#### [Query] refinement checklist: -- Use exact [terms/names/identifiers] from [domain] -- Use specific [operations/verbs] ([precise action], not [vague action]) -- Include concrete [references/examples/context] when available - -### Step 1: [Analyze/Determine] [Scope/Source] - -**[Source/Repository] mapping:** -- [Option A] → `service/repo-a` -- [Option B] → `service/repo-b` -- [Option C] → `service/repo-c` - -Select appropriate source based on query content. - -### Step 2: Query [Service Name] - -For the appropriate [source], call in sequence: - -**For [Source A]:** -- `mcp__service__[tool_1]("param")` -- `mcp__service__[tool_2]("param")` -- `mcp__service__[tool_3]("param", $ARGUMENTS)` - -**For [Source B]:** -- `mcp__service__[tool_1]("param")` -- `mcp__service__[tool_2]("param")` -- `mcp__service__[tool_3]("param", $ARGUMENTS)` - -**For complex questions:** Query multiple sources as needed. - -### Step 3: Format Response with [Domain] Precision - -Structure: -1. **Direct answer** - Immediate [domain] explanation -2. **[Domain] details** - Specific implementations, data structures -3. 
**Code examples** - With inline comments explaining key points -4. **Source references** - [References format] from [service] -5. **Related concepts** - Connections to other [domain concepts] (if relevant) - -**Precision Rules:** - -AVOID: -- Vague verbs: "handles", "manages", "processes", "enables", "provides" -- Abstract terms: "operations", "management", "coordination" -- Marketing language: "powerful", "seamless", "easy" -- Generic descriptions: [vague example] instead of [precise example] - -USE: -- Exact [function/method/API] names: `[example1]()`, `[example2]()` -- Concrete data structures: `[Type1]`, `[Type2]`, `[Type3]` -- Specific operations: "[precise verb 1]", "[precise verb 2]", "[precise verb 3]" -- Precise [field/parameter] names: `[field1]`, `[field2]`, `[field3]` -- [Reference format] from [service] responses - -**Cross-reference with:** -- [Documentation URL 1] -- [Documentation URL 2] -- [Source repository URL] - -## Notes - -- Always include source references from [service] responses -- Provide runnable code examples for implementation questions -- Ask follow-up questions to [service] for clarification when needed -``` - ---- - -## Creating Effective MCP Commands - -### Before Writing (Evaluation-Driven) - -1. **Test without the command first** - What does Claude miss? -2. **Identify 3 test scenarios** - Common, edge case, error case -3. **Write minimal instructions** - Address only the gaps - -### MCP-Specific Setup - -**Identify the MCP service and tools:** -```markdown -allowed-tools: mcp__service__* # All tools from service -allowed-tools: mcp__service__tool1, mcp__service__tool2 # Specific tools only -``` - -**Define source/repository mapping** - What data sources exist and how to choose between them. - -**Structure MCP call sequences** - Show call order for each source, include $ARGUMENTS in final query tool. 
- -### MCP Call Formatting - -**In nested markdown, use backticks (NOT triple backticks):** - -❌ Don't use triple backticks (breaks outer code block): -```markdown -``` -mcp__service__call("param") -``` -``` - -✅ Use inline backticks: -```markdown -- `mcp__service__call("param")` -- `mcp__service__call("param", $ARGUMENTS)` -``` - -**For multiple calls, use bullet points:** -```markdown -**For [scenario]:** -- `tool_1("param")` -- `tool_2("param")` -- `tool_3("param", $ARGUMENTS)` -``` - -### Anti-Patterns to Avoid - -**Vague queries** -```markdown -# Bad: "How does it work?" -# Good: "How does CompressedAccountMeta.new_init() create the account hash?" -``` - -**Missing precision rules** -```markdown -# Bad: No guidance on terminology -# Good: AVOID "handles proof" → USE "verifies proof against state root" -``` - -**Not using $ARGUMENTS** -```markdown -# Bad: ask_question("repo", "user question") -# Good: ask_question("repo", $ARGUMENTS) -``` - -**Triple backticks in nested markdown** -```markdown -# Bad: ```mcp__call()``` (breaks template) -# Good: `mcp__call()` (inline backticks) -``` - -### Testing Your Command - -**Cross-model testing:** -- Test with Haiku (needs more explicit guidance) -- Test with Sonnet (balanced) -- Test with Opus (handles ambiguity better) -- Adjust instructions if Haiku struggles - -### Storage - -- **Project commands**: `.claude/commands/` (check into version control) -- **Personal commands**: `~/.claude/commands/` (user-specific) -- **Document required MCP server** in project README - ---- - -## Complete Example - -See `examples/ask-deepwiki.md` for a fully implemented MCP command following this template. 
diff --git a/.claude/skills/command-agent-builder/validation/checklist.md b/.claude/skills/command-agent-builder/validation/checklist.md deleted file mode 100644 index 075a30de..00000000 --- a/.claude/skills/command-agent-builder/validation/checklist.md +++ /dev/null @@ -1,265 +0,0 @@ -# Validation Checklist - -Use this checklist to validate commands and agents against Anthropic's best practices. - -## Structure Validation - -### Frontmatter (YAML) - -- [ ] **Frontmatter present** with `---` delimiters -- [ ] **description field** present and non-empty -- [ ] **Description includes WHAT** - What the command/agent does -- [ ] **Description includes WHEN** - When to use it / triggering conditions -- [ ] **description under 1024 characters** -- [ ] **argument-hint present** if command uses $ARGUMENTS -- [ ] **argument-hint format** is `` not `[param]` or `{param}` -- [ ] **allowed-tools specified** if using MCP or specific tools -- [ ] **allowed-tools follows least privilege** - only necessary tools - -**For agents only:** -- [ ] **name field present** (lowercase, hyphens, max 64 chars) -- [ ] **name follows gerund form** (verb-ing) if applicable -- [ ] **name does not contain** "anthropic" or "claude" - -### File Structure - -- [ ] **Header present** with # /command-name or # Agent: Name -- [ ] **WHY context included** - Explains importance/rationale -- [ ] **MANDATORY pattern present** - Before ANY task execution section -- [ ] **Steps clearly numbered** - ### Step 1, ### Step 2, etc. 
-- [ ] **Notes section** at end (if applicable) -- [ ] **File under 500 lines** for SKILL.md (templates can be longer) - -## Content Validation - -### Arguments and Placeholders - -- [ ] **Uses $ARGUMENTS** not placeholder text -- [ ] **No "your question"** or similar placeholders -- [ ] **No "the files"** or similar vague references -- [ ] **$ARGUMENTS placed correctly** - where user input should go -- [ ] **Argument referenced in command** if argument-hint present - -### Instructions Quality - -- [ ] **No meta-instructions** - No "When invoked..." or "read this file..." -- [ ] **Direct instructions** - No "The code should be..." passive voice -- [ ] **Active verbs** throughout - "Run", "Execute", "Create", not "is run", "should be" -- [ ] **Explicit and clear** - No ambiguous instructions -- [ ] **Specific actions** - "Run `npm test`" not "test the code" -- [ ] **Stage-gating present** - Plan before execute, validate before finalize - -### MANDATORY Pattern - -- [ ] **Plan-first section** - "First, output your understanding and plan" -- [ ] **Clarification section** - "Then assess if clarification is needed" -- [ ] **Refinement checklist** - Domain-specific requirements -- [ ] **Plan asks for understanding** - Not just "create plan" -- [ ] **Clarification questions relevant** to domain -- [ ] **Checklist items specific** not generic - -### Technical Precision - -- [ ] **Avoids vague verbs** - No "handles", "manages", "processes", "enables", "provides" -- [ ] **Uses specific verbs** - "verifies", "nullifies", "appends", "executes" -- [ ] **Avoids abstract terms** - No "operations", "management", "coordination" -- [ ] **Uses concrete terms** - Specific functions, types, methods -- [ ] **No marketing language** - No "powerful", "seamless", "easy", "simply" -- [ ] **Uses exact names** - CompressedAccountMeta not "account metadata" - -### Code and Formatting - -- [ ] **Inline code uses backticks** - `code` not ```code``` -- [ ] **No triple backticks in nested 
markdown** - Breaks outer code block -- [ ] **Code blocks properly formatted** - When appropriate -- [ ] **Commands are runnable** - Actual syntax, not pseudocode (unless intentional) -- [ ] **File paths are accurate** - If referencing specific files - -## Tool Configuration - -### Tool Permissions - -- [ ] **allowed-tools follows glob pattern** - `mcp__service__*` for all tools -- [ ] **allowed-tools is specific** - Lists only needed tools -- [ ] **Least privilege applied** - Not granted unnecessary tools -- [ ] **Tool usage explained** - Why each tool is needed - -### Tool Usage in Commands - -- [ ] **Tools used correctly** - Proper syntax for each tool -- [ ] **MCP calls formatted properly** - With backticks in lists -- [ ] **Tool calls include $ARGUMENTS** - Where user input should go -- [ ] **Error handling for tool failures** - What to do if tool fails - -## Workflow Validation - -### Step Structure - -- [ ] **Step 1 includes planning** - For complex commands -- [ ] **Steps are ordered logically** - Natural progression -- [ ] **Each step has clear action** - Not vague goals -- [ ] **Decision points have criteria** - When to choose option A vs B -- [ ] **Validation steps included** - Check before finalizing - -### Examples (For Agents) - -- [ ] **At least 2 examples** provided -- [ ] **Examples show input/output** - What agent receives and produces -- [ ] **Examples demonstrate key scenarios** - Common use cases -- [ ] **Examples show decision-making** - Not just happy path - -### Constraints (For Agents) - -- [ ] **Constraints section present** - What agent must NOT do -- [ ] **Security considerations listed** - Dangerous operations identified -- [ ] **Error handling defined** - What to do when uncertain -- [ ] **Constraints have rationale** - Why each constraint exists - -## Agent-Specific Validation - -### Responsibility - -- [ ] **Single, clear responsibility** stated -- [ ] **Responsibility is narrow** - Not trying to do everything -- [ ] **Proactive 
activation language** - "Use PROACTIVELY when..." -- [ ] **Triggering conditions clear** - When to invoke automatically - -### Autonomy - -- [ ] **Can run without user input** - No mid-execution questions -- [ ] **Decision criteria provided** - For all decision points -- [ ] **Stopping conditions defined** - When to stop and ask user -- [ ] **Error recovery specified** - How to handle failures - -### Safety - -- [ ] **Destructive operations guarded** - Require confirmation or prevented -- [ ] **No force operations** without explicit user request -- [ ] **Rollback instructions** if applicable -- [ ] **Security boundaries enforced** - No credential access, etc. - -## Command Type-Specific - -### Basic Commands - -- [ ] **Single purpose focus** - Does one thing well -- [ ] **Clear usage example** in notes -- [ ] **Output format specified** - What user should see - -### MCP Commands - -- [ ] **Source/repository mapping** provided -- [ ] **MCP call sequence** shown for each source -- [ ] **$ARGUMENTS in query tools** - Passed to ask_question or similar -- [ ] **Precision rules included** - Domain-specific terminology -- [ ] **Cross-references provided** - Documentation URLs - -### Research Commands - -- [ ] **Research scope defined** - Boundaries clear -- [ ] **Multiple sources consulted** - Not single-source -- [ ] **Synthesis step included** - Combine findings -- [ ] **Source attribution required** - All references documented -- [ ] **Gap identification** - Note what's unclear - -## Best Practices Adherence - -### From Prompt Engineering - -- [ ] **Clear and direct** - No subtle hints -- [ ] **Structured with headers** - Easy to navigate -- [ ] **Examples where helpful** - Show don't just tell - -### From Claude Code Best Practices - -- [ ] **$ARGUMENTS for parameters** - Dynamic input -- [ ] **Extended thinking triggers** - Plan-first approach -- [ ] **Clear targets provided** - Success criteria - -### From Sub-Agents Guide - -- [ ] **Single responsibility** - 
Focused purpose -- [ ] **Detailed prompt** - Comprehensive instructions -- [ ] **Least privilege** - Minimal tool access -- [ ] **Proactive if appropriate** - Automatic activation - -### From Agent Skills Best Practices - -- [ ] **Concise** - No unnecessary words -- [ ] **Progressive disclosure** - Main file navigates to details -- [ ] **Appropriate freedom level** - Matches task fragility -- [ ] **Multi-model consideration** - Works across Haiku/Sonnet/Opus - -## Final Checks - -### Completeness - -- [ ] **All sections present** - Nothing obviously missing -- [ ] **Cross-references work** - Links point to actual files -- [ ] **Examples are complete** - Not TODO or placeholder - -### Clarity - -- [ ] **Instructions are understandable** - Clear to another person -- [ ] **No ambiguous terms** - Specific throughout -- [ ] **Logical flow** - Natural progression - -### Testability - -- [ ] **Can be tested** - Possible to verify it works -- [ ] **Success criteria clear** - Know when it's done right -- [ ] **Failure modes identified** - Know what could go wrong - -## Severity Levels - -### Critical Issues (Must Fix) - -- Missing $ARGUMENTS (uses placeholder) -- No description or malformed frontmatter -- Meta-instructions present -- No MANDATORY pattern -- Security vulnerabilities (in agents) -- All tools granted (violates least privilege) - -### Important Issues (Should Fix) - -- Missing WHY context -- Passive or vague instructions -- No validation steps -- Missing examples (for agents) -- No constraints defined (for agents) -- Poor technical precision - -### Minor Issues (Nice to Fix) - -- Formatting inconsistencies -- Could use better terminology -- Missing cross-references -- Light on examples -- Could be more concise - -## Using This Checklist - -**When creating:** -- Use as template guide -- Check off items as you add them -- Refer to validation/examples.md for good patterns - -**When validating:** -- Go through each section systematically -- Note all issues found 
with severity -- Provide specific fixes for each issue -- Reference examples.md for good vs bad patterns - -**When optimizing:** -- Focus on critical issues first -- Group related issues together -- Provide concrete edit instructions -- Verify fixes don't break other aspects - -## Notes - -- Not all items apply to all command types -- Use judgment for context-specific items -- When in doubt, check official Anthropic docs -- Validation examples.md provides concrete good/bad examples diff --git a/.claude/skills/prompt-template/SKILL.md b/.claude/skills/prompt-template/SKILL.md deleted file mode 100644 index 4f7d2dc9..00000000 --- a/.claude/skills/prompt-template/SKILL.md +++ /dev/null @@ -1,95 +0,0 @@ ---- -name: prompt-template -description: Generate structured implementation prompts for SDK integration, API setup, or feature implementation. Use when user wants to create a prompt for implementing something in their codebase. ---- - -# Prompt Template Skill - -## When to Use - -User says: -- "create a prompt for [SDK/feature]" -- "help me write a prompt to implement [X]" -- "I need to integrate [library]" -- Shows you documentation and wants an implementation prompt - -## Core Purpose - -Transform documentation into structured implementation prompts that: -- Extract exact technical requirements from source -- Gather user's application context -- Generate copy-paste ready prompts for any LLM - -## Process - -### Step 1: Identify Source Documentation - -Ask: -1. What is the official documentation URL? -2. What specific page/section covers this? -3. What is the source code directory URL? (GitHub folder/file) -4. Any GitHub repos with examples? 
- -### Step 2: Read Documentation and Extract - -From the source documentation, extract: -- **Installation**: Exact commands with versions -- **Imports**: Exact import statements -- **Configuration**: All options with types -- **Initialization**: Complete working example -- **Key APIs**: Core methods/functions needed - -### Step 3: Gather User Context - -Ask about their application: -1. **Framework**: React/Next.js/Express/Rust/etc.? -2. **Language**: TypeScript/JavaScript/Rust? Version? -3. **Service organization**: How do they structure clients/services? -4. **Environment management**: .env files, config service, other? -5. **Error handling**: try/catch, error types, logging pattern? -6. **Type system**: TypeScript? Strict mode? - -### Step 4: Generate Structured Prompt - -Use the template from resources/implementation-prompt-template.md - -Fill in: -- Task overview (one sentence) -- User's tech stack context -- Technical requirements extracted from docs -- Specific implementation deliverables -- Documentation references with exact URLs - -### Step 5: Validate - -Before delivering, check: -- [ ] Source documentation URL included (specific page, not homepage) -- [ ] Source code directory URL included (GitHub folder/file where implemented) -- [ ] Technical requirements from official docs (not assumptions) -- [ ] User context gathered (or questions asked) -- [ ] Installation commands include versions -- [ ] Initialization example is complete working code -- [ ] Deliverables are specific and actionable -- [ ] No assumptions about user's patterns - -## Key Principles - -1. **Never assume** - Always ask about user's patterns -2. **Extract from source** - Get technical details from official docs, not memory -3. **Be specific** - No vague requests like "set up properly" -4. **Include WHY** - Context about why patterns matter from docs -5. 
**Link precisely** - Reference exact documentation pages - -## Example Templates - -Load resources/implementation-prompt-template.md for: -- SDK Client Setup (like Grid, ZK Compression) -- API Integration (REST, GraphQL) -- Feature Implementation (new functionality) -- Migration (library upgrade, framework switch) - -## Integration - -- Use alongside zk-compression-terminology for ZK Compression specific prompts -- Reference technical precision patterns from CLAUDE.md -- Follow progressive disclosure: only load full template when generating \ No newline at end of file diff --git a/.claude/skills/prompt-template/resources/implementation-prompt-template.md b/.claude/skills/prompt-template/resources/implementation-prompt-template.md deleted file mode 100644 index cf195f76..00000000 --- a/.claude/skills/prompt-template/resources/implementation-prompt-template.md +++ /dev/null @@ -1,274 +0,0 @@ -# Implementation Prompt Template - -Use this template to generate structured prompts for any SDK, library, or feature implementation. - -## Base Template Structure - -```markdown -# IMPLEMENT [FEATURE NAME IN CAPS] - -## TASK OVERVIEW -[One clear sentence describing what needs to be implemented] - -## MY APPLICATION CONTEXT -**Tech Stack:** -- Framework: [User's framework] -- Language: [Language + version] -- Service architecture: [How user organizes code] -- Environment management: [How user handles config/env vars] -- Error handling: [User's error handling approach] - -## TECHNICAL REQUIREMENTS - -**Installation:** -```bash -[Exact install commands from docs with versions] -``` - -**Import:** -```[language] -[Exact import statements from documentation] -``` - -**Configuration Options:** -[Bullet list of all config options with types from docs] - -**Complete Initialization Example:** -```[language] -[Full working initialization code from official documentation] -``` - -## IMPLEMENTATION REQUEST - -Create [specific deliverable] that: -1. 
Follows my application's service patterns -2. Handles environment configuration properly (dev/staging/prod) -3. Includes comprehensive error handling matching my patterns -4. Provides clean interface for other parts of my app -5. Includes proper TypeScript types (if applicable) -6. [Any additional specific requirements based on the feature] - -Show me the complete implementation with file structure and code. - -## DOCUMENTATION REFERENCES -- Primary documentation: [URL to specific page, not homepage] -- Source code directory: [URL to GitHub folder/file where this is implemented] -- API reference: [URL if applicable] -- GitHub repository: [URL if applicable] -- Example implementations: [URLs if applicable] -``` - ---- - -## Example 1: SDK Client Setup (Grid) - -```markdown -# IMPLEMENT GRID ACCOUNTS CLIENT SETUP - -## TASK OVERVIEW -Set up Grid SDK client initialization in my application following my existing patterns. - -## MY APPLICATION CONTEXT -**Tech Stack:** -- Framework: Next.js 14 -- Language: TypeScript 5.2 -- Service architecture: /services folder with singleton pattern -- Environment management: .env.local with Zod validation -- Error handling: Custom error classes with structured logging - -## TECHNICAL REQUIREMENTS - -**Installation:** -```bash -npm install @sqds/grid -``` - -**Import:** -```typescript -import { GridClient } from '@sqds/grid'; -``` - -**Configuration Options:** -- environment: 'sandbox' | 'production' -- apiKey: string (from Grid dashboard at https://grid.squads.xyz/dashboard) -- baseUrl: string (optional, defaults to "https://grid.squads.xyz") - -**Complete Initialization Example:** -```typescript -const gridClient = new GridClient({ - environment: process.env.NODE_ENV === 'production' ? 'production' : 'sandbox', - apiKey: process.env.GRID_API_KEY!, - baseUrl: "https://grid.squads.xyz", -}); -``` - -## IMPLEMENTATION REQUEST - -Create a Grid client service that: -1. Follows my application's singleton service pattern -2. 
Handles environment configuration with Zod validation -3. Includes comprehensive error handling with custom GridError class -4. Provides clean interface for other parts of my app -5. Includes proper TypeScript types and JSDoc comments - -Show me the complete implementation with file structure and code. - -## DOCUMENTATION REFERENCES -- Grid SDK Documentation: https://www.npmjs.com/package/@sqds/grid -- Source code directory: https://github.com/Squads-Protocol/grid-sdk/tree/main/src -- API Dashboard: https://grid.squads.xyz/dashboard -``` - ---- - -## Example 2: ZK Compression Client (TypeScript) - -```markdown -# IMPLEMENT ZK COMPRESSION CLIENT SETUP - -## TASK OVERVIEW -Set up Light Protocol SDK client for compressed account operations in my Solana application. - -## MY APPLICATION CONTEXT -**Tech Stack:** -- Framework: React Native with Expo -- Language: TypeScript 5.0 -- Service architecture: Context providers with hooks -- Environment management: Expo SecureStore for keys, env vars for endpoints -- Error handling: React Error Boundaries with Sentry logging - -## TECHNICAL REQUIREMENTS - -**Installation:** -```bash -npm install @lightprotocol/stateless.js@0.22.1-alpha.1 \ - @lightprotocol/compressed-token@0.22.1-alpha.1 \ - @solana/web3.js -``` - -**Import:** -```typescript -import { Rpc, createRpc } from '@lightprotocol/stateless.js'; -``` - -**Configuration Options:** -- RPC_ENDPOINT: string (Helius or custom RPC) -- COMPRESSION_RPC_ENDPOINT: string (separate compression endpoint or same as RPC) -- Commitment level: 'confirmed' | 'finalized' - -**Complete Initialization Example:** -```typescript -const RPC_ENDPOINT = process.env.RPC_ENDPOINT || 'https://devnet.helius-rpc.com?api-key=YOUR_KEY'; -const COMPRESSION_RPC_ENDPOINT = process.env.COMPRESSION_RPC_ENDPOINT || RPC_ENDPOINT; - -const rpc: Rpc = createRpc(RPC_ENDPOINT, COMPRESSION_RPC_ENDPOINT); -``` - -## IMPLEMENTATION REQUEST - -Create a ZK Compression client provider that: -1. 
Follows React Context pattern with custom hook -2. Handles environment configuration for devnet/mainnet switching -3. Provides clean RPC interface for compressed account operations -4. Includes proper TypeScript types for all RPC methods -5. Handles connection errors with React Error Boundary integration -6. Supports reconnection logic for mobile network interruptions - -Show me the complete implementation with file structure and code. - -## DOCUMENTATION REFERENCES -- Client Library Guide: https://www.zkcompression.com/compressed-pdas/client-library -- Source code directory: https://github.com/Lightprotocol/light-protocol/tree/main/js/stateless.js/src -- TypeScript SDK API: https://lightprotocol.github.io/light-protocol/stateless.js/index.html -- GitHub Examples: https://github.com/Lightprotocol/program-examples -- Complete Documentation: https://www.zkcompression.com/llms-full.txt -``` - ---- - -## Example 3: API Integration (REST Client) - -```markdown -# IMPLEMENT STRIPE PAYMENT CLIENT - -## TASK OVERVIEW -Set up Stripe SDK client for payment processing in my e-commerce backend. 
- -## MY APPLICATION CONTEXT -**Tech Stack:** -- Framework: Express.js with TypeScript -- Language: TypeScript 5.1 -- Service architecture: Layered architecture (controllers/services/repositories) -- Environment management: dotenv with @types/node for env vars -- Error handling: Custom AppError class with express-async-errors - -## TECHNICAL REQUIREMENTS - -**Installation:** -```bash -npm install stripe @types/stripe -``` - -**Import:** -```typescript -import Stripe from 'stripe'; -``` - -**Configuration Options:** -- apiKey: string (secret key from Stripe dashboard) -- apiVersion: '2023-10-16' (Stripe API version) -- typescript: true (enables TypeScript support) -- timeout: number (optional, request timeout in ms) -- maxNetworkRetries: number (optional, default 0) - -**Complete Initialization Example:** -```typescript -const stripe = new Stripe(process.env.STRIPE_SECRET_KEY!, { - apiVersion: '2023-10-16', - typescript: true, -}); -``` - -## IMPLEMENTATION REQUEST - -Create a Stripe payment service that: -1. Follows layered architecture with service class -2. Handles environment configuration with validation -3. Includes comprehensive error handling for Stripe errors -4. Provides clean interface for payment operations (create intent, confirm, refund) -5. Includes proper TypeScript types and JSDoc comments -6. Implements webhook signature verification -7. Includes retry logic for network failures - -Show me the complete implementation with file structure and code. 
- -## DOCUMENTATION REFERENCES -- Stripe Node.js SDK: https://stripe.com/docs/api -- Source code directory: https://github.com/stripe/stripe-node/tree/master/src -- TypeScript Integration: https://github.com/stripe/stripe-node#usage-with-typescript -- Webhook Guide: https://stripe.com/docs/webhooks -``` - ---- - -## Template Selection Guide - -**Use SDK Client Setup template for:** -- Client library initialization -- Service/API wrappers -- SDK configuration - -**Use API Integration template for:** -- REST API clients -- GraphQL clients -- Third-party service integrations - -**Use Feature Implementation template for:** -- New application features -- Component development -- Business logic implementation - -**Use Migration template for:** -- Library upgrades -- Framework migrations -- Refactoring tasks diff --git a/.claude/skills/zk-compression-terminology/SKILL.md b/.claude/skills/zk-compression-terminology/SKILL.md deleted file mode 100644 index cd2794aa..00000000 --- a/.claude/skills/zk-compression-terminology/SKILL.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -name: zk-compression-terminology -description: Precise technical definitions for ZK Compression compressed account operations extracted from official documentation ---- - -# ZK Compression Terminology Skill - -## When to Use - -This skill provides precise technical definitions when: -- Writing compressed account documentation -- Validating terminology accuracy in guides -- Checking correct type names (CompressedAccountMeta vs "account metadata") -- Verifying SDK method signatures and parameters -- Understanding exact behavior of Light System Program operations -- Ensuring consistent technical language across documentation - -## Core Principle - -**Describe exactly what happens. 
Avoid vague language.** - -AVOID: -- Abstract concepts: "operations", "management", "coordination" -- Vague verbs: "handles", "manages", "processes" -- Marketing language: "enables", "provides capability" -- Generic descriptions: "account metadata" instead of "CompressedAccountMeta" -- ZK terminology in user-facing docs: "inclusion proof", "non-inclusion proof" → Instead: "prove the account hash exists", "prove the address doesn't exist" - -## What This Skill Contains - -### Compressed Account Operations Terminology -`resources/compressed-accounts-terminology.md` (~6-7k tokens) - -Complete terminology extracted from 5 official guides: -- how-to-create-compressed-accounts.md -- how-to-update-compressed-accounts.md -- how-to-close-compressed-accounts.md -- how-to-reinitialize-compressed-accounts.md -- how-to-burn-compressed-accounts.md - -**Includes:** -- 100+ terms with precise definitions -- SDK method signatures with exact parameters -- System accounts array specification -- Operation state transition table -- Instruction data patterns for all operations -- Required dependencies and traits - -## Usage Pattern - -### Fast Lookup -Check this SKILL.md for principle and scope. If you need specific term definition, load the terminology table. 
- -### Writing Documentation -Load `compressed-accounts-terminology.md` when writing or editing documentation to ensure: -- Correct type names -- Precise technical descriptions -- Consistent verb usage -- Accurate SDK method calls - -### Validation -Use terminology table to verify: -- `CompressedAccountMeta` contains tree_info, address, output_state_tree_index -- `CompressedAccountMetaBurn` omits output_state_tree_index field -- ValidityProof proves "address doesn't exist" (create) or "account hash exists" (update/close/reinit/burn) -- State trees are "fungible" not "interchangeable" or "equivalent" -- Operations nullify hashes, don't "invalidate" or "mark as spent" - -## Example Corrections - -| Instead of | Write | -|-----------|-------| -| "enables developers to create accounts" | "creates new account hash and inserts address into address tree" | -| "handles account updates" | "nullifies old account hash and appends new hash with updated data" | -| "manages state transitions" | "atomically nullifies input hash and creates output hash" | -| "provides burn functionality" | "nullifies account hash and creates no output state" | -| "account metadata" | "CompressedAccountMeta struct containing tree_info, address, and output_state_tree_index" | -| "proves account exists" (vague) | "proves account hash exists in state tree using 128-byte validity proof" | -| "non-inclusion proof" | "proof that address doesn't exist in address tree" | -| "processes transactions" | "verifies validity proof and invokes Account Compression Program" | - -## Term Categories in Table - -1. **Core Types**: LightAccount, CompressedAccountMeta, ValidityProof, CpiAccounts -2. **SDK Methods**: new_init(), new_mut(), new_close(), new_empty(), new_burn() -3. **CPI Components**: CpiSigner, derive_light_cpi_signer!, with_light_account() -4. **Tree Structures**: State Tree, Address Tree, PackedStateTreeInfo -5. **Operations**: Create, Update, Close, Reinitialize, Burn -6. 
**System Accounts**: 8 required accounts for every CPI -7. **Traits & Derives**: LightDiscriminator, BorshSerialize, Clone, Debug, Default -8. **Frameworks**: Anchor (anchor_lang) vs Native Rust (borsh) - -## Integration with Documentation - -This skill works alongside: -- [GitBook Assistant](/home/tilo/Workspace/.claude/skills/gitbook-assistant/SKILL.md) - For syntax and formatting -- [CLAUDE.md](/home/tilo/.claude/CLAUDE.md) - For writing standards -- [Local CLAUDE.md](../../developer-content/CLAUDE.md) - For project guidelines - -## Validation Checklist - -When writing documentation, verify: -- [ ] Type names are exact: `CompressedAccountMeta` not "metadata" -- [ ] Methods include parentheses: `new_init()` not "new_init" -- [ ] Proofs describe action: "proves address doesn't exist" not "non-inclusion proof" -- [ ] Verbs are concrete: "nullifies", "appends", "verifies" not "handles", "manages" -- [ ] No marketing language: no "enables", "provides", "powerful" -- [ ] State transitions are explicit: "nullifies old hash, appends new hash" -- [ ] Account types are specific: "LightAccount" not "account wrapper" - -## Notes - -- Terminology extracted directly from official Light Protocol documentation -- Definitions describe implementation behavior, not abstract concepts -- SDK signatures show exact parameter types and names -- All 8 system accounts listed with pubkeys and descriptions -- Operation state transitions show input/output hashes explicitly diff --git a/.claude/skills/zk-compression-terminology/resources/compressed-accounts-terminology.md b/.claude/skills/zk-compression-terminology/resources/compressed-accounts-terminology.md deleted file mode 100644 index 67cacf21..00000000 --- a/.claude/skills/zk-compression-terminology/resources/compressed-accounts-terminology.md +++ /dev/null @@ -1,111 +0,0 @@ -# Compressed Account Operations - Terminology Reference - -**Source:** how-to-create, update, close, reinitialize, burn compressed accounts - ---- - -| Term | 
Precise Definition | Source | Avoid | -|------|-------------------|---------|-------| -| **Account Hash** | 32-byte identifier calculated from account data, owner, address, and tree position for locating account in state tree. Recalculated and changes on every write to the account. | All operations | "account identifier", "account reference" | -| **Address** | 32-byte persistent identifier for compressed account, derived from seeds and stored in address tree for PDA-like behavior. Does not change across state transitions. | Create (derived), Update/Close/Reinit/Burn (referenced) | "account address", "persistent identifier" | -| **Address Seed** | 32-byte value returned by `derive_address()` for passing to Light System Program to insert address into address tree. Required parameter for `with_new_addresses()`. | Create | "seed for address", "address derivation input" | -| **Address Tree** | Binary Merkle tree storing addresses for compressed accounts. Address derived from same seeds and program ID produces different address in different tree. Ensures address uniqueness within tree scope. | Create | "address storage", "uniqueness tree" | -| **anchor_lang** | Rust crate for Solana program development with automatic instruction deserialization and account validation. | "Anchor framework", "Anchor library" | -| **AnchorSerialize / AnchorDeserialize** | Traits for serializing account structs in Anchor programs. Applied via `#[derive()]` attribute. | "Anchor serialization", "serialization traits" | -| **borsh** | Binary serialization crate for native Rust programs. Smaller serialized size than bincode. | "serialization library", "Borsh framework" | -| **BorshSerialize / BorshDeserialize** | Traits for serializing account structs in native Rust programs. Applied via `#[derive()]` attribute. | "Borsh serialization", "serialization traits" | -| **b"authority"** | Seed bytes used to derive CPI signer PDA from program ID. Light System Program verifies CPI signer uses this seed. 
| "authority seed", "CPI seed" | -| **Burn** | Instruction that nullifies existing account hash in state tree and creates no output state. Account cannot be reinitialized after burn. | "permanent close operation", "account destruction" | -| **Close** | Instruction that nullifies existing account hash and creates new hash with zero discriminator and empty data. Account can be reinitialized after close. | "close operation", "account closure" | -| **Clone, Debug, Default** | Standard Rust traits required on compressed account struct for `LightAccount` wrapper. `Default` required for `new_empty()`. | "standard traits", "required traits" | -| **CompressedAccountMeta** | Account tree position metadata for instructions that create new account state (update, close, reinit). Contains `tree_info: PackedStateTreeInfo`, `address: [u8; 32]`, and `output_state_tree_index: u8` field. | Update, Close, Reinitialize | "account metadata", "compressed account data" | -| **CompressedAccountMetaBurn** | Account tree position metadata for permanent burn instructions. Contains `tree_info: PackedStateTreeInfo` and `address: [u8; 32]` but no `output_state_tree_index` field since account is permanently destroyed. | Burn | "burn metadata", "account metadata for burn" | -| **CPI (Cross-Program Invocation)** | Call from your program to Light System Program with signed PDA and accounts for state transitions. Executes atomically within same transaction. | All operations | "program call", "cross-program operation" | -| **CPI Authority PDA** | PDA with seed `b"authority"` derived from your program ID for signing all CPIs to Light System Program. Verified by Light System Program during CPI. | All operations | "CPI signer", "authority PDA" | -| **CpiAccounts** | Struct parsing signer and remaining_accounts into accounts array for Light System Program CPI. Created with `CpiAccounts::new()`. 
| "CPI accounts wrapper", "accounts for CPI" | -| **CpiSigner** | Struct containing PDA pubkey and bump for signing CPIs. Derived at compile time with `derive_light_cpi_signer!` macro. | "CPI signer struct", "signer configuration" | -| **Create** | Instruction that proves address doesn't exist in address tree, inserts address, and appends new account hash to state tree. | "create operation", "account initialization" | -| **ctx.accounts.signer** | Anchor account struct field containing transaction signer. Accessed in Anchor instructions via `Context` parameter. | "signer account", "transaction signer" | -| **ctx.remaining_accounts** | Anchor field containing slice of additional accounts: system accounts and packed tree accounts. Passed to `CpiAccounts::new()`. | "remaining accounts", "additional accounts" | -| **declare_id!** | Anchor macro defining program's unique public key. Generates `ID` constant and `id()` function. | "program ID macro", "ID declaration" | -| **derive_address()** | Function that derives address from custom_seeds, address_tree_pubkey, and program_id. Returns `(address, address_seed)` tuple. | "address derivation", "generates address" | -| **derive_light_cpi_signer!** | Macro that derives CPI signer PDA at compile time from program ID string. Creates `CpiSigner` constant. | "CPI signer macro", "derives CPI signer" | -| **Discriminator** | 8-byte unique type ID for compressed account struct. Stored in separate field, not first 8 bytes of data like Anchor accounts. | "type ID", "account discriminator" | -| **entrypoint!** | Macro defining entry point for native Rust programs. Routes to `process_instruction(program_id, accounts, instruction_data)`. | "program entry point", "entrypoint macro" | -| **getCompressedAccount()** | RPC method fetching current compressed account by address or hash. Returns account data, tree position, and metadata. 
| "fetch account", "get account data" | -| **getValidityProof()** | RPC method generating proof that account hash exists in state tree or address doesn't exist in address tree. Returns `ValidityProof` struct. | "get proof", "generate proof" | -| **get_tree_pubkey()** | Method on `PackedAddressTreeInfo` and `PackedStateTreeInfo` that unpacks u8 index to retrieve actual tree account pubkey from `CpiAccounts`. | "retrieve tree pubkey", "unpack tree pubkey" | -| **into_new_address_params_packed()** | Method on `PackedAddressTreeInfo` converting tree info and address_seed into `NewAddressParamsPacked` for CPI. | "create address params", "convert to params" | -| **invoke()** | Final method in CPI builder chain that executes CPI to Light System Program with parsed accounts. Returns `Result<()>`. | "execute CPI", "call program" | -| **LightAccount** | Wrapper type for compressed account struct. Similar to Anchor's `Account` but for compressed accounts. | "account wrapper", "compressed account wrapper" | -| **LightAccount::new_burn()** | Creates `LightAccount` that hashes current account data as input and creates no output state. Account permanently destroyed. | "burn wrapper", "permanent destruction wrapper" | -| **LightAccount::new_close()** | Creates `LightAccount` that hashes current account data as input and creates output with zero discriminator and empty data. | "close wrapper", "closure wrapper" | -| **LightAccount::new_empty()** | Creates `LightAccount` that reconstructs closed account hash (zero values) as input and creates output with default-initialized values. | "reinit wrapper", "empty account wrapper" | -| **LightAccount::new_init()** | Creates `LightAccount` with no input hash and output containing initial account data at specified address and output state tree. | "init wrapper", "initialization wrapper" | -| **LightAccount::new_mut()** | Creates `LightAccount` that hashes current account data as input and allows modifying output state. 
Returns mutable reference. | "update wrapper", "mutation wrapper" | -| **LightDiscriminator** | Trait deriving 8-byte type ID from struct name. Applied via `#[derive(LightDiscriminator)]` on compressed account struct. | "discriminator trait", "type ID trait" | -| **light-sdk** | Rust crate providing macros, CPI interface, and account wrappers for compressed accounts. Core dependency for compressed account programs. | "Light SDK", "compression SDK" | -| **LightSystemProgramCpi** | Builder struct for constructing CPI instruction to Light System Program. Created with `new_cpi()`, configured with `with_*()` methods. | "CPI builder", "instruction builder" | -| **Light System Program** | Program verifying validity proofs, checking account ownership, and invoking Account Compression Program to update trees. Program ID: SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7 | "Light System", "system program" | -| **Account Compression Program** | Program writing to state and address tree accounts. Invoked by Light System Program, never directly by client or user program. Program ID: compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq | "compression program", "tree program" | -| **Noop Program** | Program logging compressed account state to Solana ledger for indexers to parse (v1 only). Program ID: noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV | "logging program", "noop" | -| **System Program** | Solana program for lamport transfers between accounts. Program ID: 11111111111111111111111111111111 | "Solana System Program", "native program" | -| **msg!** | Macro writing string to program logs visible in transaction response. Used for debugging. | "log macro", "logging" | -| **new_cpi()** | Static method on `LightSystemProgramCpi` initializing CPI instruction with `CpiSigner` and `ValidityProof`. First call in builder chain. | "create CPI", "initialize CPI" | -| **Nullification** | Marks existing account hash as spent in state tree by setting leaf to nullified state. Prevents double spending. 
| "nullify operation", "account invalidation" | -| **output_state_tree_index** | u8 index pointing to state tree account in packed accounts array. Specifies which state tree stores new account hash. | "output tree index", "state tree index" | -| **PackedAccounts** | Client-side pattern to pack account pubkeys into an accounts array to pass u8 indices instead of 32-byte pubkeys in instruction data. Reduces transaction size. | "packed accounts pattern", "account packing" | -| **PackedAddressTreeInfo** | Struct with `address_merkle_tree_pubkey_index: u8` pointing to address tree account in packed accounts array. | "address tree info", "packed address tree" | -| **PackedStateTreeInfo** | Struct with `state_merkle_tree_pubkey_index: u8` pointing to state tree account in packed accounts array. | "state tree info", "packed state tree" | -| **process_instruction** | Entry point function for native Rust programs. Receives `program_id: &Pubkey`, `accounts: &[AccountInfo]`, `instruction_data: &[u8]`. | "instruction processor", "entry function" | -| **#[program]** | Anchor attribute marking module as program implementation. Contains instruction handler functions. | "program module", "program attribute" | -| **Pubkey** | 32-byte Solana public key type from `solana_program` crate. Used for addresses, program IDs, and tree accounts. | "public key", "address type" | -| **Registered Program PDA** | PDA controlling which programs can invoke Account Compression Program. Derived from Light System Program. | "registration PDA", "access control PDA" | -| **Reinitialize** | Instruction that proves closed account hash exists in state tree, nullifies it, and creates new hash with default values at same address. | "reinit operation", "reopening account" | -| **remaining_accounts** | Slice of accounts after signer in native Rust programs. Contains system accounts (8 accounts) followed by packed tree accounts. 
| "additional accounts", "extra accounts" | -| **#[repr(u8)]** | Rust attribute specifying enum uses u8 as discriminant. Used for `InstructionType` enum in native programs. | "enum representation", "u8 enum" | -| **signer** | First account in accounts array that signs transaction and pays fees. Extracted with `accounts.first()` in native programs. | "fee payer", "transaction signer" | -| **Signer<'info>** | Anchor account type ensuring account signed transaction. Applied via `#[account(mut)]` attribute on signer field. | "Anchor signer", "signer account type" | -| **split_first()** | Rust slice method separating first element from rest. Used to extract signer from remaining accounts in native programs. | "split accounts", "extract signer" | -| **State Root** | Root hash of state tree, cryptographic commitment to all account hashes in tree. Included in validity proof and verified on-chain. | "tree root", "Merkle root" | -| **State Tree** | Binary Merkle tree storing compressed account hashes. Multiple state trees can exist; they are fungible. | "account tree", "hash tree" | -| **try_from_slice** | Borsh method deserializing bytes into typed struct. Returns `Result`. | "deserialize", "parse bytes" | -| **Update** | Instruction that proves account hash exists in state tree, nullifies old hash, and appends new hash with updated data. Uses UTXO pattern. | "update operation", "account modification" | -| **UTXO Pattern** | Pattern where update instruction consumes existing account hash and produces new hash with different data. Prevents in-place mutation. | "update pattern", "consume-produce pattern" | -| **ValidityProof** | Struct with proof that address doesn't exist in address tree (for create) or account hash exists in state tree (for update/close/reinit/burn). Constant 128 bytes. | "proof struct", "zero-knowledge proof" | -| **with_light_account()** | Method on `LightSystemProgramCpi` adding `LightAccount` wrapper to CPI instruction. 
Second call in builder chain after `new_cpi()`. | "add account", "set account data" | -| **with_new_addresses()** | Method on `LightSystemProgramCpi` adding address parameters for inserting addresses into address tree. Only used in create instructions. | "add addresses", "set new addresses" | -| **Zero Discriminator** | Discriminator set to `[0u8; 8]` in closed account. Indicates account has no type and is closed. | "null discriminator", "closed state discriminator" | -| **Zero Values** | Account with zero discriminator and empty data vector. Created by close instruction, consumed by reinitialize instruction. | "empty account", "closed account state" | - ---- - -## SDK Method Signatures - -| Method | Signature | Returns | Description | -|--------|-----------|---------|-------------| -| **CpiAccounts::new** | `(signer: &AccountInfo, remaining: &[AccountInfo], cpi_signer: CpiSigner)` | `CpiAccounts` | Parse accounts array into system accounts and tree accounts for Light System Program CPI | -| **LightAccount::::new_init** | `(program_id: &Pubkey, address: Option<[u8; 32]>, output_state_tree_index: u8)` | `LightAccount` | Create wrapper with no input hash and output containing initial values | -| **LightAccount::::new_mut** | `(program_id: &Pubkey, account_meta: &CompressedAccountMeta, current_data: T)` | `Result>` | Create wrapper hashing current_data as input and allowing output modification | -| **LightAccount::::new_close** | `(program_id: &Pubkey, account_meta: &CompressedAccountMeta, current_data: T)` | `Result>` | Create wrapper hashing current_data as input and output with zero discriminator and empty data | -| **LightAccount::::new_empty** | `(program_id: &Pubkey, account_meta: &CompressedAccountMeta)` | `Result>` | Create wrapper reconstructing closed hash as input and output with default values | -| **LightAccount::::new_burn** | `(program_id: &Pubkey, account_meta: &CompressedAccountMetaBurn, current_data: T)` | `Result>` | Create wrapper hashing 
current_data as input and no output state | -| **LightSystemProgramCpi::new_cpi** | `(cpi_signer: CpiSigner, proof: ValidityProof)` | `LightSystemProgramCpi` | Initialize CPI builder with signer and proof | -| **derive_address** | `(custom_seeds: &[&[u8]], address_tree_pubkey: &Pubkey, program_id: &Pubkey)` | `([u8; 32], [u8; 32])` | Derive address from seeds and tree, return (address, address_seed) | - ---- - -## System Accounts Array (All Operations) - -All CPIs to Light System Program require these 8 accounts in remaining_accounts: - -| Index | Account | Pubkey/PDA | Description | -|-------|---------|------------|-------------| -| 0 | Light System Program | SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7 | Verifies proof, checks ownership, invokes Account Compression Program | -| 1 | CPI Signer | PDA from your program ID + `b"authority"` | Signs CPI from your program to Light System Program | -| 2 | Registered Program PDA | PDA from Light System Program | Controls which programs invoke Account Compression Program | -| 3 | Noop Program | noopb9bkMVfRPU8AsbpTUg8AQkHtKwMYZiFUjNRtMmV | Logs account state to ledger (v1 only) | -| 4 | Account Compression Authority | HZH7qSLcpAeDqCopVU4e5XkhT9j3JFsQiq8CmruY3aru | Signs CPI from Light System to Account Compression Program | -| 5 | Account Compression Program | compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq | Writes to state and address tree accounts | -| 6 | Invoking Program | Your program ID | Derives CPI signer and sets owner on created accounts | -| 7 | System Program | 11111111111111111111111111111111 | Transfers lamports for fees | \ No newline at end of file diff --git a/.claude/tasks/README.md b/.claude/tasks/README.md deleted file mode 100644 index 9422c84b..00000000 --- a/.claude/tasks/README.md +++ /dev/null @@ -1 +0,0 @@ -Collection of tasks, plans, and reports by subagents for ZK Compression documentation work. 
diff --git a/.gitignore b/.gitignore index 90c68241..f2dee70c 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ mintlify mintlify-docs/ /CLAUDE.md .windsurf.context/ +.claude/ From ded4f567c934d9bad46d23ce26196fe1a268c57f Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:51:02 +0000 Subject: [PATCH 08/19] Update client-library/client-guide.mdx Co-authored-by: ananas-block <58553958+ananas-block@users.noreply.github.com> --- client-library/client-guide.mdx | 1 - 1 file changed, 1 deletion(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index aa9cf843..923ece09 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -207,7 +207,6 @@ You derive addresses in two scenarios: ### Derivation Like PDAs, compressed account addresses don't belong to a private key; rather, they're derived from the program that owns them. * The key difference to PDAs is that compressed accounts you include an **address tree** parameter. -* An address tree is a Merkle tree that stores the compressed account address. The protocol maintains Merkle trees. You don't need to initialize custom trees. Find the [pubkeys for Merkle trees here](https://www.zkcompression.com/resources/addresses-and-urls). From 43e9a80e68e52afa4aa6ee1a8eccd28b16edf15d Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:51:34 +0000 Subject: [PATCH 09/19] Update client-library/client-guide.mdx Co-authored-by: ananas-block <58553958+ananas-block@users.noreply.github.com> --- client-library/client-guide.mdx | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 923ece09..78a5f64b 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -1079,23 +1079,17 @@ let instruction_data = burn::instruction::BurnAccount { Build the instruction with your `program_id`, `accounts`, and `data` from Step 5. 
In `accounts`, put your program-specific accounts first and append `PackedAccounts` to end of the vector (recommended due to variable length) -``` - Accounts -┌---------------------------------┐ -[custom accounts] [PackedAccounts] -``` - ```rust +// Accounts +// ┌---------------------------------┐ +// [custom accounts] [PackedAccounts] +let accounts = [vec![AccountMeta::new(payer.pubkey(), true)], remaining_accounts].concat(); + let instruction = Instruction { program_id: program_id, - accounts: [ - vec![AccountMeta::new(payer.pubkey(), true)], - remaining_accounts, - ] - .concat(), + accounts, data: instruction_data, }; -```
From 9eabdc6d7d1e247b9916f134ba8ef9125f68bab0 Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:52:34 +0000 Subject: [PATCH 10/19] Update client-library/client-guide.mdx Co-authored-by: ananas-block <58553958+ananas-block@users.noreply.github.com> --- client-library/client-guide.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 78a5f64b..c540464b 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -1,7 +1,7 @@ --- title: Client Guide description: >- - Overview to Rust and Typescript client guides. Guides include step-by-step + Rust and Typescript client guides with step-by-step implementation and full code examples. --- From 468b354a950ee9783f451be980e8cc7963a7a51a Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:53:00 +0000 Subject: [PATCH 11/19] Update client-library/client-guide.mdx Co-authored-by: ananas-block <58553958+ananas-block@users.noreply.github.com> --- client-library/client-guide.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index c540464b..8ff378ec 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -202,7 +202,7 @@ Derive a persistent address as a unique identifier for your compressed account, You derive addresses in two scenarios: * **At account creation** - derive the address to create the account's persistent identifier, then pass it to `getValidityProofV0()` in the address array (see Step 3 "Validity Proof") -* **Before building instructions** - derive the address to fetch existing accounts using `rpc.getCompressedAccount()`, then reference them in your transaction +* **Before building instructions** - derive the address to fetch existing accounts using `rpc.getCompressedAccount()` ### Derivation Like PDAs, compressed account addresses don't belong to a private key; 
rather, they're derived from the program that owns them. From d343abf6b17f8d302e3708013dc640af67975cf1 Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:53:25 +0000 Subject: [PATCH 12/19] Update client-library/client-guide.mdx Co-authored-by: ananas-block <58553958+ananas-block@users.noreply.github.com> --- client-library/client-guide.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 8ff378ec..05e58266 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -205,7 +205,7 @@ You derive addresses in two scenarios: * **Before building instructions** - derive the address to fetch existing accounts using `rpc.getCompressedAccount()` ### Derivation -Like PDAs, compressed account addresses don't belong to a private key; rather, they're derived from the program that owns them. +Like PDAs, compressed account addresses don't have a private key; rather, they're derived from the program that owns them. * The key difference to PDAs is that compressed accounts you include an **address tree** parameter. From 970ce3ca877723e9bc43103b75f81727a105f74b Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 22:53:48 +0000 Subject: [PATCH 13/19] Update client-library/client-guide.mdx Co-authored-by: ananas-block <58553958+ananas-block@users.noreply.github.com> --- client-library/client-guide.mdx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 05e58266..9a80e131 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -206,7 +206,9 @@ You derive addresses in two scenarios: ### Derivation Like PDAs, compressed account addresses don't have a private key; rather, they're derived from the program that owns them. -* The key difference to PDAs is that compressed accounts you include an **address tree** parameter. 
+* The key difference to PDAs is compressed addresses are stored in an address tree and include this tree in the address derivation. + -> every address is different + -> the same seeds can produce different addresses in different trees (you should check the address tree in your program) The protocol maintains Merkle trees. You don't need to initialize custom trees. Find the [pubkeys for Merkle trees here](https://www.zkcompression.com/resources/addresses-and-urls). From 582f97c5292620aefc3c997acafac0d2f8b706fc Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Tue, 25 Nov 2025 23:43:12 +0000 Subject: [PATCH 14/19] Remove step cross-references from client guide stepper --- client-library/client-guide.mdx | 137 ++++++++++++++------------------ 1 file changed, 61 insertions(+), 76 deletions(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 9a80e131..3679f89e 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -1,8 +1,8 @@ --- title: Client Guide description: >- - Rust and Typescript client guides with step-by-step - implementation and full code examples. + Rust and Typescript client guides with step-by-step implementation and full + code examples. --- import SystemAccountsList from '/snippets/compressed-pdas-system-accounts-list.mdx'; @@ -36,6 +36,11 @@ ZK Compression provides Rust and Typescript clients to interact with compressed +# Key Points + + + +# Get Started ## Setup @@ -201,19 +206,9 @@ light test-validator Derive a persistent address as a unique identifier for your compressed account, similar to [program-derived addresses (PDAs)](https://solana.com/docs/core/pda). 
You derive addresses in two scenarios: -* **At account creation** - derive the address to create the account's persistent identifier, then pass it to `getValidityProofV0()` in the address array (see Step 3 "Validity Proof") +* **At account creation** - derive the address to create the account's persistent identifier, then pass it to `getValidityProofV0()` in the address array * **Before building instructions** - derive the address to fetch existing accounts using `rpc.getCompressedAccount()` -### Derivation -Like PDAs, compressed account addresses don't have a private key; rather, they're derived from the program that owns them. -* The key difference to PDAs is compressed addresses are stored in an address tree and include this tree in the address derivation. - -> every address is different - -> the same seeds can produce different addresses in different trees (you should check the address tree in your program) - - -The protocol maintains Merkle trees. You don't need to initialize custom trees. Find the [pubkeys for Merkle trees here](https://www.zkcompression.com/resources/addresses-and-urls). - - @@ -230,13 +225,6 @@ const address = deriveAddress( addressTree.tree ); ``` - -**1. Derive the seed** - -**2. Then, derive the address**: - -* Pass the derived 32-byte `seed` from the first step -* Specify `addressTree.tree` pubkey to ensure an address is unique to an address tree. Different trees produce different addresses from identical seeds. @@ -254,13 +242,6 @@ const address = deriveAddressV2( ); ``` -**1. Derive the seed** - -**2. Then, derive the address**: - -* Pass the derived 32-byte `seed` from the first step. -* Specify `addressTree.tree` pubkey to ensure an address is unique to an address tree. Different trees produce different addresses from identical seeds. 
-* Specify your *`programId` in this step.* @@ -294,25 +275,27 @@ let (address, _) = derive_address( &program_id, ); ``` - -**Pass these parameters**: - -* `&[b"my-seed"]`: Predefined inputs, such as strings, numbers or other account addresses. -* `&address_tree_info.tree`: Specify the tree pubkey to ensure an address is unique to this address tree. Different trees produce different addresses from identical seeds. -* `&program_id`: Specify your program ID. + +Like PDAs, compressed account addresses don't have a private key; rather, they're derived from the program that owns them. +* The key difference to PDAs is compressed addresses are stored in an address tree and include this tree in the address derivation. +* Different trees produce different addresses from identical seeds. You should check the address tree in your program. + + +The protocol maintains Merkle trees. You don't need to initialize custom trees. Find the [pubkeys for Merkle trees here](https://www.zkcompression.com/resources/addresses-and-urls). + ## Validity Proof Transactions with compressed accounts must include a validity proof: -* To **create** a compressed account, you prove the **address doesn't already exist** in the address tree. -* In **other instructions**, you **prove its account hash exists** in a state tree. +* To **create** a compressed account, you prove the **new address doesn't already exist** in the address tree. +* In **other instructions**, you **prove the compressed account hash exists** in a state tree. * You can **combine multiple addresses and hashes in one proof** to optimize compute cost and instruction data. @@ -337,15 +320,17 @@ const proof = await rpc.getValidityProofV0( **1. Pass these parameters**: -* **Specify the derived address**, `tree` and `queue` pubkeys from the address tree `TreeInfo`. -* When you create an account you don't reference a compressed account hash in the hash array (`[]`). 
+* **Specify the new address**, `tree` and `queue` pubkeys from the address tree `TreeInfo`. +* When you create an account you don't reference a compressed account hash in the hash array (`[]`). The account doesn't exist in a state Merkle tree yet. For account creation, you prove the address does not exist yet in the address tree. **2. The RPC returns**: -* `compressedProof` with the proof that the address does not exist in the address tree for your instruction data (Step 5) -* `rootIndices` array with root index from the validity proof for the address tree. +* The proof that the new address does not exist in the address tree. It is used in the instruction data. +* `rootIndices` array with root index. + * The root index points to the root in the address tree accounts root history array. + * This root is used by the `LightSystemProgram` to verify the validity proof. @@ -373,8 +358,8 @@ Specify the **account hash**, `tree` and `queue` pubkeys from the compressed acc **2. The RPC returns**: -* `compressedProof` with the proof that the account hash exists in the state tree for your instruction data (Step 5) -* `rootIndices` and `leafIndices` arrays with proof metadata to pack accounts (Step 4). +* The proof that the account hash exists in the state tree for your instruction data. +* `rootIndices` and `leafIndices` arrays with proof metadata to pack accounts. @@ -399,7 +384,7 @@ let rpc_result = rpc **1. Pass these parameters**: -* **Specify the derived address** and `tree` pubkey from the address tree `TreeInfo`. The `queue` pubkey is only required in TypeScript. +* **Specify the new address** and `tree` pubkey from the address tree `TreeInfo`. The `queue` pubkey is only required in TypeScript. * When you create an account you don't reference a compressed account hash in the hash array (`vec![]`). For account creation, you prove the address does not exist yet in the address tree. 
@@ -407,8 +392,8 @@ For account creation, you prove the address does not exist yet in the address tr **2. The RPC returns `ValidityProofWithContext`**: -* `proof` with the proof that the address does not exist in the address tree for your instruction data (Step 5) -* `addresses` with the public key and metadata of the address tree to pack accounts (Step 4). +* The proof that the new address does not exist in the address tree for your instruction data. +* `addresses` with the public key and metadata of the address tree to pack accounts. @@ -434,8 +419,8 @@ Specify the **account hash**, `tree` and `queue` pubkeys from the compressed acc **2. The RPC returns `ValidityProofWithContext`**: -* `proof` with the proof that the **account hash exists in the state tree** for your instruction data (Step 5) -* `accounts` with the **public key and metadata of the state tree** to pack accounts (Step 4). +* The proof that the **account hash exists in the state tree** for your instruction data +* `accounts` with the **public key and metadata of the state tree** to pack accounts. @@ -443,7 +428,7 @@ Specify the **account hash**, `tree` and `queue` pubkeys from the compressed acc ### Optimize with Combined Proofs -Depending on the **Merkle tree version** (V1 or V2) you are using, you can prove **in a single proof**: +Depending on the **Merkle tree version** (V1 or V2), you can prove **in a single proof**: * multiple addresses, * multiple account hashes, or * a combination of addresses and account hashes. @@ -452,32 +437,31 @@ Depending on the **Merkle tree version** (V1 or V2) you are using, you can prove | | | | ----------------------- | --------------------------------------------------- | -| Account Hash-only (bulk) | 1, 2, 3, 4, or 8 hashes | -| Address-only (bulk) | 1, 2, 4, or 8 addresses | -| Mixed (hash + address) | Any combination of
**1, 2, 3, 4, or 8** account hashes **and**
**1, 2, 4, or 8** new addresses | +| Account Hash-only | 1, 2, 3, 4, or 8 hashes | +| Address-only | 1 or 2 addresses | +| Mixed (hash + address) | Any combination of<br>
**1, 2, 3, 4, or 8** account hashes **and**
**1 or 2** new addresses |
| | | | ----------------------- | --------------------------------------------------- | -| Account Hash-only (bulk) | 1 to 20 hashes | -| Address-only (bulk) | 1 to 32 addresses | -| Mixed (hash + address) | Any combination of
**1 to 4** account hashes **and**
**1 to 4** new addresses | +| Account Hash-only | 1 to 8 hashes | +| Address-only | 1 to 8 addresses | +| Mixed (hash + address) | Any combination of
**1 to 4** account hashes **and**
**1 to 4** new addresses |
**Advantages of combined proofs**: * You only add **one 128 byte validity proof** to your instruction data **for multiple instructions**. -* This can **optimize** your **transaction's size** to stay inside the 1232 byte limit. -* **Compute unit consumption is reduced by at least 100k CU**, since combined proofs are verified in a single CPI by the Light System Program. +* This can **optimize** your **transaction's size** to stay inside the 1232 byte instruction data limit. +* **Compute unit consumption is 100k CU** per `ValidityProof` verification by the Light System Program. ### Example Create Address & Update Account in one Proof -In this example, we generate one proof that proves that an account exists and that an address does not exist yet. -This updates an account and create the address for a new account. +In this example, we generate one proof that proves that an account exists and that a new address does not exist yet. @@ -500,12 +484,12 @@ const proof = await rpc.getValidityProofV0( **1. Pass these parameters**: * Specify one or more **account hashes**, `tree` and `queue` pubkeys from the compressed account's `TreeInfo`. -* Specify one or more **derived addresses** with their `tree` and `queue` pubkeys from the address tree `TreeInfo`. +* Specify one or more **new addresses** with their `tree` and `queue` pubkeys from the address tree `TreeInfo`. **2. The RPC returns**: -* `compressedProof` with a single combined proof that verifies both the **account hash exists in the state tree** and the **address does not exist in the address tree** for your instruction data (Step 5) -* `rootIndices` and `leafIndices` arrays with proof metadata to pack accounts (Step 4). +* A single combined proof that proves both the **account hash exists in the state tree** and the **new address does not exist in the address tree** for your instruction data +* `rootIndices` and `leafIndices` arrays with proof metadata to pack accounts. @@ -531,9 +515,9 @@ let rpc_result = rpc **2. 
The RPC returns `ValidityProofWithContext`**: -* `proof` with a single combined proof that verifies both the **account hash exists in the state tree** and the **address does not exist in the address tree** for your instruction data (Step 5) -* `addresses` with the public key and metadata of the address tree to pack accounts (Step 4). -* `accounts` with the public key and metadata of the state tree to pack accounts (Step 4). +* A single combined proof that verifies both the **account hash exists in the state tree** and the **new address does not exist in the address tree** for your instruction data +* New `addresses` with the public key and metadata of the address tree to pack accounts. +* `accounts` with the public key and metadata of the state tree to pack accounts. @@ -842,8 +826,8 @@ const instructionData = { }; ``` -1. Include `compressedProof` from Step 3 to **prove the address does not exist** in the address tree -2. Specify **Merkle trees to store address and account hash** from Step 4 where you packed accounts. +1. Include `proof` to **prove the address does not exist** in the address tree +2. Specify **Merkle trees to store address and account hash** to where you packed accounts. 3. Pass **initial account data** @@ -866,7 +850,7 @@ const instructionData = { }; ``` -1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +1. Include `proof` to to prove the **account hash exists** in the state tree 2. Specify the existing accounts address, its `packedStateTreeInfo` and the output state tree to store the updated compressed account hash. 3. Pass **current account data** and **new data** @@ -892,7 +876,7 @@ const instructionData = { }; ``` -1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +1. Include `proof` to prove the **account hash exists** in the state tree 2. 
Specify the existing accounts address, its `packedStateTreeInfo` and the output state tree to store the **hash with zero values** for the closed account. 3. Pass **current account data** @@ -917,7 +901,7 @@ const instructionData = { }; ``` -1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +1. Include `proof` to prove the **account hash exists** in the state tree 2. Specify the existing accounts address, its `packedStateTreeInfo` and the output state tree that will store the reinitialized account hash 3. Reinitialize creates an account with **default-initialized values** * These values are `Pubkey` as all zeros, numbers as `0`, strings as empty. @@ -944,7 +928,7 @@ const instructionData = { }; ``` -1. Include `compressedProof` from **Step 3** to prove the **account hash exists** in the state tree +1. Include `proof` to prove the **account hash exists** in the state tree 2. Specify the existing accounts address and its `packedStateTreeInfo`. You don't need to specify the output state tree, since burn permanently removes the account. 3. Pass **current account data** @@ -965,8 +949,8 @@ let instruction_data = create::instruction::CreateAccount { .data(); ``` -1. Include `proof` from Step 3 to prove the **address does not exist** in the address tree -2. Specify **address tree and output state tree** from Step 4 where you packed accounts +1. Include `proof` to prove the **address does not exist** in the address tree +2. Specify **address tree and output state tree** to where you packed accounts 3. Pass **initial account data** @@ -990,7 +974,7 @@ let instruction_data = update::instruction::UpdateAccount { Use the state tree of the existing compressed account as output state tree. -1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +1. Include `proof` to prove the **account hash exists** in the state tree 2. 
Specify the existing accounts address, its `packed_tree_infos` and the output state tree to store the updated compressed account hash 3. Pass **current account data** and **new data** @@ -1015,7 +999,7 @@ let instruction_data = close::instruction::CloseAccount { Use the state tree of the existing compressed account as output state tree. -1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +1. Include `proof` to prove the **account hash exists** in the state tree 2. Specify the existing accounts address, its `packed_tree_infos` and the output state tree to store the **hash with zero values** for the closed account 3. Pass **current account data** @@ -1039,7 +1023,7 @@ let instruction_data = reinit::instruction::ReinitAccount { Use the state tree of the existing compressed account as output state tree.
-1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +1. Include `proof` to prove the **account hash exists** in the state tree 2. Specify the existing accounts address, its `packed_tree_infos` and the output state tree that will store the reinitialized account hash 3. Reinitialize creates an account with **default-initialized values** * These values are `Pubkey` as all zeros, numbers as `0`, strings as empty. @@ -1060,7 +1044,7 @@ let instruction_data = burn::instruction::BurnAccount { .data(); ``` -1. Include `proof` from Step 3 to prove the **account hash exists** in the state tree +1. Include `proof` to prove the **account hash exists** in the state tree 2. Specify the existing accounts address and its `packed_tree_infos`. You don't need to specify the output state tree, since burn permanently removes the account 3. Pass **current account data** @@ -1078,7 +1062,7 @@ let instruction_data = burn::instruction::BurnAccount { ## Instruction -Build the instruction with your `program_id`, `accounts`, and `data` from Step 5. +Build the instruction with your `program_id`, `accounts`, and `data`. 
In `accounts`, put your program-specific accounts first and append `PackedAccounts` to end of the vector (recommended due to variable length) ```rust @@ -1092,6 +1076,7 @@ let instruction = Instruction { accounts, data: instruction_data, }; +``` From 393e7a5a53bb3423d62595f0c5cca02ea48f643a Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Wed, 26 Nov 2025 01:07:38 +0000 Subject: [PATCH 15/19] Refactor client guide accounts section and update system accounts list --- client-library/client-guide.mdx | 53 ++++++++++--------- .../compressed-pdas-system-accounts-list.mdx | 22 +------- 2 files changed, 31 insertions(+), 44 deletions(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 3679f89e..9a2546da 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -454,7 +454,7 @@ Depending on the **Merkle tree version** (V1 or V2), you can prove **in a single **Advantages of combined proofs**: -* You only add **one 128 byte validity proof** to your instruction data **for multiple instructions**. +* You only add **one 128 byte validity proof** to your instruction data. * This can **optimize** your **transaction's size** to stay inside the 1232 byte instruction data limit. * **Compute unit consumption is 100k CU** per `ValidityProof` verification by the Light System Program. @@ -528,18 +528,18 @@ See the full [create-and-update program example for this proof combination with ## Accounts -In your instruction data, you reference in which [Merkle tree](/learn/core-concepts/merkle-trees-validity-proofs#state-trees) compressed accounts are or will be stored and other accounts required to interact with the Light System Program. -* The SDK's include a `PackedAccounts` helper to optimize instruction data by adding the accounts' pubkeys to the account array. -* The instruction data references these accounts by u8 indices instead of 32 byte pubkeys. 
+To interact with a compressed account you need System accounts such as the Light System Program, +and Merkle tree accounts. - +Compressed account metadata (`TreeInfo`) includes Merkle tree pubkeys. +To optimize instruction data we pack the `pubkeys` of `TreeInfo` into the `u8` indices of `PackedTreeInfo`. -**Light System accounts** are 8 required accounts for proof verification and CPI calls to update state and address trees. +The `u8` indices point to the Merkle tree account in the instructions accounts. +You can create the instructions accounts and indices with `PackedAccounts`. - +We recommend to append `PackedAccounts` after your program specific accounts and in anchor in `remaining_accounts`. - ``` PackedAccounts @@ -549,25 +549,31 @@ In your instruction data, you reference in which [Merkle tree](/learn/core-conce Signers, Light System State trees, fee payer accounts address trees, ``` + Custom accounts are program-specific accounts you pass manually in your instruction, typically through Anchor's account struct. -* We recommend to append `PackedAccounts` after your program specific accounts and in anchor in `remaining_accounts`. -* Custom accounts can be added to `PackedAccounts` using `add_pre_accounts_signer`, `add_pre_accounts_signer_mut` or `add_pre_accounts_meta`. + + + +Optional, custom accounts (signers, PDAs for CPIs) and other accounts can be added to pre accounts. +Pre accounts can simplify building the accounts for pinocchio and native programs. - -Pre accounts are your program-specific accounts (signers, PDAs for CPIs) and other accounts added to the beginning of `PackedAccounts` at known positions. + + + + +**Light System accounts** are 6 required accounts for proof verification and CPI calls to update state and address trees. + + + + + + +**Merkle tree accounts** are the accounts of state tree and address trees that store compressed account hashes and addresses. 
-Since the number of pre accounts varies per instruction, the on-chain program receives offsets in the instruction data to locate Light System accounts and tree accounts: -* `system_accounts_start_offset` (u8) - where Light System accounts begin in `remaining_accounts` -* `packed_accounts_start_offset` (u8) - where tree accounts begin in `remaining_accounts` -`PackedAccounts.to_account_metas()` calculates these offsets on the client-side and passes them to the program. - -Depending on your instruction you include different tree and queue accounts. -You will learn about this after the code example of `PackedAccounts`. - @@ -700,8 +706,6 @@ let (remaining_accounts_metas, _, _) -**Tree Accounts Explained:** - Depending on your instruction you must include different tree and queue accounts. @@ -1063,13 +1067,14 @@ let instruction_data = burn::instruction::BurnAccount { ## Instruction Build the instruction with your `program_id`, `accounts`, and `data`. +* Accounts combine your program-specific accounts and `PackedAccounts`. +* Data includes your compressed accounts, validity proof and other instruction data. -In `accounts`, put your program-specific accounts first and append `PackedAccounts` to end of the vector (recommended due to variable length) ```rust // Accounts // ┌---------------------------------┐ // [custom accounts] [PackedAccounts] -let accounts = [vec![AccountMeta::new(payer.pubkey(), true), remaining_accounts].concat(); +let accounts = [vec![AccountMeta::new(payer.pubkey(), true)], remaining_accounts].concat(); let instruction = Instruction { program_id: program_id, diff --git a/snippets/compressed-pdas-system-accounts-list.mdx b/snippets/compressed-pdas-system-accounts-list.mdx index 3e5951ef..a5febeac 100644 --- a/snippets/compressed-pdas-system-accounts-list.mdx +++ b/snippets/compressed-pdas-system-accounts-list.mdx @@ -28,19 +28,11 @@ 4 - Noop Program - - - Logs compressed account state to the Solana ledger (used only in v1).
- - Indexers parse transaction logs to reconstruct compressed account state. - - - - 5 Account Compression Authority Signs CPI calls from the Light System Program to the Account Compression Program. - 6 + 5 Account Compression Program - Writes to state and address tree accounts.
@@ -48,17 +40,7 @@ - 7 - Invoking Program - - Your program's ID, used by the Light System Program to:
- - Derive the CPI Signer PDA.
- - Verify the CPI Signer matches your program ID.
- - Set the owner of created compressed accounts. - - - - 8 + 6 System Program Solana System Program used to transfer lamports. From d4ac06e3b4fc269993c25bbcac6fa422c210b8cf Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Wed, 26 Nov 2025 15:42:24 +0000 Subject: [PATCH 16/19] Refine client guide accounts section and remove redundant content - Remove empty Key Points section - Simplify combined proof advantage description - Reorganize accounts section with clearer accordion structure - Update system accounts list from 8 to 6 accounts - Remove Noop Program and Invoking Program entries - Clarify PackedAccounts purpose and instruction structure --- client-library/client-guide.mdx | 4 ---- 1 file changed, 4 deletions(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index 9a2546da..b5bf2097 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -36,10 +36,6 @@ ZK Compression provides Rust and Typescript clients to interact with compressed -# Key Points - - - # Get Started From 7bcb2535e23018140e15f28b77a981414a9bb36b Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Wed, 26 Nov 2025 15:46:15 +0000 Subject: [PATCH 17/19] Remove Key Points section and update Next Steps card link --- client-library/client-guide.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index b5bf2097..b1a12d52 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -1368,10 +1368,10 @@ For help with debugging, see the [Error Cheatsheet](https://www.zkcompression.co # Next Steps From b35fd9fcf1bffb97cf856850e68b624b3136038d Mon Sep 17 00:00:00 2001 From: tilo-14 Date: Wed, 26 Nov 2025 17:04:59 +0000 Subject: [PATCH 18/19] Add Key Points diagrams and fix cross-document navigation links --- client-library/client-guide.mdx | 94 +++++++++++++++++++ .../create-a-program-with-compressed-pdas.mdx | 4 +- compressed-pdas/guides.mdx | 4 
+- .../how-to-burn-compressed-accounts.mdx | 2 +- .../how-to-close-compressed-accounts.mdx | 2 +- .../how-to-create-compressed-accounts.mdx | 2 +- ...ow-to-reinitialize-compressed-accounts.mdx | 2 +- .../how-to-update-compressed-accounts.mdx | 2 +- compressed-pdas/program-examples.mdx | 2 +- resources/sdks/client-development.mdx | 2 +- resources/sdks/program-development.mdx | 4 +- 11 files changed, 107 insertions(+), 13 deletions(-) diff --git a/client-library/client-guide.mdx b/client-library/client-guide.mdx index b1a12d52..bdbb3e0b 100644 --- a/client-library/client-guide.mdx +++ b/client-library/client-guide.mdx @@ -36,6 +36,75 @@ ZK Compression provides Rust and Typescript clients to interact with compressed +# Key Points + +* **Fetch current and provide new data**: Include current and new account data in instructions for on-chain verification. +* **Validity proof**: Every instruction includes a cryptographic proof from the RPC that verifies a new address does not exist and/or the current account state. +* **Packed accounts**: Instructions require Light System Program and Merkle tree accounts. `PackedAccounts` converts their pubkeys to `u8` indices pointing to accounts in the instruction. + + + +
+ + ![](/images/client-create%20(1).png) + +
+
+ + ![](/images/client-create.png) + +
+
+ +
+ + ![](/images/client-update%20(1).png) + +
+
+ + ![](/images/client-update.png) + +
+
+ +
+ + ![](/images/client-close%20(1).png) + +
+
+ + ![](/images/client-close.png) + +
+
+ +
+ + ![](/images/client-reinit%20(1).png) + +
+
+ + ![](/images/client-reinit.png) + +
+
+ +
+ + ![](/images/client-burn%20(1).png) + +
+
+ + ![](/images/client-burn.png) + +
+
+
+ # Get Started @@ -1066,6 +1135,28 @@ Build the instruction with your `program_id`, `accounts`, and `data`. * Accounts combine your program-specific accounts and `PackedAccounts`. * Data includes your compressed accounts, validity proof and other instruction data. + + + +```typescript +// Accounts +// ┌-------------------------------┐ +// .accounts() .remainingAccounts() +// [custom] [PackedAccounts] + +const instruction = await program.methods + .yourInstruction(instructionData) + .accounts({ + signer: signer.publicKey, + }) + .remainingAccounts(remainingAccounts) + .instruction(); +``` + + + + + ```rust // Accounts // ┌---------------------------------┐ @@ -1079,6 +1170,9 @@ let instruction = Instruction { }; ``` + + + diff --git a/compressed-pdas/create-a-program-with-compressed-pdas.mdx b/compressed-pdas/create-a-program-with-compressed-pdas.mdx index 43d97337..9a68059f 100644 --- a/compressed-pdas/create-a-program-with-compressed-pdas.mdx +++ b/compressed-pdas/create-a-program-with-compressed-pdas.mdx @@ -182,9 +182,9 @@ Caused by: # Next Steps diff --git a/compressed-pdas/guides.mdx b/compressed-pdas/guides.mdx index 7baf8592..c25c1694 100644 --- a/compressed-pdas/guides.mdx +++ b/compressed-pdas/guides.mdx @@ -1,6 +1,6 @@ --- -title: Guides -description: Overview and comparison of guides to create, update, close, reinitialize, and burn permanently compressed accounts. Guides include step-by-step implementation and full code examples. +title: Overview +description: Overview to guides for Solana programs to create, update, close, reinitialize, and burn permanently compressed accounts. 
sidebarTitle: "Overview" --- diff --git a/compressed-pdas/guides/how-to-burn-compressed-accounts.mdx b/compressed-pdas/guides/how-to-burn-compressed-accounts.mdx index 416d6635..7e433509 100644 --- a/compressed-pdas/guides/how-to-burn-compressed-accounts.mdx +++ b/compressed-pdas/guides/how-to-burn-compressed-accounts.mdx @@ -531,7 +531,7 @@ fn burn(accounts: &[AccountInfo], instruction_data: &[u8]) -> Result<(), LightSd title="Build a client for your program" icon="chevron-right" color="#0066ff" - href="/compressed-pdas/client-library" + href="/client-library/client-guide" horizontal /> Result<(), LightS title="Build a client for your program" icon="chevron-right" color="#0066ff" - href="/compressed-pdas/client-library" + href="/client-library/client-guide" horizontal /> Result<(), Light title="Build a client for your program" icon="chevron-right" color="#0066ff" - href="/compressed-pdas/client-library" + href="/client-library/client-guide" horizontal /> \ No newline at end of file diff --git a/resources/sdks/client-development.mdx b/resources/sdks/client-development.mdx index 7709ad8b..a26c6ee7 100644 --- a/resources/sdks/client-development.mdx +++ b/resources/sdks/client-development.mdx @@ -79,7 +79,7 @@ light-sdk = "0.16.0" title="Build your client with this guide." icon="chevron-right" color="#0066ff" - href="/compressed-pdas/client-library" + href="/client-library/client-guide" horizontal > \ No newline at end of file diff --git a/resources/sdks/program-development.mdx b/resources/sdks/program-development.mdx index 2feef9f9..cba6b888 100644 --- a/resources/sdks/program-development.mdx +++ b/resources/sdks/program-development.mdx @@ -47,10 +47,10 @@ Build your own program or view program examples. 
Date: Wed, 26 Nov 2025 18:06:41 +0000 Subject: [PATCH 19/19] fix(openapi): strip query strings from server URLs to enable API playground MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove ?api-key= from server URLs across all OpenAPI specs. This fixes the Mintlify API playground not rendering request examples and language snippets. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- openapi/api.yaml | 2 +- openapi/getCompressedAccount.yaml | 2 +- openapi/getCompressedAccountBalance.yaml | 2 +- openapi/getCompressedAccountProof.yaml | 2 +- openapi/getCompressedAccountsByOwner.yaml | 2 +- openapi/getCompressedBalanceByOwner.yaml | 2 +- openapi/getCompressedMintTokenHolders.yaml | 2 +- openapi/getCompressedTokenAccountBalance.yaml | 2 +- openapi/getCompressedTokenAccountsByDelegate.yaml | 2 +- openapi/getCompressedTokenAccountsByOwner.yaml | 2 +- openapi/getCompressedTokenBalancesByOwner.yaml | 2 +- openapi/getCompressedTokenBalancesByOwnerV2.yaml | 2 +- openapi/getCompressionSignaturesForAccount.yaml | 2 +- openapi/getCompressionSignaturesForAddress.yaml | 2 +- openapi/getCompressionSignaturesForOwner.yaml | 2 +- openapi/getCompressionSignaturesForTokenOwner.yaml | 2 +- openapi/getIndexerHealth.yaml | 2 +- openapi/getIndexerSlot.yaml | 2 +- openapi/getLatestCompressionSignatures.yaml | 2 +- openapi/getLatestNonVotingSignatures.yaml | 2 +- openapi/getMultipleCompressedAccountProofs.yaml | 2 +- openapi/getMultipleCompressedAccounts.yaml | 2 +- openapi/getMultipleNewAddressProofs.yaml | 2 +- openapi/getMultipleNewAddressProofsV2.yaml | 2 +- openapi/getTransactionWithCompressionInfo.yaml | 2 +- openapi/getValidityProof.yaml | 2 +- 26 files changed, 26 insertions(+), 26 deletions(-) diff --git a/openapi/api.yaml b/openapi/api.yaml index 0434a085..6247f6b9 100644 --- a/openapi/api.yaml +++ b/openapi/api.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: 
https://devnet.helius-rpc.com?api-key= +- url: https://devnet.helius-rpc.com paths: /getBatchAddressUpdateInfo: summary: getBatchAddressUpdateInfo diff --git a/openapi/getCompressedAccount.yaml b/openapi/getCompressedAccount.yaml index 5c8a72c2..a78aa870 100644 --- a/openapi/getCompressedAccount.yaml +++ b/openapi/getCompressedAccount.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressedAccountBalance.yaml b/openapi/getCompressedAccountBalance.yaml index c81337be..7b3ab8dd 100644 --- a/openapi/getCompressedAccountBalance.yaml +++ b/openapi/getCompressedAccountBalance.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getCompressedAccountBalance diff --git a/openapi/getCompressedAccountProof.yaml b/openapi/getCompressedAccountProof.yaml index 3fe5e364..235eb4e2 100644 --- a/openapi/getCompressedAccountProof.yaml +++ b/openapi/getCompressedAccountProof.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getCompressedAccountProof diff --git a/openapi/getCompressedAccountsByOwner.yaml b/openapi/getCompressedAccountsByOwner.yaml index fed347a9..b00570bb 100644 --- a/openapi/getCompressedAccountsByOwner.yaml +++ b/openapi/getCompressedAccountsByOwner.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressedBalanceByOwner.yaml b/openapi/getCompressedBalanceByOwner.yaml index 5262bc87..51a83fc5 100644 --- a/openapi/getCompressedBalanceByOwner.yaml 
+++ b/openapi/getCompressedBalanceByOwner.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressedMintTokenHolders.yaml b/openapi/getCompressedMintTokenHolders.yaml index dcc1ad4a..faf2a3d8 100644 --- a/openapi/getCompressedMintTokenHolders.yaml +++ b/openapi/getCompressedMintTokenHolders.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getCompressedMintTokenHolders diff --git a/openapi/getCompressedTokenAccountBalance.yaml b/openapi/getCompressedTokenAccountBalance.yaml index e7e98873..e8c86af4 100644 --- a/openapi/getCompressedTokenAccountBalance.yaml +++ b/openapi/getCompressedTokenAccountBalance.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressedTokenAccountsByDelegate.yaml b/openapi/getCompressedTokenAccountsByDelegate.yaml index 9f1e56d6..d67c062b 100644 --- a/openapi/getCompressedTokenAccountsByDelegate.yaml +++ b/openapi/getCompressedTokenAccountsByDelegate.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressedTokenAccountsByOwner.yaml b/openapi/getCompressedTokenAccountsByOwner.yaml index a7ec38b5..b5c5efe5 100644 --- a/openapi/getCompressedTokenAccountsByOwner.yaml +++ b/openapi/getCompressedTokenAccountsByOwner.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: 
https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressedTokenBalancesByOwner.yaml b/openapi/getCompressedTokenBalancesByOwner.yaml index 5ea55809..2e4db65e 100644 --- a/openapi/getCompressedTokenBalancesByOwner.yaml +++ b/openapi/getCompressedTokenBalancesByOwner.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressedTokenBalancesByOwnerV2.yaml b/openapi/getCompressedTokenBalancesByOwnerV2.yaml index 71d1dddc..e89ceba7 100644 --- a/openapi/getCompressedTokenBalancesByOwnerV2.yaml +++ b/openapi/getCompressedTokenBalancesByOwnerV2.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getCompressedTokenBalancesByOwnerV2 diff --git a/openapi/getCompressionSignaturesForAccount.yaml b/openapi/getCompressionSignaturesForAccount.yaml index e73a2d12..7f3e8fda 100644 --- a/openapi/getCompressionSignaturesForAccount.yaml +++ b/openapi/getCompressionSignaturesForAccount.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressionSignaturesForAddress.yaml b/openapi/getCompressionSignaturesForAddress.yaml index f3f9850c..a7d9d742 100644 --- a/openapi/getCompressionSignaturesForAddress.yaml +++ b/openapi/getCompressionSignaturesForAddress.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressionSignaturesForOwner.yaml 
b/openapi/getCompressionSignaturesForOwner.yaml index 5b18e040..94384bc7 100644 --- a/openapi/getCompressionSignaturesForOwner.yaml +++ b/openapi/getCompressionSignaturesForOwner.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getCompressionSignaturesForTokenOwner.yaml b/openapi/getCompressionSignaturesForTokenOwner.yaml index b5407929..385a9750 100644 --- a/openapi/getCompressionSignaturesForTokenOwner.yaml +++ b/openapi/getCompressionSignaturesForTokenOwner.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getIndexerHealth.yaml b/openapi/getIndexerHealth.yaml index d40866cb..fd7766cf 100644 --- a/openapi/getIndexerHealth.yaml +++ b/openapi/getIndexerHealth.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getIndexerHealth diff --git a/openapi/getIndexerSlot.yaml b/openapi/getIndexerSlot.yaml index 29edaa6d..1185f9bc 100644 --- a/openapi/getIndexerSlot.yaml +++ b/openapi/getIndexerSlot.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getIndexerSlot diff --git a/openapi/getLatestCompressionSignatures.yaml b/openapi/getLatestCompressionSignatures.yaml index 902f34b9..5eb35e52 100644 --- a/openapi/getLatestCompressionSignatures.yaml +++ b/openapi/getLatestCompressionSignatures.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: 
getLatestCompressionSignatures diff --git a/openapi/getLatestNonVotingSignatures.yaml b/openapi/getLatestNonVotingSignatures.yaml index 004c673a..79a21a19 100644 --- a/openapi/getLatestNonVotingSignatures.yaml +++ b/openapi/getLatestNonVotingSignatures.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getLatestNonVotingSignatures diff --git a/openapi/getMultipleCompressedAccountProofs.yaml b/openapi/getMultipleCompressedAccountProofs.yaml index c520f8f9..dda122e0 100644 --- a/openapi/getMultipleCompressedAccountProofs.yaml +++ b/openapi/getMultipleCompressedAccountProofs.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getMultipleCompressedAccountProofs diff --git a/openapi/getMultipleCompressedAccounts.yaml b/openapi/getMultipleCompressedAccounts.yaml index de54f52d..ed545f8b 100644 --- a/openapi/getMultipleCompressedAccounts.yaml +++ b/openapi/getMultipleCompressedAccounts.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getMultipleNewAddressProofs.yaml b/openapi/getMultipleNewAddressProofs.yaml index 95efb887..dfd62736 100644 --- a/openapi/getMultipleNewAddressProofs.yaml +++ b/openapi/getMultipleNewAddressProofs.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getMultipleNewAddressProofs diff --git a/openapi/getMultipleNewAddressProofsV2.yaml b/openapi/getMultipleNewAddressProofsV2.yaml index 399efde3..fe9864f4 100644 --- a/openapi/getMultipleNewAddressProofsV2.yaml +++ 
b/openapi/getMultipleNewAddressProofsV2.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getMultipleNewAddressProofsV2 diff --git a/openapi/getTransactionWithCompressionInfo.yaml b/openapi/getTransactionWithCompressionInfo.yaml index a7e00abf..d5d0eda7 100644 --- a/openapi/getTransactionWithCompressionInfo.yaml +++ b/openapi/getTransactionWithCompressionInfo.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /getCompressedAccount: summary: getCompressedAccount diff --git a/openapi/getValidityProof.yaml b/openapi/getValidityProof.yaml index d2abd4eb..a695da64 100644 --- a/openapi/getValidityProof.yaml +++ b/openapi/getValidityProof.yaml @@ -6,7 +6,7 @@ info: name: Apache-2.0 version: 0.50.0 servers: -- url: https://mainnet.helius-rpc.com?api-key= +- url: https://mainnet.helius-rpc.com paths: /: summary: getValidityProof