diff --git a/Cargo.lock b/Cargo.lock index c97abb1319..a17411ce99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1390,6 +1390,7 @@ dependencies = [ "light-compressed-account", "light-compressed-token", "light-compressible", + "light-instruction-decoder", "light-program-test", "light-prover-client", "light-registry", @@ -1529,6 +1530,7 @@ dependencies = [ "anchor-lang", "light-compressed-account", "light-hasher", + "light-instruction-decoder", "light-sdk", "light-sdk-types", "light-system-program-anchor", @@ -1609,6 +1611,8 @@ dependencies = [ "light-compressible", "light-hasher", "light-heap", + "light-instruction-decoder", + "light-instruction-decoder-derive", "light-macros", "light-program-test", "light-sdk", @@ -1619,6 +1623,7 @@ dependencies = [ "light-token-client", "light-token-interface", "light-token-types", + "sha2 0.10.9", "solana-account", "solana-account-info", "solana-instruction", @@ -3763,6 +3768,37 @@ dependencies = [ "thiserror 2.0.17", ] +[[package]] +name = "light-instruction-decoder" +version = "0.1.0" +dependencies = [ + "borsh 0.10.4", + "bs58", + "light-compressed-account", + "light-instruction-decoder-derive", + "light-sdk-types", + "light-token-interface", + "serde", + "solana-instruction", + "solana-pubkey 2.4.0", + "solana-signature", + "tabled", +] + +[[package]] +name = "light-instruction-decoder-derive" +version = "0.1.0" +dependencies = [ + "bs58", + "darling", + "heck 0.5.0", + "light-instruction-decoder", + "proc-macro2", + "quote", + "sha2 0.10.9", + "syn 2.0.111", +] + [[package]] name = "light-macros" version = "2.2.0" @@ -3868,6 +3904,7 @@ dependencies = [ "light-hasher", "light-indexed-array", "light-indexed-merkle-tree", + "light-instruction-decoder", "light-merkle-tree-metadata", "light-merkle-tree-reference", "light-prover-client", @@ -5897,13 +5934,13 @@ dependencies = [ "light-client", "light-compressed-account", "light-hasher", + "light-instruction-decoder", "light-program-test", "light-prover-client", "light-sdk", 
"light-sdk-types", "light-test-utils", "serial_test", - "solana-pubkey 2.4.0", "solana-sdk", "tokio", ] @@ -5996,6 +6033,8 @@ dependencies = [ "light-compressed-account", "light-compressible", "light-hasher", + "light-instruction-decoder", + "light-instruction-decoder-derive", "light-program-profiler", "light-program-test", "light-sdk", @@ -6008,6 +6047,7 @@ dependencies = [ "light-zero-copy", "serial_test", "solana-account-info", + "solana-instruction", "solana-pubkey 2.4.0", "solana-sdk", "tokio", @@ -10205,6 +10245,7 @@ dependencies = [ "light-compressed-account", "light-compressed-token", "light-hasher", + "light-instruction-decoder", "light-merkle-tree-metadata", "light-program-test", "light-prover-client", diff --git a/Cargo.toml b/Cargo.toml index 73e4568774..bb645393a4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -33,6 +33,8 @@ members = [ "sdk-libs/sdk-types", "sdk-libs/photon-api", "sdk-libs/program-test", + "sdk-libs/instruction-decoder", + "sdk-libs/instruction-decoder-derive", "xtask", "program-tests/account-compression-test", "program-tests/batched-merkle-tree-test", @@ -132,6 +134,7 @@ pinocchio = { version = "0.9" } pinocchio-pubkey = { version = "0.3.0" } pinocchio-system = { version = "0.3.0" } bs58 = "^0.5.1" +sha2 = "0.10" litesvm = "0.7" # Anchor anchor-lang = { version = "0.31.1" } @@ -150,6 +153,7 @@ proc-macro2 = "1.0" quote = "1.0" syn = { version = "2.0", features = ["visit", "visit-mut", "full"] } darling = "0.21" +heck = "0.5" # Async ecosystem futures = "0.3.31" @@ -223,6 +227,8 @@ create-address-test-program = { path = "program-tests/create-address-test-progra "cpi", ] } light-program-test = { path = "sdk-libs/program-test", version = "0.18.0" } +light-instruction-decoder = { path = "sdk-libs/instruction-decoder", version = "0.1.0" } +light-instruction-decoder-derive = { path = "sdk-libs/instruction-decoder-derive", version = "0.1.0" } light-batched-merkle-tree = { path = "program-libs/batched-merkle-tree", version = "0.8.0" } 
light-merkle-tree-metadata = { path = "program-libs/merkle-tree-metadata", version = "0.8.0" } aligned-sized = { path = "program-libs/aligned-sized", version = "1.1.0" } diff --git a/program-tests/compressed-token-test/Cargo.toml b/program-tests/compressed-token-test/Cargo.toml index 9048593feb..8257a8c725 100644 --- a/program-tests/compressed-token-test/Cargo.toml +++ b/program-tests/compressed-token-test/Cargo.toml @@ -20,6 +20,7 @@ default = [] [dependencies] anchor-lang = { workspace = true } light-sdk = { workspace = true, features = ["anchor"] } +light-instruction-decoder = { workspace = true } [dev-dependencies] light-compressed-token = { workspace = true } diff --git a/program-tests/compressed-token-test/src/lib.rs b/program-tests/compressed-token-test/src/lib.rs index e2e0ef87bc..19cd646546 100644 --- a/program-tests/compressed-token-test/src/lib.rs +++ b/program-tests/compressed-token-test/src/lib.rs @@ -3,9 +3,11 @@ #![allow(deprecated)] use anchor_lang::{prelude::*, solana_program::instruction::Instruction}; +use light_instruction_decoder::instruction_decoder; declare_id!("CompressedTokenTestProgram11111111111111111"); +#[instruction_decoder] #[program] pub mod compressed_token_test { use super::*; diff --git a/program-tests/create-address-test-program/Cargo.toml b/program-tests/create-address-test-program/Cargo.toml index 8ca8807c53..fa98d87c87 100644 --- a/program-tests/create-address-test-program/Cargo.toml +++ b/program-tests/create-address-test-program/Cargo.toml @@ -27,3 +27,4 @@ account-compression = { workspace = true, features = ["cpi"] } light-compressed-account = { workspace = true, features = ["anchor"] } light-sdk = { workspace = true, features = ["anchor", "v2"] } light-sdk-types = { workspace = true } +light-instruction-decoder = { workspace = true } diff --git a/program-tests/create-address-test-program/src/lib.rs b/program-tests/create-address-test-program/src/lib.rs index cfba3c54be..8c24f68a18 100644 --- 
a/program-tests/create-address-test-program/src/lib.rs +++ b/program-tests/create-address-test-program/src/lib.rs @@ -8,6 +8,7 @@ use anchor_lang::{ solana_program::{instruction::Instruction, pubkey::Pubkey}, InstructionData, }; +use light_instruction_decoder::instruction_decoder; use light_sdk::{ cpi::{v2::CpiAccounts, CpiAccountsConfig, CpiSigner}, derive_light_cpi_signer, @@ -59,6 +60,7 @@ declare_id!("FNt7byTHev1k5x2cXZLBr8TdWiC3zoP5vcnZR4P682Uy"); pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("FNt7byTHev1k5x2cXZLBr8TdWiC3zoP5vcnZR4P682Uy"); +#[instruction_decoder] #[program] pub mod system_cpi_test { diff --git a/program-tests/system-cpi-test/Cargo.toml b/program-tests/system-cpi-test/Cargo.toml index 1e3bd46621..d4302e5d3e 100644 --- a/program-tests/system-cpi-test/Cargo.toml +++ b/program-tests/system-cpi-test/Cargo.toml @@ -35,6 +35,7 @@ light-merkle-tree-metadata = { workspace = true, features = ["anchor"] } light-account-checks = { workspace = true } light-sdk = { workspace = true, features = ["v2", "cpi-context"] } light-sdk-types = { workspace = true, features = ["v2", "cpi-context"] } +light-instruction-decoder = { workspace = true } [target.'cfg(not(target_os = "solana"))'.dependencies] solana-sdk = { workspace = true } diff --git a/program-tests/system-cpi-test/src/lib.rs b/program-tests/system-cpi-test/src/lib.rs index fdb4dce7ff..6f74aef26b 100644 --- a/program-tests/system-cpi-test/src/lib.rs +++ b/program-tests/system-cpi-test/src/lib.rs @@ -24,6 +24,7 @@ use light_compressed_account::{ data::{NewAddressParamsPacked, PackedReadOnlyAddress}, }, }; +use light_instruction_decoder::instruction_decoder; use light_sdk::derive_light_cpi_signer; use light_sdk_types::CpiSigner; @@ -32,6 +33,7 @@ declare_id!("FNt7byTHev1k5x2cXZLBr8TdWiC3zoP5vcnZR4P682Uy"); pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("FNt7byTHev1k5x2cXZLBr8TdWiC3zoP5vcnZR4P682Uy"); +#[instruction_decoder] #[program] pub mod system_cpi_test { diff 
--git a/sdk-libs/instruction-decoder-derive/CLAUDE.md b/sdk-libs/instruction-decoder-derive/CLAUDE.md new file mode 100644 index 0000000000..184932ad68 --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/CLAUDE.md @@ -0,0 +1,148 @@ +# light-instruction-decoder-derive + +Procedural macros for generating `InstructionDecoder` implementations. + +## Overview + +This crate provides two macros for generating instruction decoders: + +| Macro | Type | Purpose | +|-------|------|---------| +| `#[derive(InstructionDecoder)]` | Derive | Generate decoder for instruction enums | +| `#[instruction_decoder]` | Attribute | Auto-generate from Anchor program modules | + +## Module Structure + +``` +src/ +├── lib.rs # Macro entry points only (~100 lines) +├── utils.rs # Case conversion, discriminator, error handling +├── parsing.rs # Darling-based attribute parsing structs +├── builder.rs # InstructionDecoderBuilder (code generation) +├── derive_impl.rs # #[derive(InstructionDecoder)] implementation +├── attribute_impl.rs # #[instruction_decoder] attribute implementation +└── crate_context.rs # Recursive crate parsing for Accounts struct discovery +``` + +## Key Features + +### Multiple Discriminator Sizes + +- **1 byte**: Native programs with simple instruction indices +- **4 bytes**: System-style programs (little-endian u32) +- **8 bytes**: Anchor programs (SHA256 prefix, default) + +### Explicit Discriminators + +Two syntax forms for specifying explicit discriminators: + +1. **Integer**: `#[discriminator = 5]` - for 1-byte and 4-byte modes +2. **Array**: `#[discriminator(26, 16, 169, 7, 21, 202, 242, 25)]` - for 8-byte mode with custom discriminators + +### Account Names Extraction + +Two ways to specify account names: + +1. **Accounts type reference**: `accounts = MyAccountsStruct` - extracts field names at compile time +2. 
**Inline names**: Direct array `["source", "dest", "authority"]` + +When using `accounts = SomeType`, the macro uses `CrateContext` to parse the crate at macro expansion time and extract field names from the struct definition. This works for any struct with named fields (including standard Anchor `#[derive(Accounts)]` structs) without requiring any special trait implementation. + +### Off-chain Only + +All generated code is gated with `#[cfg(not(target_os = "solana"))]` since instruction decoding is only needed for logging/debugging. + +## Usage Examples + +### Derive Macro + +```rust +use light_instruction_decoder_derive::InstructionDecoder; + +#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "MyProgramId111111111111111111111111111111111", + program_name = "My Program", // optional + discriminator_size = 8 // optional: 1, 4, or 8 +)] +pub enum MyInstruction { + // Reference Accounts struct for account names + #[instruction_decoder(accounts = CreateRecord, params = CreateRecordParams)] + CreateRecord, + + // Inline account names + #[instruction_decoder(account_names = ["source", "dest"])] + Transfer, + + // Explicit integer discriminator (for 1-byte or 4-byte modes) + #[discriminator = 5] + Close, + + // Explicit array discriminator (for 8-byte mode with custom discriminators) + #[discriminator(26, 16, 169, 7, 21, 202, 242, 25)] + #[instruction_decoder(account_names = ["fee_payer", "authority"])] + CustomInstruction, +} +``` + +### Attribute Macro (Anchor Programs) + +```rust +use light_instruction_decoder_derive::instruction_decoder; + +#[instruction_decoder] // or #[instruction_decoder(program_id = crate::ID)] +#[program] +pub mod my_program { + pub fn create_record(ctx: Context, params: CreateParams) -> Result<()> { ... } + pub fn transfer(ctx: Context) -> Result<()> { ... 
} +} +``` + +This generates `MyProgramInstructionDecoder` that: +- Gets program_id from `crate::ID` (or `declare_id!` if found) +- Extracts function names and converts to discriminators +- Discovers Accounts struct field names from the crate +- Decodes params using borsh if specified + +## Architecture + +### Darling-Based Parsing + +Attributes are parsed using the `darling` crate for: +- Declarative struct-based definitions +- Automatic validation +- Better error messages with span preservation + +### Builder Pattern + +`InstructionDecoderBuilder` separates: +- **Parsing**: Extract and validate attributes +- **Code Generation**: Produce TokenStream output + +This follows the pattern from `sdk-libs/macros`. + +### Crate Context + +`CrateContext` recursively parses all module files at macro expansion time to discover structs by name. This enables both macros to automatically find field names: + +- **Derive macro**: When `accounts = SomeType` is specified, extracts struct field names +- **Attribute macro**: Discovers Accounts structs from `Context` parameters + +The struct lookup finds any struct with named fields - no special trait implementation required. This makes the macro completely independent and works with any Anchor program. 
+ +## Testing + +```bash +# Unit tests +cargo test -p light-instruction-decoder-derive + +# Integration tests (verifies generated code compiles and works) +cargo test-sbf -p csdk-anchor-full-derived-test --test instruction_decoder_test +``` + +## Dependencies + +- `darling`: Attribute parsing +- `syn/quote/proc-macro2`: Token manipulation +- `sha2`: Anchor discriminator computation +- `bs58`: Program ID encoding diff --git a/sdk-libs/instruction-decoder-derive/Cargo.toml b/sdk-libs/instruction-decoder-derive/Cargo.toml new file mode 100644 index 0000000000..6ba650d991 --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "light-instruction-decoder-derive" +version = "0.1.0" +description = "Derive macros for InstructionDecoder implementations in Light Protocol" +repository = "https://github.com/Lightprotocol/light-protocol" +license = "Apache-2.0" +edition = "2021" + +[dependencies] +bs58 = { workspace = true } +darling = { workspace = true } +heck = { workspace = true } +proc-macro2 = { workspace = true } +quote = { workspace = true } +sha2 = "0.10" +syn = { workspace = true } + +[dev-dependencies] +light-instruction-decoder = { workspace = true } + +[lib] +proc-macro = true diff --git a/sdk-libs/instruction-decoder-derive/src/attribute_impl.rs b/sdk-libs/instruction-decoder-derive/src/attribute_impl.rs new file mode 100644 index 0000000000..43b07951ef --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/src/attribute_impl.rs @@ -0,0 +1,325 @@ +//! Implementation of the `#[instruction_decoder]` attribute macro. +//! +//! This module provides the attribute macro that can be applied to Anchor program +//! modules to automatically generate InstructionDecoder implementations. 
+ +use proc_macro2::TokenStream as TokenStream2; +use quote::{format_ident, quote}; +use syn::ItemMod; + +use crate::{ + crate_context::CrateContext, + parsing::ModuleDecoderArgs, + utils::{compute_anchor_discriminator, to_pascal_case}, +}; + +/// Information about a single parameter +struct ParamInfo { + name: syn::Ident, + ty: syn::Type, +} + +/// Information about an instruction extracted from the module. +struct InstructionInfo { + /// Function name (snake_case) + name: String, + /// Account field names extracted from the Accounts struct + account_names: Vec, + /// All parameters after Context + params: Vec, +} + +/// Main implementation for the `#[instruction_decoder]` attribute macro. +/// +/// This extracts function names from an Anchor program module and generates +/// an InstructionDecoder implementation. +/// +/// # Errors +/// +/// Returns an error if: +/// - Input is not a module +/// - Module parsing fails +pub fn instruction_decoder_attr( + attr: TokenStream2, + item: TokenStream2, +) -> syn::Result { + let module: ItemMod = syn::parse2(item.clone())?; + + // Parse attribute arguments + let mut args = ModuleDecoderArgs::parse(attr)?; + + // Try to find declare_id! 
in module if program_id not specified + args.find_declare_id(&module)?; + + // Extract function info from the module + let instructions = extract_instruction_info(&module)?; + + if instructions.is_empty() { + // No functions found, just return the module as-is + return Ok(item); + } + + let module_name = &module.ident; + let decoder_name = format_ident!( + "{}InstructionDecoder", + to_pascal_case(&module_name.to_string()) + ); + + // Generate match arms for each instruction + let match_arms = generate_match_arms(&instructions); + + // Generate params structs for all instructions that have params + let params_structs: Vec = instructions + .iter() + .filter_map(|info| generate_params_struct(&info.name, &info.params)) + .collect(); + + // Get program ID and name + let program_id_source = args.program_id_source(); + let program_id_impl = program_id_source.program_id_impl(); + let program_name = args.program_name(&module_name.to_string()); + + // Generate the decoder struct and implementation + let decoder_impl = quote! { + #[cfg(not(target_os = "solana"))] + /// Generated InstructionDecoder for the program module (off-chain only) + pub struct #decoder_name; + + // Generated params structs for deserialization (off-chain only) + #( + #[cfg(not(target_os = "solana"))] + #params_structs + )* + + #[cfg(not(target_os = "solana"))] + impl light_instruction_decoder::InstructionDecoder for #decoder_name { + #program_id_impl + + fn program_name(&self) -> &'static str { + #program_name + } + + fn decode( + &self, + data: &[u8], + _accounts: &[light_instruction_decoder::solana_instruction::AccountMeta], + ) -> Option { + if data.len() < 8 { + return None; + } + + let discriminator: [u8; 8] = data[0..8].try_into().ok()?; + + match discriminator { + #(#match_arms)* + _ => None, + } + } + } + }; + + // Return the original module plus the generated decoder + Ok(quote! { + #item + #decoder_impl + }) +} + +/// Generate match arms for all instructions. 
+fn generate_match_arms(instructions: &[InstructionInfo]) -> Vec { + instructions + .iter() + .map(|info| { + let pascal_name = to_pascal_case(&info.name); + let discriminator = compute_anchor_discriminator(&info.name); + let disc_array = discriminator.iter(); + + // Generate params decoding code using the generated DecoderParams struct + let fields_code = if info.params.is_empty() { + quote! { Vec::new() } + } else { + let params_struct_name = format_ident!("{}DecoderParams", pascal_name); + // Generate field accessors for each parameter - use empty name to print value directly + let field_pushes: Vec = info.params.iter().map(|param| { + let field_name = ¶m.name; + quote! { + fields.push(light_instruction_decoder::DecodedField::new( + "", + format!("{:#?}", params.#field_name), + )); + } + }).collect(); + quote! { + let mut fields = Vec::new(); + if let Ok(params) = <#params_struct_name as borsh::BorshDeserialize>::try_from_slice(remaining) { + #(#field_pushes)* + } else if !remaining.is_empty() { + fields.push(light_instruction_decoder::DecodedField::new( + "data_len", + remaining.len().to_string(), + )); + } + fields + } + }; + + let account_names = &info.account_names; + if account_names.is_empty() { + quote! { + [#(#disc_array),*] => { + let remaining = &data[8..]; + let fields = { #fields_code }; + Some(light_instruction_decoder::DecodedInstruction::with_fields_and_accounts( + #pascal_name, + fields, + Vec::new(), + )) + } + } + } else { + quote! { + [#(#disc_array),*] => { + let remaining = &data[8..]; + let fields = { #fields_code }; + Some(light_instruction_decoder::DecodedInstruction::with_fields_and_accounts( + #pascal_name, + fields, + vec![#(#account_names.to_string()),*], + )) + } + } + } + }) + .collect() +} + +/// Extract public function information from an Anchor program module. 
+fn extract_instruction_info(module: &ItemMod) -> syn::Result> { + // Parse entire crate to find Accounts structs + let crate_ctx = CrateContext::parse_from_manifest()?; + + let mut instructions = Vec::new(); + + if let Some(ref content) = module.content { + for item in &content.1 { + if let syn::Item::Fn(func) = item { + // Only include public functions + if matches!(func.vis, syn::Visibility::Public(_)) { + let name = func.sig.ident.to_string(); + + // Extract Context type from first parameter and look up account names + let account_names = if let Some(type_name) = extract_context_type(&func.sig) { + crate_ctx + .get_struct_field_names(&type_name) + .unwrap_or_default() + } else { + Vec::new() + }; + + // Extract all parameters after Context + let params = extract_all_params(&func.sig); + + instructions.push(InstructionInfo { + name, + account_names, + params, + }); + } + } + } + } + + Ok(instructions) +} + +/// Extract the type name from Context in a function signature. +/// +/// Handles various patterns: +/// - `Context<'_, '_, '_, 'info, T<'info>>` -> "T" +/// - `Context` -> "T" +fn extract_context_type(sig: &syn::Signature) -> Option { + for input in &sig.inputs { + if let syn::FnArg::Typed(pat_type) = input { + if let syn::Type::Path(type_path) = &*pat_type.ty { + if let Some(segment) = type_path.path.segments.last() { + if segment.ident == "Context" { + if let syn::PathArguments::AngleBracketed(args) = &segment.arguments { + // Get the last type argument (accounts struct) + if let Some(syn::GenericArgument::Type(ty)) = args.args.last() { + return extract_type_name(ty); + } + } + } + } + } + } + } + None +} + +/// Extract the simple type name from a Type. +fn extract_type_name(ty: &syn::Type) -> Option { + match ty { + syn::Type::Path(type_path) => type_path.path.segments.last().map(|s| s.ident.to_string()), + _ => None, + } +} + +/// Extract ALL parameters after Context from a function signature. 
+/// +/// This mirrors how Anchor generates its instruction structs - iterating +/// all args after Context and generating a struct field for each. +/// We generate our own struct with Debug derive for decoding. +fn extract_all_params(sig: &syn::Signature) -> Vec { + let mut params = Vec::new(); + let mut found_context = false; + + for input in &sig.inputs { + if let syn::FnArg::Typed(pat_type) = input { + if let syn::Type::Path(type_path) = &*pat_type.ty { + if let Some(segment) = type_path.path.segments.last() { + if segment.ident == "Context" { + found_context = true; + continue; + } + } + } + if found_context { + if let syn::Pat::Ident(pat_ident) = &*pat_type.pat { + params.push(ParamInfo { + name: pat_ident.ident.clone(), + ty: (*pat_type.ty).clone(), + }); + } + } + } + } + params +} + +/// Generate a params struct for an instruction with Debug and BorshDeserialize. +/// +/// Returns None if the instruction has no parameters. +fn generate_params_struct(instruction_name: &str, params: &[ParamInfo]) -> Option { + if params.is_empty() { + return None; + } + + let struct_name = format_ident!("{}DecoderParams", to_pascal_case(instruction_name)); + + let fields: Vec = params + .iter() + .map(|param| { + let name = ¶m.name; + let ty = ¶m.ty; + quote! { pub #name: #ty } + }) + .collect(); + + Some(quote! { + #[derive(Debug, borsh::BorshDeserialize)] + struct #struct_name { + #(#fields),* + } + }) +} diff --git a/sdk-libs/instruction-decoder-derive/src/builder.rs b/sdk-libs/instruction-decoder-derive/src/builder.rs new file mode 100644 index 0000000000..20eaf2ac53 --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/src/builder.rs @@ -0,0 +1,569 @@ +//! Builder pattern for InstructionDecoder code generation. +//! +//! This module provides the `InstructionDecoderBuilder` which handles the core +//! code generation logic for both the derive macro and attribute macro. 
+ +use proc_macro2::TokenStream as TokenStream2; +use quote::{format_ident, quote}; +use syn::Fields; + +use crate::{ + crate_context::CrateContext, + parsing::{ + parse_explicit_discriminator, ExplicitDiscriminator, InstructionDecoderArgs, + VariantDecoderArgs, + }, + utils::{compute_anchor_discriminator, to_snake_case}, +}; + +/// Builder for generating InstructionDecoder implementations. +/// +/// Handles the code generation for decoder structs and trait implementations. +pub struct InstructionDecoderBuilder<'a> { + /// Parsed top-level attributes + args: &'a InstructionDecoderArgs, + /// Explicit discriminator values (indexed by variant position) + explicit_discriminators: Vec>, + /// Parsed program ID bytes as token stream + program_id_bytes: TokenStream2, + /// Crate context for resolving struct field names at compile time + crate_ctx: Option, +} + +impl<'a> InstructionDecoderBuilder<'a> { + /// Create a new builder from parsed arguments. + /// + /// # Errors + /// + /// Returns an error if program_id is invalid or discriminator_size is unsupported. + pub fn new(args: &'a InstructionDecoderArgs, input: &syn::DeriveInput) -> syn::Result { + // Validate arguments + args.validate()?; + + // Parse program ID bytes + let program_id_bytes = args.program_id_bytes(args.ident.span())?; + + // Parse explicit discriminators from variants + let explicit_discriminators = Self::parse_explicit_discriminators(input)?; + + // Try to parse CrateContext for account name resolution + // This allows us to extract struct field names at compile time + let crate_ctx = CrateContext::parse_from_manifest().ok(); + + Ok(Self { + args, + explicit_discriminators, + program_id_bytes, + crate_ctx, + }) + } + + /// Parse explicit discriminator attributes from all variants. 
+ fn parse_explicit_discriminators( + input: &syn::DeriveInput, + ) -> syn::Result>> { + match &input.data { + syn::Data::Enum(data_enum) => data_enum + .variants + .iter() + .map(parse_explicit_discriminator) + .collect(), + _ => Err(syn::Error::new_spanned( + input, + "InstructionDecoder can only be derived for enums", + )), + } + } + + /// Generate the complete decoder implementation. + pub fn generate(&self, input: &syn::DeriveInput) -> syn::Result { + let name = &self.args.ident; + let decoder_name = format_ident!("{}Decoder", name); + let program_name = self.args.display_name(); + + // Generate match arms + let match_arms = self.generate_match_arms(input)?; + + // Generate decoder based on discriminator size + let inner = self.generate_decoder_impl(&decoder_name, &program_name, &match_arms); + + // Wrap in cfg gate and module + let mod_name = format_ident!("__instruction_decoder_{}", name.to_string().to_lowercase()); + Ok(quote! { + #[cfg(not(target_os = "solana"))] + mod #mod_name { + use super::*; + #inner + } + #[cfg(not(target_os = "solana"))] + pub use #mod_name::#decoder_name; + }) + } + + /// Generate match arms for all variants. + fn generate_match_arms(&self, input: &syn::DeriveInput) -> syn::Result> { + let data_enum = match &input.data { + syn::Data::Enum(data) => data, + _ => { + return Err(syn::Error::new_spanned( + input, + "InstructionDecoder can only be derived for enums", + )) + } + }; + + let variants = self.args.variants(); + + data_enum + .variants + .iter() + .zip(variants.iter()) + .enumerate() + .map(|(idx, (variant, variant_args))| { + self.generate_match_arm(idx, variant, variant_args) + }) + .collect() + } + + /// Generate a single match arm for a variant. 
+ fn generate_match_arm( + &self, + index: usize, + variant: &syn::Variant, + variant_args: &VariantDecoderArgs, + ) -> syn::Result { + let instruction_name = variant.ident.to_string(); + // Pass crate_ctx to resolve struct field names at compile time + let account_names_code = variant_args.account_names_code(self.crate_ctx.as_ref()); + let fields_code = self.generate_fields_code(variant, variant_args)?; + + match self.args.discriminator_size { + 1 => { + let disc = match &self.explicit_discriminators[index] { + Some(ExplicitDiscriminator::U32(d)) => { + if *d > u8::MAX as u32 { + return Err(syn::Error::new( + variant.ident.span(), + format!( + "discriminator value {} exceeds u8::MAX (255) for 1-byte discriminator size", + d + ), + )); + } + *d as u8 + } + Some(ExplicitDiscriminator::Array(_)) => { + return Err(syn::Error::new( + variant.ident.span(), + "array discriminator not supported for 1-byte discriminator size", + )); + } + None => { + if index > u8::MAX as usize { + return Err(syn::Error::new( + variant.ident.span(), + format!( + "variant index {} exceeds u8::MAX (255) for 1-byte discriminator size", + index + ), + )); + } + index as u8 + } + }; + Ok(quote! { + #disc => { + let account_names: Vec = #account_names_code; + let fields = { #fields_code }; + Some(light_instruction_decoder::DecodedInstruction::with_fields_and_accounts( + #instruction_name, + fields, + account_names, + )) + } + }) + } + 4 => { + let disc = match &self.explicit_discriminators[index] { + Some(ExplicitDiscriminator::U32(d)) => *d, + Some(ExplicitDiscriminator::Array(_)) => { + return Err(syn::Error::new( + variant.ident.span(), + "array discriminator not supported for 4-byte discriminator size", + )); + } + None => { + if index > u32::MAX as usize { + return Err(syn::Error::new( + variant.ident.span(), + format!( + "variant index {} exceeds u32::MAX for 4-byte discriminator size", + index + ), + )); + } + index as u32 + } + }; + Ok(quote! 
{ + #disc => { + let account_names: Vec = #account_names_code; + let fields = { #fields_code }; + Some(light_instruction_decoder::DecodedInstruction::with_fields_and_accounts( + #instruction_name, + fields, + account_names, + )) + } + }) + } + 8 => { + // For 8-byte mode: check for explicit array discriminator first, + // then fall back to explicit u32, then to computed Anchor discriminator + let discriminator: [u8; 8] = match &self.explicit_discriminators[index] { + Some(ExplicitDiscriminator::Array(arr)) => *arr, + Some(ExplicitDiscriminator::U32(_)) => { + return Err(syn::Error::new( + variant.ident.span(), + "use array discriminator syntax #[discriminator = [a, b, ...]] for 8-byte discriminator size", + )); + } + None => { + // Fall back to computed Anchor discriminator + let snake_name = to_snake_case(&instruction_name); + compute_anchor_discriminator(&snake_name) + } + }; + let disc_array = discriminator.iter(); + Ok(quote! { + [#(#disc_array),*] => { + let account_names: Vec = #account_names_code; + let fields = { #fields_code }; + Some(light_instruction_decoder::DecodedInstruction::with_fields_and_accounts( + #instruction_name, + fields, + account_names, + )) + } + }) + } + _ => Err(syn::Error::new( + variant.ident.span(), + "unsupported discriminator size", + )), + } + } + + /// Generate field parsing code for a variant. + fn generate_fields_code( + &self, + variant: &syn::Variant, + variant_args: &VariantDecoderArgs, + ) -> syn::Result { + // If params type is specified, use borsh deserialization + if let Some(params_ty) = variant_args.params_type() { + // Check if pretty_formatter is specified + if let Some(formatter_path) = &variant_args.pretty_formatter { + return Ok(quote! 
{ + let mut fields = Vec::new(); + if let Ok(params) = <#params_ty as borsh::BorshDeserialize>::try_from_slice(remaining) { + // Call custom formatter with params and accounts + let formatted = #formatter_path(¶ms, accounts); + fields.push(light_instruction_decoder::DecodedField::new( + "", + formatted, + )); + } else if !remaining.is_empty() { + fields.push(light_instruction_decoder::DecodedField::new( + "data_len", + remaining.len().to_string(), + )); + } + fields + }); + } + + // Default: use Debug formatting + return Ok(quote! { + let mut fields = Vec::new(); + if let Ok(params) = <#params_ty as borsh::BorshDeserialize>::try_from_slice(remaining) { + fields.push(light_instruction_decoder::DecodedField::new( + "", + format!("{:#?}", params), + )); + } else if !remaining.is_empty() { + fields.push(light_instruction_decoder::DecodedField::new( + "data_len", + remaining.len().to_string(), + )); + } + fields + }); + } + + // Otherwise, generate native field parsing based on variant fields + generate_native_fields_code(variant) + } + + /// Generate the decoder struct and impl based on discriminator size. + fn generate_decoder_impl( + &self, + decoder_name: &syn::Ident, + program_name: &str, + match_arms: &[TokenStream2], + ) -> TokenStream2 { + let program_id_bytes = &self.program_id_bytes; + let disc_size = self.args.discriminator_size as usize; + + match self.args.discriminator_size { + 1 => quote! 
{ + /// Generated InstructionDecoder implementation + pub struct #decoder_name; + + impl light_instruction_decoder::InstructionDecoder for #decoder_name { + fn program_id(&self) -> light_instruction_decoder::solana_pubkey::Pubkey { + light_instruction_decoder::solana_pubkey::Pubkey::new_from_array(#program_id_bytes) + } + + fn program_name(&self) -> &'static str { + #program_name + } + + fn decode( + &self, + data: &[u8], + accounts: &[light_instruction_decoder::solana_instruction::AccountMeta], + ) -> Option { + if data.len() < #disc_size { + return None; + } + + let discriminator = data[0]; + let remaining = &data[1..]; + + match discriminator { + #(#match_arms)* + _ => None, + } + } + } + }, + 4 => quote! { + /// Generated InstructionDecoder implementation + pub struct #decoder_name; + + impl light_instruction_decoder::InstructionDecoder for #decoder_name { + fn program_id(&self) -> light_instruction_decoder::solana_pubkey::Pubkey { + light_instruction_decoder::solana_pubkey::Pubkey::new_from_array(#program_id_bytes) + } + + fn program_name(&self) -> &'static str { + #program_name + } + + fn decode( + &self, + data: &[u8], + accounts: &[light_instruction_decoder::solana_instruction::AccountMeta], + ) -> Option { + if data.len() < 4 { + return None; + } + + let discriminator = u32::from_le_bytes([data[0], data[1], data[2], data[3]]); + let remaining = &data[4..]; + + match discriminator { + #(#match_arms)* + _ => None, + } + } + } + }, + _ => quote! 
{ + /// Generated InstructionDecoder implementation + pub struct #decoder_name; + + impl light_instruction_decoder::InstructionDecoder for #decoder_name { + fn program_id(&self) -> light_instruction_decoder::solana_pubkey::Pubkey { + light_instruction_decoder::solana_pubkey::Pubkey::new_from_array(#program_id_bytes) + } + + fn program_name(&self) -> &'static str { + #program_name + } + + fn decode( + &self, + data: &[u8], + accounts: &[light_instruction_decoder::solana_instruction::AccountMeta], + ) -> Option { + if data.len() < 8 { + return None; + } + + let discriminator: [u8; 8] = data[0..8].try_into().ok()?; + let remaining = &data[8..]; + + match discriminator { + #(#match_arms)* + _ => None, + } + } + } + }, + } + } +} + +/// Generate field parsing code for native program instructions. +/// Parses fields based on their types (u8, u16, u32, u64, i64) using little-endian byte reading. +pub fn generate_native_fields_code(variant: &syn::Variant) -> syn::Result { + match &variant.fields { + Fields::Named(fields_named) => { + let mut field_parsers = Vec::new(); + let mut offset: usize = 0; + + for field in &fields_named.named { + let field_name = field.ident.as_ref().unwrap().to_string(); + let field_type = &field.ty; + let type_str = quote!(#field_type).to_string(); + + let (parser, size) = generate_field_parser(&field_name, &type_str, offset); + field_parsers.push(parser); + offset += size; + } + + Ok(quote! { + let mut fields = Vec::new(); + #(#field_parsers)* + fields + }) + } + Fields::Unnamed(fields_unnamed) => { + let mut field_parsers = Vec::new(); + let mut offset: usize = 0; + + for (i, field) in fields_unnamed.unnamed.iter().enumerate() { + let field_name = format!("arg{}", i); + let field_type = &field.ty; + let type_str = quote!(#field_type).to_string(); + + let (parser, size) = generate_field_parser(&field_name, &type_str, offset); + field_parsers.push(parser); + offset += size; + } + + Ok(quote! 
{ + let mut fields = Vec::new(); + #(#field_parsers)* + fields + }) + } + Fields::Unit => Ok(quote! { + let fields: Vec = Vec::new(); + fields + }), + } +} + +/// Generate parser code for a single field based on its type. +fn generate_field_parser(field_name: &str, type_str: &str, offset: usize) -> (TokenStream2, usize) { + match type_str { + "u8" => ( + quote! { + if remaining.len() > #offset { + let value = remaining[#offset]; + fields.push(light_instruction_decoder::DecodedField::new( + #field_name, + value.to_string(), + )); + } + }, + 1, + ), + "u16" => ( + quote! { + if remaining.len() > #offset + 1 { + let value = u16::from_le_bytes([ + remaining[#offset], + remaining[#offset + 1], + ]); + fields.push(light_instruction_decoder::DecodedField::new( + #field_name, + value.to_string(), + )); + } + }, + 2, + ), + "u32" => ( + quote! { + if remaining.len() > #offset + 3 { + let value = u32::from_le_bytes([ + remaining[#offset], + remaining[#offset + 1], + remaining[#offset + 2], + remaining[#offset + 3], + ]); + fields.push(light_instruction_decoder::DecodedField::new( + #field_name, + value.to_string(), + )); + } + }, + 4, + ), + "u64" => ( + quote! { + if remaining.len() > #offset + 7 { + let value = u64::from_le_bytes([ + remaining[#offset], + remaining[#offset + 1], + remaining[#offset + 2], + remaining[#offset + 3], + remaining[#offset + 4], + remaining[#offset + 5], + remaining[#offset + 6], + remaining[#offset + 7], + ]); + fields.push(light_instruction_decoder::DecodedField::new( + #field_name, + value.to_string(), + )); + } + }, + 8, + ), + "i64" => ( + quote! 
{ + if remaining.len() > #offset + 7 { + let value = i64::from_le_bytes([ + remaining[#offset], + remaining[#offset + 1], + remaining[#offset + 2], + remaining[#offset + 3], + remaining[#offset + 4], + remaining[#offset + 5], + remaining[#offset + 6], + remaining[#offset + 7], + ]); + fields.push(light_instruction_decoder::DecodedField::new( + #field_name, + value.to_string(), + )); + } + }, + 8, + ), + _ => ( + quote! { + fields.push(light_instruction_decoder::DecodedField::new( + #field_name, + format!("({}bytes)", remaining.len().saturating_sub(#offset)), + )); + }, + 0, + ), + } +} diff --git a/sdk-libs/instruction-decoder-derive/src/crate_context.rs b/sdk-libs/instruction-decoder-derive/src/crate_context.rs new file mode 100644 index 0000000000..bd5d6418f8 --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/src/crate_context.rs @@ -0,0 +1,319 @@ +//! Anchor-style crate context parser for `#[instruction_decoder]`. +//! +//! This module recursively reads all module files at macro expansion time, +//! allowing `#[instruction_decoder]` to discover all Anchor `#[derive(Accounts)]` structs +//! across the entire crate and extract their field names. +//! +//! Based on Anchor's `CrateContext::parse()` pattern from `anchor-syn/src/parser/context.rs`. + +use std::{ + collections::BTreeMap, + path::{Path, PathBuf}, +}; + +use syn::{Item, ItemStruct}; + +// ============================================================================= +// CRATE CONTEXT +// ============================================================================= + +/// Context containing all parsed modules in the crate. +pub struct CrateContext { + modules: BTreeMap, +} + +impl CrateContext { + /// Parse all modules starting from the crate root (lib.rs or main.rs). + /// + /// Uses `CARGO_MANIFEST_DIR` environment variable to locate the crate root. 
+ pub fn parse_from_manifest() -> syn::Result { + let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").map_err(|_| { + syn::Error::new( + proc_macro2::Span::call_site(), + "CARGO_MANIFEST_DIR not set - cannot parse crate context", + ) + })?; + + let src_dir = PathBuf::from(&manifest_dir).join("src"); + + // Try lib.rs first, then main.rs + let root_file = if src_dir.join("lib.rs").exists() { + src_dir.join("lib.rs") + } else if src_dir.join("main.rs").exists() { + src_dir.join("main.rs") + } else { + return Err(syn::Error::new( + proc_macro2::Span::call_site(), + format!("Could not find lib.rs or main.rs in {:?}", src_dir), + )); + }; + + Self::parse(&root_file) + } + + /// Parse all modules starting from a specific root file. + pub fn parse(root: &Path) -> syn::Result { + let modules = ParsedModule::parse_recursive(root, "crate")?; + Ok(CrateContext { modules }) + } + + /// Iterate over all struct items in all parsed modules. + pub fn structs(&self) -> impl Iterator { + self.modules.values().flat_map(|module| module.structs()) + } + + /// Get field names of a struct by its simple name (e.g., "CreateTwoMints"). + /// + /// Returns None if the struct is not found. + /// + /// # Limitations + /// + /// - **First match wins**: If multiple modules define structs with the same name, + /// this returns the first one found (iteration order is not guaranteed). + /// To avoid ambiguity, ensure struct names are unique across the crate. + /// + /// - **No derive validation**: This does not verify that the struct has + /// `#[derive(Accounts)]`. Any struct with a matching name will be used. + /// Ensure the struct name passed corresponds to an actual Accounts struct. 
+ pub fn get_struct_field_names(&self, struct_name: &str) -> Option> { + for item_struct in self.structs() { + if item_struct.ident == struct_name { + if let syn::Fields::Named(named_fields) = &item_struct.fields { + let field_names: Vec = named_fields + .named + .iter() + .filter_map(|f| f.ident.as_ref().map(|i| i.to_string())) + .collect(); + return Some(field_names); + } + } + } + None + } +} + +/// A parsed module containing its items. +pub struct ParsedModule { + /// All items in the module + items: Vec, +} + +impl ParsedModule { + /// Recursively parse all modules starting from a root file. + fn parse_recursive( + root: &Path, + module_path: &str, + ) -> syn::Result> { + let mut modules = BTreeMap::new(); + + // Read and parse the root file + let content = std::fs::read_to_string(root).map_err(|e| { + syn::Error::new( + proc_macro2::Span::call_site(), + format!("Failed to read {:?}: {}", root, e), + ) + })?; + + let file: syn::File = syn::parse_str(&content).map_err(|e| { + syn::Error::new( + proc_macro2::Span::call_site(), + format!("Failed to parse {:?}: {}", root, e), + ) + })?; + + let root_dir = root.parent().unwrap_or(Path::new(".")); + let root_name = root.file_stem().and_then(|s| s.to_str()).unwrap_or("root"); + + // Create the root module + let root_module = ParsedModule { + items: file.items.clone(), + }; + modules.insert(module_path.to_string(), root_module); + + // Process each item for nested modules + for item in &file.items { + if let Item::Mod(item_mod) = item { + let mod_name = item_mod.ident.to_string(); + let child_path = format!("{}::{}", module_path, mod_name); + + if let Some((_, items)) = &item_mod.content { + // Inline module: mod foo { ... 
} + let inline_module = ParsedModule { + items: items.clone(), + }; + modules.insert(child_path.clone(), inline_module); + + // For inline module's children, the directory is root_dir/mod_name + let inline_module_dir = root_dir.join(&mod_name); + + // Recursively process nested modules within inline modules + Self::process_inline_modules( + items, + &child_path, + &inline_module_dir, + &mut modules, + ); + } else { + // External module: mod foo; - need to find the file + if let Some(mod_file) = find_module_file(root_dir, root_name, &mod_name) { + // Recursively parse the external module + let child_modules = Self::parse_recursive(&mod_file, &child_path)?; + modules.extend(child_modules); + } + // If file not found, silently skip (might be a cfg'd out module) + } + } + } + + Ok(modules) + } + + /// Get all struct items in this module. + fn structs(&self) -> impl Iterator { + self.items.iter().filter_map(|item| { + if let Item::Struct(s) = item { + Some(s) + } else { + None + } + }) + } + + /// Recursively process inline modules to find nested module declarations. + /// + /// For inline modules like `mod foo { mod bar { ... } }`, this traverses + /// the nested structure and adds each module to the map. Also handles + /// external module references (`mod bar;`) within inline modules. 
+ /// + /// # Arguments + /// * `items` - Items in the current module + /// * `parent_path` - Module path prefix (e.g., "crate::foo") + /// * `module_dir` - Directory where children of this module level would be found + /// * `modules` - Map to insert discovered modules into + fn process_inline_modules( + items: &[Item], + parent_path: &str, + module_dir: &Path, + modules: &mut BTreeMap, + ) { + for item in items { + if let Item::Mod(item_mod) = item { + let mod_name = item_mod.ident.to_string(); + let child_path = format!("{}::{}", parent_path, mod_name); + + if let Some((_, nested_items)) = &item_mod.content { + // Nested inline module + let nested_module = ParsedModule { + items: nested_items.clone(), + }; + modules.insert(child_path.clone(), nested_module); + + // For inline module's children, the directory is module_dir/mod_name + let child_module_dir = module_dir.join(&mod_name); + + // Recursively process deeper nested modules + Self::process_inline_modules( + nested_items, + &child_path, + &child_module_dir, + modules, + ); + } else { + // External module: mod foo; - resolve using file system + // Inline modules act like mod.rs for their children's resolution + if let Some(mod_file) = find_module_file(module_dir, "mod", &mod_name) { + // Load and parse the external module file + if let Ok(content) = std::fs::read_to_string(&mod_file) { + if let Ok(file) = syn::parse_str::(&content) { + let external_module = ParsedModule { + items: file.items.clone(), + }; + modules.insert(child_path.clone(), external_module); + + // Determine the directory for this external module's children + let ext_mod_dir = mod_file.parent().unwrap_or(Path::new(".")); + let ext_mod_name = mod_file + .file_stem() + .and_then(|s| s.to_str()) + .unwrap_or("mod"); + + // Process nested modules within the external module + Self::process_inline_modules( + &file.items, + &child_path, + &if ext_mod_name == "mod" { + ext_mod_dir.to_path_buf() + } else { + ext_mod_dir.join(ext_mod_name) + }, + 
modules, + ); + } + } + // If file read/parse fails, silently skip + } + // If file not found, silently skip (might be a cfg'd out module) + } + } + } + } +} + +/// Find the file for an external module declaration. +/// +/// Tries multiple paths following Rust module resolution: +/// - For root files (lib.rs, main.rs, mod.rs): sibling paths first +/// - For non-root files (e.g., foo.rs): parent-namespaced paths first (foo/bar.rs) +fn find_module_file(parent_dir: &Path, parent_name: &str, mod_name: &str) -> Option { + // Standard sibling paths relative to parent directory + let sibling_paths = [ + // sibling file: parent_dir/mod_name.rs + parent_dir.join(format!("{}.rs", mod_name)), + // directory module: parent_dir/mod_name/mod.rs + parent_dir.join(mod_name).join("mod.rs"), + ]; + + // Check if parent is a root file (mod.rs, lib.rs, or main.rs) + let is_root = parent_name == "mod" || parent_name == "lib" || parent_name == "main"; + + if is_root { + // For root files, check sibling paths only + for path in &sibling_paths { + if path.exists() { + return Some(path.clone()); + } + } + } else { + // For non-root files (e.g., foo.rs with `mod bar;`), check parent-namespaced paths FIRST + // This ensures src/foo/bar.rs is preferred over src/bar.rs for crate::foo::bar + let parent_mod_dir = parent_dir.join(parent_name); + let namespaced_paths = [ + parent_mod_dir.join(format!("{}.rs", mod_name)), + parent_mod_dir.join(mod_name).join("mod.rs"), + ]; + + // Check namespaced paths first, then fall back to sibling paths + for path in namespaced_paths.iter().chain(sibling_paths.iter()) { + if path.exists() { + return Some(path.clone()); + } + } + } + + None +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_find_module_file_sibling() { + // This test verifies the path construction logic + let parent = Path::new("/some/src"); + let paths_checked = [parent.join("foo.rs"), parent.join("foo").join("mod.rs")]; + // Just verify the paths are constructed correctly + 
assert!(paths_checked[0].to_str().unwrap().contains("foo.rs")); + assert!(paths_checked[1].to_str().unwrap().contains("mod.rs")); + } +} diff --git a/sdk-libs/instruction-decoder-derive/src/derive_impl.rs b/sdk-libs/instruction-decoder-derive/src/derive_impl.rs new file mode 100644 index 0000000000..8ae2b9aa98 --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/src/derive_impl.rs @@ -0,0 +1,125 @@ +//! Implementation of the `#[derive(InstructionDecoder)]` macro. +//! +//! This module provides the core implementation for deriving InstructionDecoder +//! on instruction enums. + +use darling::FromDeriveInput; +use proc_macro2::TokenStream as TokenStream2; +use syn::DeriveInput; + +use crate::{builder::InstructionDecoderBuilder, parsing::InstructionDecoderArgs}; + +/// Main implementation for the `#[derive(InstructionDecoder)]` macro. +/// +/// This parses the input enum and its attributes, then generates the decoder +/// struct and trait implementation. +/// +/// # Errors +/// +/// Returns an error if: +/// - Input is not an enum +/// - Required attributes are missing (program_id) +/// - Attribute values are invalid (e.g., invalid base58, unsupported discriminator_size) +pub fn derive_instruction_decoder_impl(input: DeriveInput) -> syn::Result { + // Parse attributes using darling + let args = InstructionDecoderArgs::from_derive_input(&input) + .map_err(|e| syn::Error::new(e.span(), e.to_string()))?; + + // Create builder and generate code + let builder = InstructionDecoderBuilder::new(&args, &input)?; + builder.generate(&input) +} + +#[cfg(test)] +mod tests { + use quote::quote; + + use super::*; + + #[test] + fn test_derive_basic_enum() { + let input: DeriveInput = syn::parse2(quote! 
{ + #[instruction_decoder( + program_id = "11111111111111111111111111111111", + program_name = "Test Program" + )] + pub enum TestInstruction { + Init, + Process, + } + }) + .unwrap(); + + let result = derive_instruction_decoder_impl(input); + assert!(result.is_ok()); + + let output = result.unwrap().to_string(); + assert!(output.contains("TestInstructionDecoder")); + assert!(output.contains("InstructionDecoder")); + } + + #[test] + fn test_derive_with_fields() { + let input: DeriveInput = syn::parse2(quote! { + #[instruction_decoder( + program_id = "11111111111111111111111111111111", + discriminator_size = 1 + )] + pub enum TestInstruction { + Transfer { amount: u64 }, + Withdraw(u64), + } + }) + .unwrap(); + + let result = derive_instruction_decoder_impl(input); + assert!(result.is_ok()); + } + + #[test] + fn test_derive_with_accounts() { + let input: DeriveInput = syn::parse2(quote! { + #[instruction_decoder( + program_id = "11111111111111111111111111111111" + )] + pub enum TestInstruction { + #[instruction_decoder(accounts = CreateRecord)] + CreateRecord, + } + }) + .unwrap(); + + let result = derive_instruction_decoder_impl(input); + assert!(result.is_ok()); + } + + #[test] + fn test_derive_missing_program_id() { + let input: DeriveInput = syn::parse2(quote! { + pub enum TestInstruction { + Init, + } + }) + .unwrap(); + + let result = derive_instruction_decoder_impl(input); + assert!(result.is_err()); + } + + #[test] + fn test_derive_invalid_discriminator_size() { + let input: DeriveInput = syn::parse2(quote! 
{ + #[instruction_decoder( + program_id = "11111111111111111111111111111111", + discriminator_size = 16 + )] + pub enum TestInstruction { + Init, + } + }) + .unwrap(); + + let result = derive_instruction_decoder_impl(input); + assert!(result.is_err()); + } +} diff --git a/sdk-libs/instruction-decoder-derive/src/lib.rs b/sdk-libs/instruction-decoder-derive/src/lib.rs new file mode 100644 index 0000000000..7555c20d16 --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/src/lib.rs @@ -0,0 +1,105 @@ +//! Derive macros for InstructionDecoder implementations +//! +//! This crate provides two macros: +//! 1. `#[derive(InstructionDecoder)]` - For instruction enums (native programs) +//! 2. `#[instruction_decoder]` - Attribute macro for Anchor program modules +//! +//! The attribute macro extracts function names from the program module and generates +//! an instruction enum with `#[derive(InstructionDecoder)]` applied. +//! +//! ## Enhanced InstructionDecoder for Anchor Programs +//! +//! The derive macro supports an enhanced mode that references Anchor-generated types +//! for account names and parameter decoding: +//! +//! ```rust,ignore +//! use light_instruction_decoder_derive::InstructionDecoder; +//! +//! #[derive(InstructionDecoder)] +//! #[instruction_decoder( +//! program_id = "MyProgram111111111111111111111111111111111", +//! program_name = "My Program" +//! )] +//! pub enum MyInstruction { +//! #[instruction_decoder(accounts = CreateRecord, params = CreateRecordParams)] +//! CreateRecord, +//! +//! #[instruction_decoder(accounts = UpdateRecord)] +//! UpdateRecord, +//! } +//! ``` +//! +//! This generates a decoder that: +//! - Gets account names from `>::ACCOUNT_NAMES` +//! 
- Decodes instruction data using `ParamsType::try_from_slice()` with Debug output + +extern crate proc_macro; + +mod attribute_impl; +mod builder; +mod crate_context; +mod derive_impl; +mod parsing; +mod utils; + +use proc_macro::TokenStream; +use syn::{parse_macro_input, DeriveInput}; + +use crate::utils::into_token_stream; + +/// Derives an InstructionDecoder implementation for an Anchor instruction enum. +/// +/// This macro generates a decoder struct and InstructionDecoder trait implementation +/// that can decode Anchor program instructions for logging purposes. +/// +/// ## Usage +/// +/// ```rust,ignore +/// use light_instruction_decoder_derive::InstructionDecoder; +/// +/// #[derive(InstructionDecoder)] +/// #[instruction_decoder( +/// program_id = "MyProgramId111111111111111111111111111111111", +/// program_name = "My Program" +/// )] +/// pub enum MyInstruction { +/// CreateRecord, +/// UpdateRecord { score: u64 }, +/// DeleteRecord, +/// } +/// ``` +/// +/// This generates a `MyInstructionDecoder` struct that implements `InstructionDecoder`. +#[proc_macro_derive(InstructionDecoder, attributes(instruction_decoder, discriminator))] +pub fn derive_instruction_decoder(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + into_token_stream(derive_impl::derive_instruction_decoder_impl(input)) +} + +/// Attribute macro for generating InstructionDecoder from Anchor program modules. +/// +/// This macro extracts function names from the program module and generates +/// an InstructionDecoder implementation automatically. +/// +/// ## Usage +/// +/// ```rust,ignore +/// use light_instruction_decoder_derive::instruction_decoder; +/// +/// #[instruction_decoder] +/// #[program] +/// pub mod my_program { +/// pub fn create_record(ctx: Context) -> Result<()> { ... } +/// pub fn update_record(ctx: Context) -> Result<()> { ... 
} +/// } +/// ``` +/// +/// This generates a `MyProgramInstructionDecoder` struct that implements `InstructionDecoder`. +/// The program_id can also be omitted if `declare_id!` is used inside the module. +#[proc_macro_attribute] +pub fn instruction_decoder(attr: TokenStream, item: TokenStream) -> TokenStream { + into_token_stream(attribute_impl::instruction_decoder_attr( + attr.into(), + item.into(), + )) +} diff --git a/sdk-libs/instruction-decoder-derive/src/parsing.rs b/sdk-libs/instruction-decoder-derive/src/parsing.rs new file mode 100644 index 0000000000..b166d427e4 --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/src/parsing.rs @@ -0,0 +1,580 @@ +//! Darling-based attribute parsing for instruction decoder macros. +//! +//! This module provides declarative attribute parsing using the darling crate, +//! replacing manual `parse_nested_meta` implementations with type-safe structs. +//! +//! # Supported Attributes +//! +//! ## Derive macro (`#[derive(InstructionDecoder)]`) +//! +//! Top-level: +//! ```ignore +//! #[instruction_decoder( +//! program_id = "Base58ProgramId...", +//! program_name = "My Program", // optional, defaults to enum name +//! discriminator_size = 8 // optional: 1, 4, or 8 (default: 8) +//! )] +//! ``` +//! +//! Variant-level: +//! ```ignore +//! #[instruction_decoder( +//! accounts = MyAccounts, // Accounts struct implementing ACCOUNT_NAMES +//! params = MyParams, // Params struct implementing BorshDeserialize + Debug +//! account_names = ["a", "b", "c"] // Inline account names (alternative to accounts) +//! )] +//! #[discriminator = 5] // Explicit discriminator value (for 1/4 byte modes) +//! 
``` + +use darling::{FromDeriveInput, FromMeta, FromVariant}; +use proc_macro2::TokenStream as TokenStream2; +use quote::quote; +use syn::{Expr, ExprLit, Ident, Lit, Type}; + +use crate::{ + crate_context::CrateContext, + utils::{parse_program_id_bytes, pascal_to_display, validate_discriminator_size}, +}; + +/// Default discriminator size (Anchor-style 8 bytes). +fn default_discriminator_size() -> u8 { + 8 +} + +/// Top-level attributes for `#[derive(InstructionDecoder)]`. +#[derive(Debug, FromDeriveInput)] +#[darling(attributes(instruction_decoder), supports(enum_any))] +pub struct InstructionDecoderArgs { + /// The enum identifier + pub ident: Ident, + + /// Base58-encoded program ID string + pub program_id: String, + + /// Human-readable program name (defaults to enum name with spaces) + #[darling(default)] + pub program_name: Option, + + /// Discriminator size in bytes: 1 (native), 4 (system), or 8 (Anchor) + #[darling(default = "default_discriminator_size")] + pub discriminator_size: u8, + + /// Enum data for accessing variants + pub data: darling::ast::Data, +} + +impl InstructionDecoderArgs { + /// Get the display name for this program. + pub fn display_name(&self) -> String { + self.program_name + .clone() + .unwrap_or_else(|| pascal_to_display(&self.ident.to_string())) + } + + /// Parse and validate the program ID, returning a token stream for the byte array. + pub fn program_id_bytes(&self, span: proc_macro2::Span) -> syn::Result { + parse_program_id_bytes(&self.program_id, span) + } + + /// Validate all arguments. + pub fn validate(&self) -> syn::Result<()> { + validate_discriminator_size(self.discriminator_size, self.ident.span())?; + // Validate program_id can be parsed (will error at code gen time if invalid) + let _ = self.program_id_bytes(self.ident.span())?; + Ok(()) + } + + /// Get variants as a slice. 
+ pub fn variants(&self) -> &[VariantDecoderArgs] { + match &self.data { + darling::ast::Data::Enum(variants) => variants, + _ => &[], + } + } +} + +/// Account names specification - either inline strings or a type reference. +#[derive(Debug, Clone)] +pub enum AccountNamesSpec { + /// Inline list of account name strings + Inline(Vec), + /// Type reference with ACCOUNT_NAMES constant (boxed to reduce enum size) + TypeRef(Box), +} + +/// Wrapper for Type that implements FromMeta by parsing the value as a path. +#[derive(Debug, Clone)] +pub struct TypeWrapper(pub Type); + +impl FromMeta for TypeWrapper { + fn from_meta(item: &syn::Meta) -> darling::Result { + match item { + syn::Meta::NameValue(nv) => { + // Parse the expression as a type (path) + let ty: Type = match &nv.value { + Expr::Path(expr_path) => Type::Path(syn::TypePath { + qself: None, + path: expr_path.path.clone(), + }), + _ => { + return Err( + darling::Error::custom("expected a type path").with_span(&nv.value) + ); + } + }; + Ok(TypeWrapper(ty)) + } + _ => Err(darling::Error::custom("expected name = Type").with_span(item)), + } + } +} + +/// Variant-level attributes for instruction decoder. +#[derive(Debug, FromVariant)] +#[darling(attributes(instruction_decoder))] +pub struct VariantDecoderArgs { + /// Variant identifier (required by darling, used for error messages) + #[allow(dead_code)] + pub ident: Ident, + + /// Variant fields (required by darling) + #[allow(dead_code)] + pub fields: darling::ast::Fields, + + /// Accounts struct type (e.g., `CreateRecord`) + #[darling(default)] + pub accounts: Option, + + /// Params struct type for borsh deserialization + #[darling(default)] + pub params: Option, + + /// Inline account names (e.g., `["source", "dest"]`) + #[darling(default)] + pub account_names: Option, + + /// Optional pretty formatter function path (e.g., `crate::programs::ctoken::format_transfer2`). + /// The function must have signature `fn(&ParamsType, &[AccountMeta]) -> String`. 
+ #[darling(default)] + pub pretty_formatter: Option, +} + +impl VariantDecoderArgs { + /// Get the account names specification for this variant. + pub fn account_names_spec(&self) -> Option { + if let Some(ref inline) = self.account_names { + Some(AccountNamesSpec::Inline(inline.0.clone())) + } else { + self.accounts + .as_ref() + .map(|wrapper| AccountNamesSpec::TypeRef(Box::new(wrapper.0.clone()))) + } + } + + /// Get the params type if specified. + pub fn params_type(&self) -> Option<&Type> { + self.params.as_ref().map(|wrapper| &wrapper.0) + } + + /// Generate code to produce account names at runtime. + /// + /// If `accounts` type is specified, looks up field names from CrateContext. + /// If `account_names` inline list is specified, uses those directly. + /// + /// Emits compile-time warnings if struct resolution fails. + pub fn account_names_code(&self, crate_ctx: Option<&CrateContext>) -> TokenStream2 { + match self.account_names_spec() { + Some(AccountNamesSpec::Inline(names)) => { + // Inline names - use directly + quote! { vec![#(#names.to_string()),*] } + } + Some(AccountNamesSpec::TypeRef(ty)) => { + // Type reference - extract struct name and lookup in CrateContext + let struct_name = extract_struct_name(&ty); + let variant_name = &self.ident; + + let Some(ctx) = crate_ctx else { + eprintln!( + "warning: InstructionDecoder variant '{}': could not parse crate context, \ + account names for '{}' will be empty", + variant_name, struct_name + ); + return quote! { Vec::new() }; + }; + + if let Some(field_names) = ctx.get_struct_field_names(&struct_name) { + // Found in crate - generate inline names + return quote! { vec![#(#field_names.to_string()),*] }; + } + + // Struct not found - emit warning and fallback to empty vec + eprintln!( + "warning: InstructionDecoder variant '{}': struct '{}' not found in crate, \ + account names will be empty. Ensure the struct is defined in this crate.", + variant_name, struct_name + ); + quote! 
{ Vec::new() } + } + None => quote! { Vec::new() }, + } + } +} + +/// Extract the simple struct name from a type path. +/// +/// Examples: +/// - `instruction_accounts::CreateTwoMints` -> "CreateTwoMints" +/// - `CreateTwoMints` -> "CreateTwoMints" +/// - `crate::foo::Bar` -> "Bar" +fn extract_struct_name(ty: &Type) -> String { + match ty { + Type::Path(type_path) => type_path + .path + .segments + .last() + .map(|seg| seg.ident.to_string()) + .unwrap_or_default(), + _ => String::new(), + } +} + +/// Wrapper for parsing inline account names array. +/// +/// Supports: `account_names = ["source", "dest", "authority"]` +#[derive(Debug, Clone, Default)] +pub struct InlineAccountNames(pub Vec); + +impl FromMeta for InlineAccountNames { + fn from_meta(item: &syn::Meta) -> darling::Result { + match item { + syn::Meta::NameValue(nv) => { + // Parse the value as an array expression + if let Expr::Array(arr) = &nv.value { + let names: darling::Result> = arr + .elems + .iter() + .map(|elem| { + if let Expr::Lit(ExprLit { + lit: Lit::Str(s), .. + }) = elem + { + Ok(s.value()) + } else { + Err( + darling::Error::custom("account_names must be string literals") + .with_span(elem), + ) + } + }) + .collect(); + Ok(InlineAccountNames(names?)) + } else { + Err( + darling::Error::custom("account_names must be an array of string literals") + .with_span(&nv.value), + ) + } + } + _ => Err( + darling::Error::custom("expected account_names = [\"...\", ...]").with_span(item), + ), + } + } +} + +/// Explicit discriminator value - either a u32 integer or an 8-byte array. +#[derive(Debug, Clone)] +pub enum ExplicitDiscriminator { + /// Integer discriminator (for 1 or 4 byte modes) + U32(u32), + /// Array discriminator (for 8 byte mode) + Array([u8; 8]), +} + +/// Parse explicit discriminator from `#[discriminator = N]` or `#[discriminator(a, b, c, ...)]` attribute. +/// +/// This is separate from darling parsing because it uses a different attribute name. 
+/// Supports two formats: +/// - Integer literal: `#[discriminator = 5]` +/// - Array (parenthesized): `#[discriminator(26, 16, 169, 7, 21, 202, 242, 25)]` +pub fn parse_explicit_discriminator( + variant: &syn::Variant, +) -> syn::Result> { + for attr in &variant.attrs { + if attr.path().is_ident("discriminator") { + // Try name-value format first: #[discriminator = 5] + if let Ok(meta) = attr.meta.require_name_value() { + if let Expr::Lit(ExprLit { + lit: Lit::Int(lit_int), + .. + }) = &meta.value + { + return Ok(Some(ExplicitDiscriminator::U32( + lit_int.base10_parse::()?, + ))); + } else { + return Err(syn::Error::new_spanned( + &meta.value, + "discriminator value must be an integer literal (use #[discriminator(a, b, ...)] for arrays)", + )); + } + } + + // Try list format: #[discriminator(26, 16, 169, 7, 21, 202, 242, 25)] + if let Ok(meta) = attr.meta.require_list() { + let bytes: Result, syn::Error> = meta + .parse_args_with( + syn::punctuated::Punctuated::::parse_terminated, + )? + .iter() + .map(|lit| lit.base10_parse::()) + .collect(); + let bytes = bytes?; + if bytes.len() != 8 { + return Err(syn::Error::new_spanned( + &meta.tokens, + format!( + "array discriminator must have exactly 8 bytes, found {}", + bytes.len() + ), + )); + } + let array: [u8; 8] = bytes.try_into().unwrap(); + return Ok(Some(ExplicitDiscriminator::Array(array))); + } + + // Neither format worked + return Err(syn::Error::new_spanned( + attr, + "discriminator must be #[discriminator = N] or #[discriminator(a, b, c, d, e, f, g, h)]", + )); + } + } + Ok(None) +} + +/// Represents either a literal pubkey or a path reference for program ID. +#[derive(Debug, Clone)] +pub enum ProgramIdSource { + /// Literal base58 string converted to bytes + Bytes(TokenStream2), + /// Path reference like `crate::ID` or `ID` + Path(syn::Path), +} + +impl ProgramIdSource { + /// Generate code for the `program_id()` method. 
+ pub fn program_id_impl(&self) -> TokenStream2 {
+ match self {
+ ProgramIdSource::Bytes(bytes) => quote! {
+ fn program_id(&self) -> light_instruction_decoder::solana_pubkey::Pubkey {
+ light_instruction_decoder::solana_pubkey::Pubkey::new_from_array(#bytes)
+ }
+ },
+ ProgramIdSource::Path(path) => quote! {
+ fn program_id(&self) -> light_instruction_decoder::solana_pubkey::Pubkey {
+ #path
+ }
+ },
+ }
+ }
+}
+
+/// Arguments for the `#[instruction_decoder]` attribute macro on modules.
+#[derive(Debug, Default)]
+pub struct ModuleDecoderArgs {
+ /// Program ID source (bytes or path)
+ pub program_id: Option<ProgramIdSource>,
+ /// Human-readable program name
+ pub program_name: Option<String>,
+}
+
+impl ModuleDecoderArgs {
+ /// Parse module decoder arguments from attribute tokens.
+ pub fn parse(attr: TokenStream2) -> syn::Result<Self> {
+ let mut args = ModuleDecoderArgs::default();
+
+ if attr.is_empty() {
+ return Ok(args);
+ }
+
+ let parser = syn::meta::parser(|meta| {
+ if meta.path.is_ident("program_id") {
+ let value = meta.value()?;
+ // Try string literal first
+ if let Ok(lit) = value.parse::<syn::LitStr>() {
+ let pubkey_str = lit.value();
+ let bytes = bs58::decode(&pubkey_str)
+ .into_vec()
+ .map_err(|_| meta.error("invalid base58 pubkey"))?;
+ if bytes.len() != 32 {
+ return Err(meta.error("pubkey must be 32 bytes"));
+ }
+ args.program_id = Some(ProgramIdSource::Bytes(quote! { [#(#bytes),*] }));
+ } else {
+ // Parse as path reference
+ let path: syn::Path = value.parse()?;
+ args.program_id = Some(ProgramIdSource::Path(path));
+ }
+ Ok(())
+ } else if meta.path.is_ident("program_name") {
+ let value = meta.value()?;
+ let lit: syn::LitStr = value.parse()?;
+ args.program_name = Some(lit.value());
+ Ok(())
+ } else {
+ Err(meta.error("unknown attribute"))
+ }
+ });
+
+ syn::parse::Parser::parse2(parser, attr)?;
+ Ok(args)
+ }
+
+ /// Try to find program ID from `declare_id!` macro in module content. 
+ pub fn find_declare_id(&mut self, module: &syn::ItemMod) -> syn::Result<()> {
+ if self.program_id.is_some() {
+ return Ok(());
+ }
+
+ if let Some(ref content) = module.content {
+ for item in &content.1 {
+ if let syn::Item::Macro(macro_item) = item {
+ if macro_item.mac.path.is_ident("declare_id") {
+ let tokens = &macro_item.mac.tokens;
+ let lit: syn::LitStr = syn::parse2(tokens.clone())?;
+ let pubkey_str = lit.value();
+ let bytes = bs58::decode(&pubkey_str)
+ .into_vec()
+ .map_err(|_| syn::Error::new_spanned(&lit, "invalid base58 pubkey"))?;
+ if bytes.len() != 32 {
+ return Err(syn::Error::new_spanned(&lit, "pubkey must be 32 bytes"));
+ }
+ self.program_id = Some(ProgramIdSource::Bytes(quote! { [#(#bytes),*] }));
+ return Ok(());
+ }
+ }
+ }
+ }
+
+ Ok(())
+ }
+
+ /// Get program ID source, defaulting to `crate::ID` if not specified.
+ pub fn program_id_source(&self) -> ProgramIdSource {
+ self.program_id
+ .clone()
+ .unwrap_or_else(|| ProgramIdSource::Path(syn::parse_quote!(crate::ID)))
+ }
+
+ /// Get program name, defaulting to module name with spaces.
+ pub fn program_name(&self, module_name: &str) -> String {
+ self.program_name.clone().unwrap_or_else(|| {
+ let pascal = crate::utils::to_pascal_case(module_name);
+ pascal_to_display(&pascal)
+ })
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use syn::parse_quote;
+
+ use super::*;
+
+ #[test]
+ fn test_inline_account_names_parsing() {
+ let meta: syn::Meta = parse_quote!(account_names = ["source", "dest", "authority"]);
+ let result = InlineAccountNames::from_meta(&meta).unwrap();
+ assert_eq!(result.0, vec!["source", "dest", "authority"]);
+ }
+
+ #[test]
+ fn test_variant_args_with_accounts_type() {
+ let variant: syn::Variant = parse_quote! 
{ + #[instruction_decoder(accounts = CreateRecord)] + CreateRecord + }; + let args = VariantDecoderArgs::from_variant(&variant).unwrap(); + assert!(args.accounts.is_some()); + assert!(args.params.is_none()); + } + + #[test] + fn test_variant_args_with_inline_names() { + let variant: syn::Variant = parse_quote! { + #[instruction_decoder(account_names = ["source", "dest"])] + Transfer + }; + let args = VariantDecoderArgs::from_variant(&variant).unwrap(); + assert!(args.account_names.is_some()); + let names = args.account_names.unwrap(); + assert_eq!(names.0, vec!["source", "dest"]); + } + + #[test] + fn test_parse_explicit_discriminator_u32() { + let variant: syn::Variant = parse_quote! { + #[discriminator = 5] + Transfer + }; + let disc = parse_explicit_discriminator(&variant).unwrap(); + assert!(matches!(disc, Some(ExplicitDiscriminator::U32(5)))); + } + + #[test] + fn test_parse_explicit_discriminator_array() { + let variant: syn::Variant = parse_quote! { + #[discriminator(26, 16, 169, 7, 21, 202, 242, 25)] + Invoke + }; + let disc = parse_explicit_discriminator(&variant).unwrap(); + assert!(matches!( + disc, + Some(ExplicitDiscriminator::Array([ + 26, 16, 169, 7, 21, 202, 242, 25 + ])) + )); + } + + #[test] + fn test_parse_explicit_discriminator_array_wrong_length() { + let variant: syn::Variant = parse_quote! { + #[discriminator(1, 2, 3, 4)] + Transfer + }; + let result = parse_explicit_discriminator(&variant); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(err + .to_string() + .contains("array discriminator must have exactly 8 bytes")); + } + + #[test] + fn test_parse_explicit_discriminator_none() { + let variant: syn::Variant = parse_quote! 
{ + Transfer + }; + let disc = parse_explicit_discriminator(&variant).unwrap(); + assert!(disc.is_none()); + } + + #[test] + fn test_extract_struct_name_simple() { + let ty: syn::Type = parse_quote!(CreateTwoMints); + assert_eq!(extract_struct_name(&ty), "CreateTwoMints"); + } + + #[test] + fn test_extract_struct_name_qualified() { + let ty: syn::Type = parse_quote!(instruction_accounts::CreateTwoMints); + assert_eq!(extract_struct_name(&ty), "CreateTwoMints"); + } + + #[test] + fn test_extract_struct_name_crate_path() { + let ty: syn::Type = parse_quote!(crate::foo::bar::MyStruct); + assert_eq!(extract_struct_name(&ty), "MyStruct"); + } +} diff --git a/sdk-libs/instruction-decoder-derive/src/utils.rs b/sdk-libs/instruction-decoder-derive/src/utils.rs new file mode 100644 index 0000000000..af957aeaef --- /dev/null +++ b/sdk-libs/instruction-decoder-derive/src/utils.rs @@ -0,0 +1,193 @@ +//! Utility functions for instruction decoder macros. +//! +//! This module provides common utilities for: +//! - Case conversion (snake_case, PascalCase) +//! - Anchor discriminator computation +//! - Error handling helpers +//! - Program ID validation + +use proc_macro::TokenStream; +use proc_macro2::{Span, TokenStream as TokenStream2}; +use quote::quote; +use sha2::{Digest, Sha256}; + +/// Convert a `syn::Result` to `proc_macro::TokenStream`. +/// +/// This centralizes error handling for macro entry points, ensuring compile +/// errors are properly surfaced to the user with correct span information. +/// +/// # Example +/// +/// ```ignore +/// #[proc_macro_derive(MyMacro)] +/// pub fn my_macro(input: TokenStream) -> TokenStream { +/// into_token_stream(my_macro_impl(input.into())) +/// } +/// ``` +pub(crate) fn into_token_stream(result: syn::Result) -> TokenStream { + result.unwrap_or_else(|err| err.to_compile_error()).into() +} + +/// Convert PascalCase to snake_case using heck for proper acronym handling. 
+/// +/// Uses heck's ToSnakeCase to match Anchor's discriminator calculation behavior, +/// which groups consecutive capitals as acronyms (e.g., "CreateATA" -> "create_ata"). +/// +/// # Examples +/// +/// ```ignore +/// assert_eq!(to_snake_case("CreateRecord"), "create_record"); +/// assert_eq!(to_snake_case("CreateATA"), "create_ata"); +/// assert_eq!(to_snake_case("Init"), "init"); +/// ``` +pub(crate) fn to_snake_case(name: &str) -> String { + use heck::ToSnakeCase; + name.to_snake_case() +} + +/// Convert snake_case to PascalCase. +/// +/// # Examples +/// +/// ```ignore +/// assert_eq!(to_pascal_case("create_record"), "CreateRecord"); +/// assert_eq!(to_pascal_case("init"), "Init"); +/// ``` +pub(crate) fn to_pascal_case(name: &str) -> String { + name.split('_') + .map(|part| { + let mut chars = part.chars(); + match chars.next() { + None => String::new(), + Some(first) => first.to_uppercase().chain(chars).collect(), + } + }) + .collect() +} + +/// Compute Anchor-style instruction discriminator. +/// +/// Anchor discriminators are the first 8 bytes of SHA256("global:") +/// where instruction_name is in snake_case. +/// +/// # Examples +/// +/// ```ignore +/// let disc = compute_anchor_discriminator("create_record"); +/// assert_eq!(disc.len(), 8); +/// ``` +pub(crate) fn compute_anchor_discriminator(instruction_name: &str) -> [u8; 8] { + let preimage = format!("global:{}", instruction_name); + let hash = Sha256::digest(preimage.as_bytes()); + let mut discriminator = [0u8; 8]; + discriminator.copy_from_slice(&hash[..8]); + discriminator +} + +/// Convert a PascalCase name to human-readable format with spaces. 
+/// +/// # Examples +/// +/// ```ignore +/// assert_eq!(pascal_to_display("CreateRecord"), "Create Record"); +/// assert_eq!(pascal_to_display("MyProgram"), "My Program"); +/// ``` +pub(crate) fn pascal_to_display(name: &str) -> String { + let mut result = String::new(); + for (i, c) in name.chars().enumerate() { + if i > 0 && c.is_uppercase() { + result.push(' '); + } + result.push(c); + } + result +} + +/// Parse a base58-encoded program ID string and return token stream for byte array. +/// +/// # Errors +/// +/// Returns an error if the string is not valid base58 or doesn't decode to 32 bytes. +pub(crate) fn parse_program_id_bytes(id_str: &str, span: Span) -> syn::Result { + let bytes = bs58::decode(id_str) + .into_vec() + .map_err(|_| syn::Error::new(span, "invalid base58 program ID"))?; + + if bytes.len() != 32 { + return Err(syn::Error::new( + span, + format!("program ID must be 32 bytes, got {}", bytes.len()), + )); + } + + Ok(quote! { [#(#bytes),*] }) +} + +/// Validate discriminator size. +/// +/// Valid sizes are: +/// - 1 byte: Native programs with simple instruction indices +/// - 4 bytes: System-style programs (little-endian u32) +/// - 8 bytes: Anchor programs (SHA256 prefix) +/// +/// # Errors +/// +/// Returns an error if size is not 1, 4, or 8. 
+pub(crate) fn validate_discriminator_size(size: u8, span: Span) -> syn::Result<()> { + if ![1, 4, 8].contains(&size) { + return Err(syn::Error::new( + span, + "discriminator_size must be 1 (native), 4 (system), or 8 (Anchor)", + )); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_to_snake_case() { + assert_eq!(to_snake_case("CreateRecord"), "create_record"); + assert_eq!(to_snake_case("UpdateScore"), "update_score"); + assert_eq!(to_snake_case("Init"), "init"); + // heck properly handles acronyms - consecutive capitals stay grouped + assert_eq!(to_snake_case("CreateATA"), "create_ata"); + assert_eq!(to_snake_case("HTTPHandler"), "http_handler"); + } + + #[test] + fn test_to_pascal_case() { + assert_eq!(to_pascal_case("create_record"), "CreateRecord"); + assert_eq!(to_pascal_case("update_score"), "UpdateScore"); + assert_eq!(to_pascal_case("init"), "Init"); + } + + #[test] + fn test_compute_anchor_discriminator() { + let disc = compute_anchor_discriminator("create_record"); + assert_eq!(disc.len(), 8); + assert!(disc.iter().any(|&b| b != 0)); + + // Same input should give same output (deterministic) + let disc2 = compute_anchor_discriminator("create_record"); + assert_eq!(disc, disc2); + } + + #[test] + fn test_pascal_to_display() { + assert_eq!(pascal_to_display("CreateRecord"), "Create Record"); + assert_eq!(pascal_to_display("Init"), "Init"); + assert_eq!(pascal_to_display("MyProgram"), "My Program"); + } + + #[test] + fn test_validate_discriminator_size() { + assert!(validate_discriminator_size(1, Span::call_site()).is_ok()); + assert!(validate_discriminator_size(4, Span::call_site()).is_ok()); + assert!(validate_discriminator_size(8, Span::call_site()).is_ok()); + assert!(validate_discriminator_size(2, Span::call_site()).is_err()); + assert!(validate_discriminator_size(16, Span::call_site()).is_err()); + } +} diff --git a/sdk-libs/instruction-decoder/Cargo.toml b/sdk-libs/instruction-decoder/Cargo.toml new file mode 100644 
index 0000000000..54db503a20 --- /dev/null +++ b/sdk-libs/instruction-decoder/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "light-instruction-decoder" +version = "0.1.0" +description = "Instruction decoder library for litsvm tests." +repository = "https://github.com/Lightprotocol/light-protocol" +license = "Apache-2.0" +edition = "2021" + +[lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(target_os, values("solana"))'] } + +[features] +default = ["light-protocol"] +light-protocol = [ + "dep:light-compressed-account", + "dep:light-sdk-types", + "dep:light-token-interface", +] + +[dependencies] +solana-pubkey = { version = "2", features = ["curve25519"] } +solana-instruction = { version = "2" } +solana-signature = { version = "2" } +borsh = { workspace = true, features = ["std"] } +bs58 = { workspace = true } +serde = { workspace = true, features = ["derive"] } +light-instruction-decoder-derive = { workspace = true } + +# Light Protocol dependencies (optional, enabled by light-protocol feature) +light-compressed-account = { workspace = true, optional = true } +light-sdk-types = { workspace = true, optional = true } +light-token-interface = { workspace = true, optional = true } + +[target.'cfg(not(target_os = "solana"))'.dependencies] +tabled = { workspace = true } diff --git a/sdk-libs/program-test/src/logging/config.rs b/sdk-libs/instruction-decoder/src/config.rs similarity index 51% rename from sdk-libs/program-test/src/logging/config.rs rename to sdk-libs/instruction-decoder/src/config.rs index 3538cf3dbb..f880a383e8 100644 --- a/sdk-libs/program-test/src/logging/config.rs +++ b/sdk-libs/instruction-decoder/src/config.rs @@ -1,9 +1,13 @@ //! 
Configuration types for enhanced logging +use std::sync::Arc; + use serde::{Deserialize, Serialize}; +use crate::{registry::DecoderRegistry, InstructionDecoder}; + /// Configuration for enhanced transaction logging -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct EnhancedLoggingConfig { /// Whether enhanced logging is enabled pub enabled: bool, @@ -19,10 +23,36 @@ pub struct EnhancedLoggingConfig { pub show_compute_units: bool, /// Use ANSI colors in output pub use_colors: bool, - /// Maximum number of inner instruction levels to display - pub max_inner_instruction_depth: usize, + /// Maximum CPI depth to display + pub max_cpi_depth: usize, /// Show instruction data for account compression program pub show_compression_instruction_data: bool, + /// Truncate byte arrays: Some((first, last)) shows first N and last N elements; None disables + pub truncate_byte_arrays: Option<(usize, usize)>, + /// Decoder registry containing built-in and custom decoders + /// Wrapped in Arc so it can be shared across clones instead of being lost + #[serde(skip)] + decoder_registry: Option>, +} + +impl Clone for EnhancedLoggingConfig { + fn clone(&self) -> Self { + // Arc clone shares the underlying DecoderRegistry across clones + // This preserves custom decoders registered via with_decoders() + Self { + enabled: self.enabled, + log_events: self.log_events, + verbosity: self.verbosity, + show_account_changes: self.show_account_changes, + decode_light_instructions: self.decode_light_instructions, + show_compute_units: self.show_compute_units, + use_colors: self.use_colors, + max_cpi_depth: self.max_cpi_depth, + show_compression_instruction_data: self.show_compression_instruction_data, + truncate_byte_arrays: self.truncate_byte_arrays, + decoder_registry: self.decoder_registry.clone(), + } + } } impl Default for EnhancedLoggingConfig { @@ -35,8 +65,10 @@ impl Default for EnhancedLoggingConfig { decode_light_instructions: true, 
show_compute_units: true, use_colors: true, - max_inner_instruction_depth: 60, + max_cpi_depth: 60, show_compression_instruction_data: false, + truncate_byte_arrays: Some((2, 2)), + decoder_registry: Some(Arc::new(DecoderRegistry::new())), } } } @@ -65,8 +97,10 @@ impl EnhancedLoggingConfig { decode_light_instructions: true, show_compute_units: true, use_colors: true, - max_inner_instruction_depth: 60, + max_cpi_depth: 60, show_compression_instruction_data: false, + truncate_byte_arrays: Some((2, 2)), + decoder_registry: Some(Arc::new(DecoderRegistry::new())), } } @@ -80,9 +114,43 @@ impl EnhancedLoggingConfig { decode_light_instructions: false, show_compute_units: false, use_colors: false, - max_inner_instruction_depth: 60, + max_cpi_depth: 60, show_compression_instruction_data: false, + truncate_byte_arrays: Some((2, 2)), + decoder_registry: Some(Arc::new(DecoderRegistry::new())), + } + } + + /// Register custom decoders + /// + /// Note: Uses Arc::get_mut which works correctly in the builder pattern since + /// there's only one Arc reference. If the Arc has been cloned, a new registry + /// is created with built-in decoders plus the custom ones. 
+ pub fn with_decoders(mut self, decoders: Vec>) -> Self { + if let Some(ref mut arc) = self.decoder_registry { + if let Some(registry) = Arc::get_mut(arc) { + registry.register_all(decoders); + return self; + } + } + // Create new registry if none exists or Arc has multiple references + let mut registry = DecoderRegistry::new(); + registry.register_all(decoders); + self.decoder_registry = Some(Arc::new(registry)); + self + } + + /// Get or create the decoder registry + pub fn get_decoder_registry(&mut self) -> &DecoderRegistry { + if self.decoder_registry.is_none() { + self.decoder_registry = Some(Arc::new(DecoderRegistry::new())); } + self.decoder_registry.as_ref().unwrap() + } + + /// Get the decoder registry if it exists (immutable access) + pub fn decoder_registry(&self) -> Option<&DecoderRegistry> { + self.decoder_registry.as_ref().map(|arc| arc.as_ref()) } /// Create config based on environment - always enabled, debug level when RUST_BACKTRACE is set diff --git a/sdk-libs/instruction-decoder/src/core.rs b/sdk-libs/instruction-decoder/src/core.rs new file mode 100644 index 0000000000..40219c4a57 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/core.rs @@ -0,0 +1,74 @@ +//! Core types for instruction decoding. + +use solana_instruction::AccountMeta; +use solana_pubkey::Pubkey; + +/// A decoded instruction field for display. +#[derive(Debug, Clone)] +pub struct DecodedField { + /// Field name + pub name: String, + /// Field value as string + pub value: String, + /// Optional nested fields (for complex types) + pub children: Vec, +} + +impl DecodedField { + /// Create a simple field with name and value. + pub fn new(name: impl Into, value: impl Into) -> Self { + Self { + name: name.into(), + value: value.into(), + children: Vec::new(), + } + } + + /// Create a field with nested children. 
+ pub fn with_children(name: impl Into<String>, children: Vec<DecodedField>) -> Self {
+ Self {
+ name: name.into(),
+ value: String::new(),
+ children,
+ }
+ }
+}
+
+/// Result of decoding an instruction.
+#[derive(Debug, Clone)]
+pub struct DecodedInstruction {
+ /// Human-readable instruction name (e.g., "Transfer", "MintTo")
+ pub name: String,
+ /// Decoded fields to display
+ pub fields: Vec<DecodedField>,
+ /// Account names in order (index corresponds to account position)
+ pub account_names: Vec<String>,
+}
+
+impl DecodedInstruction {
+ /// Create a decoded instruction with fields and account names.
+ pub fn with_fields_and_accounts(
+ name: impl Into<String>,
+ fields: Vec<DecodedField>,
+ account_names: Vec<String>,
+ ) -> Self {
+ Self {
+ name: name.into(),
+ fields,
+ account_names,
+ }
+ }
+}
+
+/// Trait for instruction decoders - each program implements this.
+pub trait InstructionDecoder: Send + Sync {
+ /// Program ID this decoder handles.
+ fn program_id(&self) -> Pubkey;
+
+ /// Human-readable program name (e.g., "Compressed Token Program").
+ fn program_name(&self) -> &'static str;
+
+ /// Decode instruction data into a structured representation.
+ /// Returns None if decoding fails or instruction is unknown.
+ fn decode(&self, data: &[u8], accounts: &[AccountMeta]) -> Option<DecodedInstruction>;
+} diff --git a/sdk-libs/instruction-decoder/src/formatter.rs b/sdk-libs/instruction-decoder/src/formatter.rs new file mode 100644 index 0000000000..3a7c95dfa4 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/formatter.rs @@ -0,0 +1,1057 @@ +//! 
Transaction formatting utilities for explorer-style output + +use std::fmt::{self, Write}; + +use solana_pubkey::Pubkey; +use tabled::{Table, Tabled}; + +use crate::{ + config::{EnhancedLoggingConfig, LogVerbosity}, + types::{ + AccountAccess, AccountChange, EnhancedInstructionLog, EnhancedTransactionLog, + TransactionStatus, + }, +}; + +/// Known test accounts and programs mapped to human-readable names +static KNOWN_ACCOUNTS: &[(&str, &str)] = &[ + // Test program + ( + "FNt7byTHev1k5x2cXZLBr8TdWiC3zoP5vcnZR4P682Uy", + "test program", + ), + // V1 test accounts + ( + "smt1NamzXdq4AMqS2fS2F1i5KTYPZRhoHgWx38d8WsT", + "v1 state merkle tree", + ), + ( + "nfq1NvQDJ2GEgnS8zt9prAe8rjjpAW1zFkrvZoBR148", + "v1 nullifier queue", + ), + ( + "cpi1uHzrEhBG733DoEJNgHCyRS3XmmyVNZx5fonubE4", + "v1 cpi context", + ), + ( + "amt1Ayt45jfbdw5YSo7iz6WZxUmnZsQTYXy82hVwyC2", + "v1 address merkle tree", + ), + ( + "aq1S9z4reTSQAdgWHGD2zDaS39sjGrAxbR31vxJ2F4F", + "v1 address queue", + ), + // V2 state trees (5 triples) + ( + "bmt1LryLZUMmF7ZtqESaw7wifBXLfXHQYoE4GAmrahU", + "v2 state merkle tree 1", + ), + ( + "oq1na8gojfdUhsfCpyjNt6h4JaDWtHf1yQj4koBWfto", + "v2 state output queue 1", + ), + ( + "cpi15BoVPKgEPw5o8wc2T816GE7b378nMXnhH3Xbq4y", + "v2 cpi context 1", + ), + ( + "bmt2UxoBxB9xWev4BkLvkGdapsz6sZGkzViPNph7VFi", + "v2 state merkle tree 2", + ), + ( + "oq2UkeMsJLfXt2QHzim242SUi3nvjJs8Pn7Eac9H9vg", + "v2 state output queue 2", + ), + ( + "cpi2yGapXUR3As5SjnHBAVvmApNiLsbeZpF3euWnW6B", + "v2 cpi context 2", + ), + ( + "bmt3ccLd4bqSVZVeCJnH1F6C8jNygAhaDfxDwePyyGb", + "v2 state merkle tree 3", + ), + ( + "oq3AxjekBWgo64gpauB6QtuZNesuv19xrhaC1ZM1THQ", + "v2 state output queue 3", + ), + ( + "cpi3mbwMpSX8FAGMZVP85AwxqCaQMfEk9Em1v8QK9Rf", + "v2 cpi context 3", + ), + ( + "bmt4d3p1a4YQgk9PeZv5s4DBUmbF5NxqYpk9HGjQsd8", + "v2 state merkle tree 4", + ), + ( + "oq4ypwvVGzCUMoiKKHWh4S1SgZJ9vCvKpcz6RT6A8dq", + "v2 state output queue 4", + ), + ( + "cpi4yyPDc4bCgHAnsenunGA8Y77j3XEDyjgfyCKgcoc", + 
"v2 cpi context 4", + ), + ( + "bmt5yU97jC88YXTuSukYHa8Z5Bi2ZDUtmzfkDTA2mG2", + "v2 state merkle tree 5", + ), + ( + "oq5oh5ZR3yGomuQgFduNDzjtGvVWfDRGLuDVjv9a96P", + "v2 state output queue 5", + ), + ( + "cpi5ZTjdgYpZ1Xr7B1cMLLUE81oTtJbNNAyKary2nV6", + "v2 cpi context 5", + ), + // V2 address tree + ( + "amt2kaJA14v3urZbZvnc5v2np8jqvc4Z8zDep5wbtzx", + "v2 address merkle tree", + ), + // CPI authority + ( + "HZH7qSLcpAeDqCopVU4e5XkhT9j3JFsQiq8CmruY3aru", + "cpi authority pda", + ), + // Solana native programs + ("11111111111111111111111111111111", "system program"), + ( + "ComputeBudget111111111111111111111111111111", + "compute budget program", + ), + ( + "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA", + "token program", + ), + ( + "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL", + "associated token program", + ), +]; + +/// Row for account table display +#[derive(Tabled)] +struct AccountRow { + #[tabled(rename = "#")] + symbol: String, + #[tabled(rename = "Account")] + pubkey: String, + #[tabled(rename = "Type")] + access: String, + #[tabled(rename = "Name")] + name: String, +} + +/// Colors for terminal output +#[derive(Debug, Clone, Default)] +pub struct Colors { + pub bold: &'static str, + pub reset: &'static str, + pub green: &'static str, + pub red: &'static str, + pub yellow: &'static str, + pub blue: &'static str, + pub cyan: &'static str, + pub gray: &'static str, +} + +impl Colors { + pub fn new(use_colors: bool) -> Self { + if use_colors { + Self { + bold: "\x1b[1m", + reset: "\x1b[0m", + green: "\x1b[32m", + red: "\x1b[31m", + yellow: "\x1b[33m", + blue: "\x1b[34m", + cyan: "\x1b[36m", + gray: "\x1b[90m", + } + } else { + Self::default() + } + } +} + +/// Transaction formatter with configurable output +pub struct TransactionFormatter { + config: EnhancedLoggingConfig, + colors: Colors, +} + +impl TransactionFormatter { + pub fn new(config: &EnhancedLoggingConfig) -> Self { + Self { + config: config.clone(), + colors: Colors::new(config.use_colors), + } + 
} + + /// Apply line breaks to long values in the complete output + fn apply_line_breaks(&self, text: &str) -> String { + let mut result = String::new(); + + for line in text.lines() { + // Look for patterns that need line breaking + if let Some(formatted_line) = self.format_line_if_needed(line) { + result.push_str(&formatted_line); + } else { + result.push_str(line); + } + result.push('\n'); + } + + result + } + + /// Format a line if it contains long values that need breaking + fn format_line_if_needed(&self, line: &str) -> Option { + // Extract leading whitespace/indentation and table characters + let leading_chars = line + .chars() + .take_while(|&c| c.is_whitespace() || "│├└┌┬┴┐┤─".contains(c)) + .collect::(); + + // Match patterns like "address: [0, 1, 2, 3, ...]" or "Raw instruction data (N bytes): [...]" + if line.contains(": [") && line.contains("]") { + // Handle byte arrays + if let Some(start) = line.find(": [") { + if let Some(end_pos) = line[start..].find(']') { + let end = start + end_pos; + let prefix = &line[..start + 2]; // Include ": " + let array_part = &line[start + 2..end + 1]; // The "[...]" part + let suffix = &line[end + 1..]; + + // For raw instruction data, use a shorter line length to better fit in terminal + let max_width = if line.contains("Raw instruction data") { + 80 // Wider for raw instruction data to fit more numbers per line + } else { + 50 // Keep existing width for other arrays + }; + + // Always format if it's raw instruction data or if it exceeds max_width + if line.contains("Raw instruction data") || array_part.len() > max_width { + let formatted_array = self.format_long_value_with_indent( + array_part, + max_width, + &leading_chars, + ); + return Some(format!("{}{}{}", prefix, formatted_array, suffix)); + } + } + } + } + + // Handle long base58 strings (44+ characters) in table cells + if line.contains('|') && !line.trim_start().starts_with('|') { + // This is a table content line, not a border + let mut new_line = 
String::new();
+ let mut any_modified = false;
+
+ // Split by table separators while preserving them
+ let parts: Vec<&str> = line.split('|').collect();
+ for (i, part) in parts.iter().enumerate() {
+ if i > 0 {
+ new_line.push('|');
+ }
+
+ // Check if this cell contains a long value
+ let mut cell_modified = false;
+ for word in part.split_whitespace() {
+ if word.len() > 44 && word.chars().all(|c| c.is_alphanumeric()) {
+ let indent = " ".repeat(leading_chars.len() + 2); // Extra space for table formatting
+ let formatted_word = self.format_long_value_with_indent(word, 44, &indent);
+ new_line.push_str(&part.replace(word, &formatted_word));
+ cell_modified = true;
+ any_modified = true;
+ break;
+ }
+ }
+
+ if !cell_modified {
+ new_line.push_str(part);
+ }
+ }
+
+ if any_modified {
+ return Some(new_line);
+ }
+ }
+
+ None
+ }
+
+ /// Format long value with proper indentation for continuation lines
+ fn format_long_value_with_indent(&self, value: &str, max_width: usize, indent: &str) -> String {
+ if value.len() <= max_width {
+ return value.to_string();
+ }
+
+ let mut result = String::new();
+
+ // Handle byte arrays specially by breaking at natural comma boundaries when possible
+ if value.starts_with('[') && value.ends_with(']') {
+ // This is a byte array - try to break at comma boundaries for better readability
+ let inner = &value[1..value.len() - 1]; // Remove [ and ]
+ let parts: Vec<&str> = inner.split(", ").collect();
+
+ result.push('[');
+ let mut current_line = String::new();
+ let mut first_line = true;
+
+ for (i, part) in parts.iter().enumerate() {
+ let addition = if i == 0 {
+ part.to_string()
+ } else {
+ format!(", {}", part)
+ };
+
+ // Check if adding this part would exceed the line width
+ if current_line.len() + addition.len() > max_width && !current_line.is_empty() {
+ // Add current line to result and start new line
+ if first_line {
+ result.push_str(&current_line);
+ first_line = false;
+ } else {
+ result.push_str(&format!("\n{}{}", 
indent, current_line)); + } + // Use addition to preserve the ", " separator for non-first items + current_line = addition; + } else { + current_line.push_str(&addition); + } + } + + // Add the last line + if !current_line.is_empty() { + if first_line { + result.push_str(&current_line); + } else { + result.push_str(&format!("\n{}{}", indent, current_line)); + } + } + + result.push(']'); + } else { + // Fall back to character-based breaking for non-array values + let chars = value.chars().collect::<Vec<_>>(); + let mut pos = 0; + + while pos < chars.len() { + let end = (pos + max_width).min(chars.len()); + let chunk: String = chars[pos..end].iter().collect(); + + if pos == 0 { + result.push_str(&chunk); + } else { + result.push_str(&format!("\n{}{}", indent, chunk)); + } + + pos = end; + } + } + + result + } + + /// Format complete transaction log + pub fn format(&self, log: &EnhancedTransactionLog, tx_number: usize) -> String { + let mut output = String::new(); + + // Transaction box header with number (wide enough for signature + slot + status) + writeln!(output, "{}┌──────────────────────────────────────────────────────────── Transaction #{} ─────────────────────────────────────────────────────────────┐{}", self.colors.gray, tx_number, self.colors.reset).expect("Failed to write box header"); + + // Transaction header + self.write_transaction_header(&mut output, log) + .expect("Failed to write header"); + + // Instructions section + if !log.instructions.is_empty() { + self.write_instructions_section(&mut output, log) + .expect("Failed to write instructions"); + } + + // Account changes section + if self.config.show_account_changes && !log.account_changes.is_empty() { + self.write_account_changes_section(&mut output, log) + .expect("Failed to write account changes"); + } + + // Light Protocol events section + if !log.light_events.is_empty() { + self.write_light_events_section(&mut output, log) + .expect("Failed to write Light Protocol events"); + } + + // Program logs section 
(LiteSVM pretty logs) + if !log.program_logs_pretty.trim().is_empty() { + self.write_program_logs_section(&mut output, log) + .expect("Failed to write program logs"); + } + + // Transaction box footer (matches header width) + writeln!(output, "{}└──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘{}", self.colors.gray, self.colors.reset).expect("Failed to write box footer"); + + // Apply line breaks for long values in the complete output + self.apply_line_breaks(&output) + } + + /// Write transaction header with status, fee, and compute units + fn write_transaction_header( + &self, + output: &mut String, + log: &EnhancedTransactionLog, + ) -> fmt::Result { + writeln!( + output, + "{}│{} {}Transaction: {}{} | Slot: {} | Status: {}{}", + self.colors.gray, + self.colors.reset, + self.colors.bold, + self.colors.cyan, + log.signature, + log.slot, + self.status_color(&log.status), + log.status.text(), + )?; + + writeln!( + output, + "{}│{} Fee: {}{:.6} SOL | Compute Used: {}{}/{} CU{}", + self.colors.gray, + self.colors.reset, + self.colors.yellow, + log.fee as f64 / 1_000_000_000.0, + self.colors.blue, + log.compute_used, + log.compute_total, + self.colors.reset + )?; + + writeln!(output, "{}│{}", self.colors.gray, self.colors.reset)?; + Ok(()) + } + + /// Write instructions hierarchy + fn write_instructions_section( + &self, + output: &mut String, + log: &EnhancedTransactionLog, + ) -> fmt::Result { + writeln!( + output, + "{}│{} {}Instructions ({}):{}", + self.colors.gray, + self.colors.reset, + self.colors.bold, + log.instructions.len(), + self.colors.reset + )?; + writeln!(output, "{}│{}", self.colors.gray, self.colors.reset)?; + + for (i, instruction) in log.instructions.iter().enumerate() { + self.write_instruction(output, instruction, 0, i + 1)?; + } + + Ok(()) + } + + /// Write single instruction with proper indentation and hierarchy + fn write_instruction( + &self, + output: 
&mut String, + instruction: &EnhancedInstructionLog, + depth: usize, + number: usize, + ) -> fmt::Result { + let indent = self.get_tree_indent(depth); + let prefix = if depth == 0 { "├─" } else { "└─" }; + + // Instruction header + let inner_count = if instruction.inner_instructions.is_empty() { + String::new() + } else { + format!(".{}", instruction.inner_instructions.len()) + }; + + write!( + output, + "{}{} {}#{}{} {}{} ({}{}{})", + indent, + prefix, + self.colors.bold, + number, + inner_count, + self.colors.blue, + instruction.program_id, + self.colors.cyan, + instruction.program_name, + self.colors.reset + )?; + + // Add instruction name if parsed + if let Some(ref name) = instruction.instruction_name { + write!( + output, + " - {}{}{}", + self.colors.yellow, name, self.colors.reset + )?; + } + + // Add compute units if available and requested + if self.config.show_compute_units { + if let Some(compute) = instruction.compute_consumed { + write!( + output, + " {}({}{}CU{})", + self.colors.gray, self.colors.blue, compute, self.colors.gray + )?; + } + } + + writeln!(output, "{}", self.colors.reset)?; + + // Show instruction details based on verbosity + match self.config.verbosity { + LogVerbosity::Detailed | LogVerbosity::Full => { + // Display decoded instruction fields from custom decoder + if let Some(ref decoded) = instruction.decoded_instruction { + if !decoded.fields.is_empty() { + let indent = self.get_tree_indent(depth + 1); + for field in &decoded.fields { + self.write_decoded_field(field, output, &indent, 0)?; + } + } + } else if !instruction.data.is_empty() { + // Show raw instruction data for unparseable instructions with chunking + // Skip instruction data for account compression program unless explicitly configured + let should_show_data = if instruction.program_name == "Account Compression" { + self.config.show_compression_instruction_data + } else { + true + }; + + if should_show_data { + let indent = self.get_tree_indent(depth + 1); + writeln!( + 
output, + "{}{}Raw instruction data ({} bytes): {}[", + indent, + self.colors.gray, + instruction.data.len(), + self.colors.cyan + )?; + + // Chunk the data into 32-byte groups for better readability + for (i, chunk) in instruction.data.chunks(32).enumerate() { + write!(output, "{} ", indent)?; + for (j, byte) in chunk.iter().enumerate() { + if j > 0 { + write!(output, ", ")?; + } + write!(output, "{}", byte)?; + } + if i < instruction.data.chunks(32).len() - 1 { + writeln!(output, ",")?; + } else { + writeln!(output, "]{}", self.colors.reset)?; + } + } + } + } + } + _ => {} + } + + // Show accounts if verbose + if self.config.verbosity == LogVerbosity::Full && !instruction.accounts.is_empty() { + let accounts_indent = self.get_tree_indent(depth + 1); + writeln!( + output, + "{}{}Accounts ({}):{}", + accounts_indent, + self.colors.gray, + instruction.accounts.len(), + self.colors.reset + )?; + + // Create a table for better account formatting + let mut account_rows: Vec<AccountRow> = Vec::new(); + + for (idx, account) in instruction.accounts.iter().enumerate() { + let access = if account.is_signer && account.is_writable { + AccountAccess::SignerWritable + } else if account.is_signer { + AccountAccess::Signer + } else if account.is_writable { + AccountAccess::Writable + } else { + AccountAccess::Readonly + }; + + // Try to get account name from decoded instruction first, then fall back to lookup + let account_name = instruction + .decoded_instruction + .as_ref() + .and_then(|decoded| decoded.account_names.get(idx).cloned()) + .unwrap_or_else(|| self.get_account_name(&account.pubkey)); + account_rows.push(AccountRow { + symbol: access.symbol(idx + 1), + pubkey: account.pubkey.to_string(), + access: access.text().to_string(), + name: account_name, + }); + } + + if !account_rows.is_empty() { + let table = Table::new(account_rows) + .to_string() + .lines() + .map(|line| format!("{}{}", accounts_indent, line)) + .collect::<Vec<_>>() + .join("\n"); + writeln!(output, "{}", table)?; + } + }
+ + // Write inner instructions recursively + for (i, inner) in instruction.inner_instructions.iter().enumerate() { + if depth < self.config.max_cpi_depth { + self.write_instruction(output, inner, depth + 1, i + 1)?; + } + } + + Ok(()) + } + + /// Collapse simple multiline enum variants onto one line + /// Converts `Some(\n 2,\n)` to `Some(2)` + fn collapse_simple_enums(&self, input: &str) -> String { + let mut result = String::with_capacity(input.len()); + let mut chars = input.chars().peekable(); + + while let Some(c) = chars.next() { + if c == '(' { + // Collect content until matching ) + let mut paren_content = String::new(); + let mut paren_depth = 1; + + while let Some(&next_c) = chars.peek() { + chars.next(); + if next_c == '(' { + paren_depth += 1; + paren_content.push(next_c); + } else if next_c == ')' { + paren_depth -= 1; + if paren_depth == 0 { + break; + } + paren_content.push(next_c); + } else { + paren_content.push(next_c); + } + } + + // Check if content is simple (just whitespace and a single value) + let trimmed = paren_content.trim().trim_end_matches(','); + let is_simple = (!trimmed.contains('(') + && !trimmed.contains('{') + && !trimmed.contains('[') + && !trimmed.contains('\n')) + || (trimmed.parse::<f64>().is_ok()) + || (trimmed == "true" || trimmed == "false") + || trimmed.is_empty(); + + if is_simple && paren_content.contains('\n') { + // Collapse to single line + result.push('('); + result.push_str(trimmed); + result.push(')'); + } else { + // Keep original + result.push('('); + result.push_str(&paren_content); + result.push(')'); + } + } else { + result.push(c); + } + } + + result + } + + /// Truncate byte arrays in a string to show first N and last N elements + /// Handles both single-line `[1, 2, 3, ...]` and multiline arrays from pretty Debug + fn truncate_byte_arrays(input: &str, show_start: usize, show_end: usize) -> String { + let min_elements_to_truncate = show_start + show_end + 4; + + let mut result =
String::with_capacity(input.len()); + let mut chars = input.chars().peekable(); + + while let Some(c) = chars.next() { + if c == '[' { + // Potential start of an array - collect until matching ] + let mut array_content = String::new(); + let mut bracket_depth = 1; + let mut is_byte_array = true; + + while let Some(&next_c) = chars.peek() { + chars.next(); + if next_c == '[' { + bracket_depth += 1; + is_byte_array = false; // Nested arrays aren't simple byte arrays + array_content.push(next_c); + } else if next_c == ']' { + bracket_depth -= 1; + if bracket_depth == 0 { + break; + } + array_content.push(next_c); + } else { + // Check if content looks like a byte array (numbers, commas, whitespace) + if !next_c.is_ascii_digit() && next_c != ',' && !next_c.is_whitespace() { + is_byte_array = false; + } + array_content.push(next_c); + } + } + + if is_byte_array && !array_content.is_empty() { + // Parse elements (split by comma, trim whitespace) + let elements: Vec<&str> = array_content + .split(',') + .map(|s| s.trim()) + .filter(|s| !s.is_empty()) + .collect(); + + if elements.len() >= min_elements_to_truncate { + // Truncate: show first N and last N + let start_elements: Vec<&str> = + elements.iter().take(show_start).copied().collect(); + let end_elements: Vec<&str> = elements + .iter() + .skip(elements.len().saturating_sub(show_end)) + .copied() + .collect(); + + result.push('['); + result.push_str(&start_elements.join(", ")); + result.push_str(", ..."); + result.push_str(&format!("({} bytes)", elements.len())); + result.push_str("..., "); + result.push_str(&end_elements.join(", ")); + result.push(']'); + } else { + // Keep original + result.push('['); + result.push_str(&array_content); + result.push(']'); + } + } else { + // Not a byte array - recursively process the content to handle nested byte arrays + let processed_content = + Self::truncate_byte_arrays(&array_content, show_start, show_end); + result.push('['); + result.push_str(&processed_content); + 
result.push(']'); + } + } else { + result.push(c); + } + } + + result + } + + /// Write a single decoded field (called recursively for nested fields) + fn write_decoded_field( + &self, + field: &crate::DecodedField, + output: &mut String, + indent: &str, + depth: usize, + ) -> fmt::Result { + let field_indent = format!("{} {}", indent, " ".repeat(depth)); + if field.children.is_empty() { + // Apply formatting transformations if enabled + let display_value = if let Some((first, last)) = self.config.truncate_byte_arrays { + let collapsed = self.collapse_simple_enums(&field.value); + Self::truncate_byte_arrays(&collapsed, first, last) + } else { + field.value.clone() + }; + + // Handle multiline values by indenting each subsequent line + if display_value.contains('\n') { + let continuation_indent = format!("{} ", field_indent); + let indented_value = display_value + .lines() + .enumerate() + .map(|(i, line)| { + if i == 0 { + line.to_string() + } else { + format!("{}{}", continuation_indent, line) + } + }) + .collect::<Vec<_>>() + .join("\n"); + // Skip "name: " prefix if field name is empty + if field.name.is_empty() { + writeln!( + output, + "{}{}{}{}", + field_indent, self.colors.cyan, indented_value, self.colors.reset + )?; + } else { + writeln!( + output, + "{}{}{}: {}{}{}", + field_indent, + self.colors.gray, + field.name, + self.colors.cyan, + indented_value, + self.colors.reset + )?; + } + } else { + // Skip "name: " prefix if field name is empty + if field.name.is_empty() { + writeln!( + output, + "{}{}{}{}", + field_indent, self.colors.cyan, display_value, self.colors.reset + )?; + } else { + writeln!( + output, + "{}{}{}: {}{}{}", + field_indent, + self.colors.gray, + field.name, + self.colors.cyan, + display_value, + self.colors.reset + )?; + } + } + } else { + // Skip "name:" if field name is empty + if !field.name.is_empty() { + writeln!( + output, + "{}{}{}:{}", + field_indent, self.colors.gray, field.name, self.colors.reset + )?; + } + // Depth guard to
prevent stack overflow from deeply nested fields + if depth < self.config.max_cpi_depth { + for child in &field.children { + self.write_decoded_field(child, output, indent, depth + 1)?; + } + } else { + writeln!( + output, + "{} {}{}", + field_indent, self.colors.gray, self.colors.reset + )?; + } + } + Ok(()) + } + + /// Write account changes section + fn write_account_changes_section( + &self, + output: &mut String, + log: &EnhancedTransactionLog, + ) -> fmt::Result { + writeln!(output)?; + writeln!( + output, + "{}Account Changes ({}):{}\n", + self.colors.bold, + log.account_changes.len(), + self.colors.reset + )?; + + for change in &log.account_changes { + self.write_account_change(output, change)?; + } + + Ok(()) + } + + /// Write single account change + fn write_account_change(&self, output: &mut String, change: &AccountChange) -> fmt::Result { + writeln!( + output, + "│ {}{} {} ({}) - {}{}{}", + change.access.symbol(change.account_index), + self.colors.cyan, + change.pubkey, + change.access.text(), + self.colors.yellow, + change.account_type, + self.colors.reset + )?; + + if change.lamports_before != change.lamports_after { + writeln!( + output, + "│ {}Lamports: {} → {}{}", + self.colors.gray, change.lamports_before, change.lamports_after, self.colors.reset + )?; + } + + Ok(()) + } + + /// Write Light Protocol events section + fn write_light_events_section( + &self, + output: &mut String, + log: &EnhancedTransactionLog, + ) -> fmt::Result { + writeln!(output)?; + writeln!( + output, + "{}Light Protocol Events ({}):{}\n", + self.colors.bold, + log.light_events.len(), + self.colors.reset + )?; + + for event in &log.light_events { + writeln!( + output, + "│ {}Event: {}{}{}", + self.colors.blue, self.colors.yellow, event.event_type, self.colors.reset + )?; + + if !event.compressed_accounts.is_empty() { + writeln!( + output, + "│ {}Compressed Accounts: {}{}", + self.colors.gray, + event.compressed_accounts.len(), + self.colors.reset + )?; + } + + if 
!event.merkle_tree_changes.is_empty() { + writeln!( + output, + "│ {}Merkle Tree Changes: {}{}", + self.colors.gray, + event.merkle_tree_changes.len(), + self.colors.reset + )?; + } + } + + Ok(()) + } + + /// Write program logs section using LiteSVM's pretty logs + fn write_program_logs_section( + &self, + output: &mut String, + log: &EnhancedTransactionLog, + ) -> fmt::Result { + writeln!(output)?; + writeln!( + output, + "{}│{} {}Program Logs:{}", + self.colors.gray, self.colors.reset, self.colors.bold, self.colors.reset + )?; + writeln!(output, "{}│{}", self.colors.gray, self.colors.reset)?; + + // Display LiteSVM's pretty formatted logs with proper indentation + for line in log.program_logs_pretty.lines() { + if !line.trim().is_empty() { + writeln!( + output, + "{}│{} {}", + self.colors.gray, self.colors.reset, line + )?; + } + } + + Ok(()) + } + + /// Get tree-style indentation for given depth + fn get_tree_indent(&self, depth: usize) -> String { + let border = format!("{}│{} ", self.colors.gray, self.colors.reset); + if depth == 0 { + border + } else { + format!("{}{}", border, "│ ".repeat(depth)) + } + } + + /// Get color for transaction status + fn status_color(&self, status: &TransactionStatus) -> &str { + match status { + TransactionStatus::Success => self.colors.green, + TransactionStatus::Failed(_) => self.colors.red, + TransactionStatus::Unknown => self.colors.yellow, + } + } + + /// Get human-readable name for known accounts using constants and test accounts + fn get_account_name(&self, pubkey: &Pubkey) -> String { + #[cfg(feature = "light-protocol")] + { + use light_sdk_types::constants; + + let pubkey_bytes = pubkey.to_bytes(); + + // Light Protocol Programs and Accounts from constants + let light_accounts: &[([u8; 32], &str)] = &[ + (constants::LIGHT_SYSTEM_PROGRAM_ID, "light system program"), + ( + constants::ACCOUNT_COMPRESSION_PROGRAM_ID, + "account compression program", + ), + (constants::REGISTERED_PROGRAM_PDA, "registered program pda"), + ( + 
constants::ACCOUNT_COMPRESSION_AUTHORITY_PDA, + "account compression authority", + ), + (constants::NOOP_PROGRAM_ID, "noop program"), + ( + constants::LIGHT_TOKEN_PROGRAM_ID, + "compressed token program", + ), + (constants::ADDRESS_TREE_V1, "address tree v1"), + (constants::ADDRESS_QUEUE_V1, "address queue v1"), + (constants::SOL_POOL_PDA, "sol pool pda"), + ]; + + for (id, name) in light_accounts { + if pubkey_bytes == *id { + return name.to_string(); + } + } + } + + // String-based matches for test accounts and other addresses + let pubkey_str = pubkey.to_string(); + for (addr, name) in KNOWN_ACCOUNTS { + if pubkey_str == *addr { + return name.to_string(); + } + } + + // Classify based on curve: on-curve = wallet, off-curve = pda (or program, but we can't tell without executable flag) + if pubkey.is_on_curve() { + "unknown wallet".to_string() + } else { + "unknown pda".to_string() + } + } +} diff --git a/sdk-libs/instruction-decoder/src/lib.rs b/sdk-libs/instruction-decoder/src/lib.rs new file mode 100644 index 0000000000..2ccea7d1cf --- /dev/null +++ b/sdk-libs/instruction-decoder/src/lib.rs @@ -0,0 +1,63 @@ +//! Instruction decoder library for Light Protocol. +//! +//! This crate provides: +//! - Core types for instruction decoding (DecodedField, DecodedInstruction, InstructionDecoder trait) +//! - Decoder registry for managing multiple program decoders +//! - Built-in decoders for Light Protocol programs (System, Compressed Token, etc.) +//! - Transaction logging configuration and formatting utilities +//! +//! The crate is designed to be independent of LiteSVM/test infrastructure, +//! enabling use in both test environments and standalone tools. +//! +//! Note: Most functionality is only available off-chain (not on Solana targets). 
+ +// Re-export solana types for use by dependent crates (available on all targets) +// Re-export derive macro for #[instruction_decoder] +pub use light_instruction_decoder_derive::instruction_decoder; +pub use solana_instruction; +pub use solana_pubkey; +pub use solana_signature; + +// Core types available on all targets (needed by derive macros) +mod core; +pub use core::{DecodedField, DecodedInstruction, InstructionDecoder}; + +// Off-chain only modules (uses tabled, derive macros, DecoderRegistry) +#[cfg(not(target_os = "solana"))] +pub mod config; +#[cfg(not(target_os = "solana"))] +pub mod formatter; +#[cfg(not(target_os = "solana"))] +pub mod programs; +#[cfg(not(target_os = "solana"))] +pub mod registry; +#[cfg(not(target_os = "solana"))] +pub mod types; + +// Re-export main types from types module +// Re-export config types +#[cfg(not(target_os = "solana"))] +pub use config::{EnhancedLoggingConfig, LogVerbosity}; +// Re-export formatter +#[cfg(not(target_os = "solana"))] +pub use formatter::{Colors, TransactionFormatter}; +// Re-export Light Protocol program decoders (requires light-protocol feature) +#[cfg(all(not(target_os = "solana"), feature = "light-protocol"))] +pub use programs::{ + AccountCompressionInstructionDecoder, CTokenInstructionDecoder, LightSystemInstructionDecoder, + RegistryInstructionDecoder, +}; +// Re-export program decoders (generic Solana programs) +#[cfg(not(target_os = "solana"))] +pub use programs::{ + ComputeBudgetInstructionDecoder, SplTokenInstructionDecoder, SystemInstructionDecoder, + Token2022InstructionDecoder, +}; +// Re-export registry +#[cfg(not(target_os = "solana"))] +pub use registry::DecoderRegistry; +#[cfg(not(target_os = "solana"))] +pub use types::{ + AccountAccess, AccountChange, CompressedAccountInfo, EnhancedInstructionLog, + EnhancedTransactionLog, LightProtocolEvent, MerkleTreeChange, TransactionStatus, +}; diff --git a/sdk-libs/instruction-decoder/src/programs/account_compression.rs 
b/sdk-libs/instruction-decoder/src/programs/account_compression.rs new file mode 100644 index 0000000000..6e8bafc2ee --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/account_compression.rs @@ -0,0 +1,133 @@ +//! Account Compression program instruction decoder. +//! +//! This module provides a macro-derived decoder for the Account Compression program, +//! which uses 8-byte Anchor discriminators. +//! +//! The Account Compression program manages: +//! - Group authority and program registration +//! - State Merkle tree initialization and operations +//! - Address Merkle tree initialization and operations +//! - Batched tree operations with ZK proofs +//! - Tree rollover operations +//! - State migration + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use light_instruction_decoder_derive::InstructionDecoder; + +/// Account Compression program instructions. +/// +/// The Account Compression program uses 8-byte Anchor discriminators computed from +/// sha256("global:<instruction_name>").
+#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq", + program_name = "Account Compression", + discriminator_size = 8 +)] +pub enum AccountCompressionInstruction { + // ======================================================================== + // Group Authority Management + // ======================================================================== + /// Initialize a group authority (allows multiple programs to share Merkle trees) + #[instruction_decoder(account_names = ["authority", "seed", "group_authority", "system_program"])] + InitializeGroupAuthority, + + /// Update the group authority + #[instruction_decoder(account_names = ["authority", "group_authority"])] + UpdateGroupAuthority, + + // ======================================================================== + // Program Registration + // ======================================================================== + /// Register a program to a group + #[instruction_decoder(account_names = ["authority", "program_to_be_registered", "registered_program_pda", "group_authority_pda", "system_program"])] + RegisterProgramToGroup, + + /// Deregister a program from its group + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "group_authority_pda", "close_recipient"])] + DeregisterProgram, + + /// Resize a registered program PDA (v1 to v2 migration) + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "system_program"])] + ResizeRegisteredProgramPda, + + // ======================================================================== + // State Tree Operations (v1 - concurrent Merkle tree) + // ======================================================================== + /// Initialize a state Merkle tree and nullifier queue + #[instruction_decoder(account_names = ["authority", "merkle_tree", "nullifier_queue", "registered_program_pda"])] + InitializeStateMerkleTreeAndNullifierQueue, + + /// Nullify leaves in 
a state Merkle tree + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "log_wrapper", "merkle_tree", "nullifier_queue"])] + NullifyLeaves, + + /// Rollover a state Merkle tree and nullifier queue + #[instruction_decoder(account_names = ["fee_payer", "authority", "registered_program_pda", "new_state_merkle_tree", "new_nullifier_queue", "old_state_merkle_tree", "old_nullifier_queue"])] + RolloverStateMerkleTreeAndNullifierQueue, + + // ======================================================================== + // Address Tree Operations (v1 - indexed Merkle tree) + // ======================================================================== + /// Initialize an address Merkle tree and queue + #[instruction_decoder(account_names = ["authority", "merkle_tree", "queue", "registered_program_pda"])] + InitializeAddressMerkleTreeAndQueue, + + /// Update an address Merkle tree with a new address + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "queue", "merkle_tree", "log_wrapper"])] + UpdateAddressMerkleTree, + + /// Rollover an address Merkle tree and queue + #[instruction_decoder(account_names = ["fee_payer", "authority", "registered_program_pda", "new_address_merkle_tree", "new_queue", "old_address_merkle_tree", "old_queue"])] + RolloverAddressMerkleTreeAndQueue, + + // ======================================================================== + // Queue Operations + // ======================================================================== + /// Insert nullifiers, leaves, and addresses into v1 and batched Merkle trees + #[instruction_decoder(account_names = ["authority"])] + InsertIntoQueues, + + // ======================================================================== + // Batched Tree Operations (v2 - with ZK proofs) + // ======================================================================== + /// Initialize a batched state Merkle tree and output queue + #[instruction_decoder(account_names = ["authority", 
"merkle_tree", "queue", "registered_program_pda"])] + InitializeBatchedStateMerkleTree, + + /// Initialize a batched address Merkle tree + #[instruction_decoder(account_names = ["authority", "merkle_tree", "registered_program_pda"])] + InitializeBatchedAddressMerkleTree, + + /// Nullify a batch of leaves from input queue to state Merkle tree with ZK proof + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "log_wrapper", "merkle_tree"])] + BatchNullify, + + /// Append a batch of leaves from output queue to state Merkle tree with ZK proof + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "log_wrapper", "merkle_tree", "output_queue"])] + BatchAppend, + + /// Insert a batch of addresses into a batched address Merkle tree with ZK proof + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "log_wrapper", "merkle_tree"])] + BatchUpdateAddressTree, + + // ======================================================================== + // Batched Rollover Operations + // ======================================================================== + /// Rollover a batched address Merkle tree + #[instruction_decoder(account_names = ["fee_payer", "authority", "registered_program_pda", "new_address_merkle_tree", "old_address_merkle_tree"])] + RolloverBatchedAddressMerkleTree, + + /// Rollover a batched state Merkle tree and output queue + #[instruction_decoder(account_names = ["fee_payer", "authority", "registered_program_pda", "new_state_merkle_tree", "old_state_merkle_tree", "new_output_queue", "old_output_queue"])] + RolloverBatchedStateMerkleTree, + + // ======================================================================== + // Migration + // ======================================================================== + /// Migrate state from a v1 state Merkle tree to a v2 state Merkle tree + #[instruction_decoder(account_names = ["authority", "registered_program_pda", "log_wrapper", "merkle_tree", 
"output_queue"])] + MigrateState, +} diff --git a/sdk-libs/instruction-decoder/src/programs/compute_budget.rs b/sdk-libs/instruction-decoder/src/programs/compute_budget.rs new file mode 100644 index 0000000000..afceeb1222 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/compute_budget.rs @@ -0,0 +1,36 @@ +//! ComputeBudget program instruction decoder. +//! +//! This module provides a macro-derived decoder for the Solana ComputeBudget program, +//! which uses single-byte discriminators based on variant indices. + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use light_instruction_decoder_derive::InstructionDecoder; + +/// ComputeBudget program instructions. +/// +/// The ComputeBudget program uses a 1-byte discriminator (variant index). +/// Each variant's discriminator is its position in this enum (0, 1, 2, ...). +#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "ComputeBudget111111111111111111111111111111", + program_name = "Compute Budget", + discriminator_size = 1 +)] +pub enum ComputeBudgetInstruction { + /// Deprecated variant (index 0) + Unused, + + /// Request a specific heap frame size (index 1) + RequestHeapFrame { bytes: u32 }, + + /// Set compute unit limit for the transaction (index 2) + SetComputeUnitLimit { units: u32 }, + + /// Set compute unit price in micro-lamports (index 3) + SetComputeUnitPrice { micro_lamports: u64 }, + + /// Set loaded accounts data size limit (index 4) + SetLoadedAccountsDataSizeLimit { bytes: u32 }, +} diff --git a/sdk-libs/instruction-decoder/src/programs/ctoken.rs b/sdk-libs/instruction-decoder/src/programs/ctoken.rs new file mode 100644 index 0000000000..3faa163c24 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/ctoken.rs @@ -0,0 +1,490 @@ +//! Compressed Token (CToken) program instruction decoder. +//! +//! This module provides a macro-derived decoder for the Light Token (CToken) program, +//! 
which uses non-sequential 1-byte discriminators for Pinocchio instructions. +//! +//! Note: This decoder only handles Pinocchio (1-byte) instructions. +//! Anchor (8-byte) instructions are not decoded by this macro-derived decoder. +//! +//! ## Instruction Data Formats +//! +//! Most CToken instructions have optional max_top_up suffix: +//! - Transfer, MintTo, Burn: 8 bytes (amount) or 10 bytes (amount + max_top_up) +//! - TransferChecked, MintToChecked, BurnChecked: 9 bytes (amount + decimals) or 11 bytes (+ max_top_up) +//! - Approve: 8 bytes (amount) or 10 bytes (amount + max_top_up) +//! - Revoke: 0 bytes or 2 bytes (max_top_up) + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use light_instruction_decoder_derive::InstructionDecoder; +use light_token_interface::instructions::{ + mint_action::MintActionCompressedInstructionData, + transfer2::CompressedTokenInstructionDataTransfer2, +}; +use solana_instruction::AccountMeta; + +/// Standard token accounts (before packed_accounts). +/// Transfer2 has 10 fixed accounts at indices 0-9. +const PACKED_ACCOUNTS_START: usize = 10; + +/// Format Transfer2 instruction data with resolved pubkeys. +/// +/// This formatter provides a human-readable view of the transfer instruction, +/// resolving account indices to actual pubkeys from the instruction accounts. +/// +/// Mode detection: +/// - CPI context mode (cpi_context is Some): Packed accounts are passed via CPI context account, +/// not in the instruction's accounts array. Shows raw indices only. +/// - Direct mode (cpi_context is None): Packed accounts are in the accounts array at +/// PACKED_ACCOUNTS_START offset. Resolves indices to actual pubkeys. 
+/// +/// Index resolution: +/// - In CPI context mode: all indices shown as packed[N] (stored in CPI context account) +/// - In direct mode: all indices (owner, mint, delegate, merkle_tree, queue) are resolved +/// using PACKED_ACCOUNTS_START offset. Note: this assumes a specific account layout and +/// may show OUT_OF_BOUNDS if the actual layout differs. +#[cfg(not(target_os = "solana"))] +pub fn format_transfer2( + data: &CompressedTokenInstructionDataTransfer2, + accounts: &[AccountMeta], +) -> String { + use std::fmt::Write; + let mut output = String::new(); + + // Determine if packed accounts are in CPI context (not directly in accounts array) + // When cpi_context is Some, packed accounts are stored in/read from a CPI context account + let uses_cpi_context = data.cpi_context.is_some(); + + // Helper to resolve account index + // In CPI context mode: all indices are packed indices stored in CPI context + // In direct mode: packed indices are offset by PACKED_ACCOUNTS_START + let resolve = |index: u8| -> String { + if uses_cpi_context { + // All accounts (including trees/queues) are in CPI context + format!("packed[{}]", index) + } else { + accounts + .get(PACKED_ACCOUNTS_START + index as usize) + .map(|a| a.pubkey.to_string()) + .unwrap_or_else(|| { + format!("OUT_OF_BOUNDS({})", PACKED_ACCOUNTS_START + index as usize) + }) + } + }; + + // Header with mode indicator + if uses_cpi_context { + let _ = writeln!( + output, + "[CPI Context Mode - packed accounts in CPI context]" + ); + } + + // Top-level fields + let _ = writeln!(output, "output_queue: {}", resolve(data.output_queue)); + if data.max_top_up > 0 { + let _ = writeln!(output, "max_top_up: {}", data.max_top_up); + } + if data.with_transaction_hash { + let _ = writeln!(output, "with_transaction_hash: true"); + } + + // Input tokens + let _ = writeln!(output, "Input Tokens ({}):", data.in_token_data.len()); + for (i, token) in data.in_token_data.iter().enumerate() { + let _ = writeln!(output, " [{}]", 
i); + let _ = writeln!(output, " owner: {}", resolve(token.owner)); + let _ = writeln!(output, " mint: {}", resolve(token.mint)); + let _ = writeln!(output, " amount: {}", token.amount); + if token.has_delegate { + let _ = writeln!(output, " delegate: {}", resolve(token.delegate)); + } + let _ = writeln!(output, " version: {}", token.version); + // Merkle context + let _ = writeln!( + output, + " merkle_tree: {}", + resolve(token.merkle_context.merkle_tree_pubkey_index) + ); + let _ = writeln!( + output, + " queue: {}", + resolve(token.merkle_context.queue_pubkey_index) + ); + let _ = writeln!( + output, + " leaf_index: {}", + token.merkle_context.leaf_index + ); + let _ = writeln!(output, " root_index: {}", token.root_index); + } + + // Output tokens + let _ = writeln!(output, "Output Tokens ({}):", data.out_token_data.len()); + for (i, token) in data.out_token_data.iter().enumerate() { + let _ = writeln!(output, " [{}]", i); + let _ = writeln!(output, " owner: {}", resolve(token.owner)); + let _ = writeln!(output, " mint: {}", resolve(token.mint)); + let _ = writeln!(output, " amount: {}", token.amount); + if token.has_delegate { + let _ = writeln!(output, " delegate: {}", resolve(token.delegate)); + } + let _ = writeln!(output, " version: {}", token.version); + } + + // Compressions if present + if let Some(compressions) = &data.compressions { + let _ = writeln!(output, "Compressions ({}):", compressions.len()); + for (i, comp) in compressions.iter().enumerate() { + let _ = writeln!(output, " [{}]", i); + let _ = writeln!(output, " mode: {:?}", comp.mode); + let _ = writeln!(output, " amount: {}", comp.amount); + let _ = writeln!(output, " mint: {}", resolve(comp.mint)); + let _ = writeln!( + output, + " source_or_recipient: {}", + resolve(comp.source_or_recipient) + ); + let _ = writeln!(output, " authority: {}", resolve(comp.authority)); + } + } + + output +} + +/// Format MintAction instruction data with resolved pubkeys. 
+/// +/// This formatter provides a human-readable view of the mint action instruction, +/// resolving account indices to actual pubkeys from the instruction accounts. +/// +/// Mode detection: +/// - CPI context mode (cpi_context.set_context || first_set_context): Packed accounts are passed +/// via CPI context account, not in the instruction's accounts array. Shows raw indices only. +/// - Direct mode: Packed accounts are in the accounts array at PACKED_ACCOUNTS_START offset. +/// Resolves indices to actual pubkeys. +#[cfg(not(target_os = "solana"))] +pub fn format_mint_action( + data: &MintActionCompressedInstructionData, + accounts: &[AccountMeta], +) -> String { + use std::fmt::Write; + + use light_token_interface::instructions::mint_action::Action; + let mut output = String::new(); + + // CPI context mode: set_context OR first_set_context means packed accounts in CPI context + let uses_cpi_context = data + .cpi_context + .as_ref() + .map(|ctx| ctx.set_context || ctx.first_set_context) + .unwrap_or(false); + + // Helper to resolve account index + let resolve = |index: u8| -> String { + if uses_cpi_context { + format!("packed[{}]", index) + } else { + accounts + .get(PACKED_ACCOUNTS_START + index as usize) + .map(|a| a.pubkey.to_string()) + .unwrap_or_else(|| { + format!("OUT_OF_BOUNDS({})", PACKED_ACCOUNTS_START + index as usize) + }) + } + }; + + // Header with mode indicator + if uses_cpi_context { + let _ = writeln!( + output, + "[CPI Context Mode - packed accounts in CPI context]" + ); + } + + // Top-level fields + if data.create_mint.is_some() { + let _ = writeln!(output, "create_mint: true"); + } else { + let _ = writeln!(output, "leaf_index: {}", data.leaf_index); + if data.prove_by_index { + let _ = writeln!(output, "prove_by_index: true"); + } + } + let _ = writeln!(output, "root_index: {}", data.root_index); + if data.max_top_up > 0 { + let _ = writeln!(output, "max_top_up: {}", data.max_top_up); + } + + // Mint data summary (if present) + if let 
Some(mint) = &data.mint { + let _ = writeln!(output, "Mint:"); + let _ = writeln!(output, " supply: {}", mint.supply); + let _ = writeln!(output, " decimals: {}", mint.decimals); + if let Some(auth) = &mint.mint_authority { + let _ = writeln!( + output, + " mint_authority: {}", + bs58::encode(auth).into_string() + ); + } + if let Some(auth) = &mint.freeze_authority { + let _ = writeln!( + output, + " freeze_authority: {}", + bs58::encode(auth).into_string() + ); + } + if let Some(exts) = &mint.extensions { + let _ = writeln!(output, " extensions: {}", exts.len()); + } + } + + // Actions + let _ = writeln!(output, "Actions ({}):", data.actions.len()); + for (i, action) in data.actions.iter().enumerate() { + match action { + Action::MintToCompressed(a) => { + let _ = writeln!(output, " [{}] MintToCompressed:", i); + let _ = writeln!(output, " version: {}", a.token_account_version); + for (j, r) in a.recipients.iter().enumerate() { + let _ = writeln!( + output, + " recipient[{}]: {} amount: {}", + j, + bs58::encode(&r.recipient).into_string(), + r.amount + ); + } + } + Action::UpdateMintAuthority(a) => { + let authority_str = a + .new_authority + .as_ref() + .map(|p| bs58::encode(p).into_string()) + .unwrap_or_else(|| "None".to_string()); + let _ = writeln!(output, " [{}] UpdateMintAuthority: {}", i, authority_str); + } + Action::UpdateFreezeAuthority(a) => { + let authority_str = a + .new_authority + .as_ref() + .map(|p| bs58::encode(p).into_string()) + .unwrap_or_else(|| "None".to_string()); + let _ = writeln!(output, " [{}] UpdateFreezeAuthority: {}", i, authority_str); + } + Action::MintTo(a) => { + let _ = writeln!( + output, + " [{}] MintTo: account: {}, amount: {}", + i, + resolve(a.account_index), + a.amount + ); + } + Action::UpdateMetadataField(a) => { + let field_name = match a.field_type { + 0 => "Name", + 1 => "Symbol", + 2 => "Uri", + _ => "Custom", + }; + let _ = writeln!( + output, + " [{}] UpdateMetadataField: ext[{}] {} = {:?}", + i, + 
a.extension_index, + field_name, + String::from_utf8_lossy(&a.value) + ); + } + Action::UpdateMetadataAuthority(a) => { + let _ = writeln!( + output, + " [{}] UpdateMetadataAuthority: ext[{}] = {}", + i, + a.extension_index, + bs58::encode(&a.new_authority).into_string() + ); + } + Action::RemoveMetadataKey(a) => { + let _ = writeln!( + output, + " [{}] RemoveMetadataKey: ext[{}] key={:?} idempotent={}", + i, + a.extension_index, + String::from_utf8_lossy(&a.key), + a.idempotent != 0 + ); + } + Action::DecompressMint(a) => { + let _ = writeln!( + output, + " [{}] DecompressMint: rent_payment={} write_top_up={}", + i, a.rent_payment, a.write_top_up + ); + } + Action::CompressAndCloseMint(a) => { + let _ = writeln!( + output, + " [{}] CompressAndCloseMint: idempotent={}", + i, + a.idempotent != 0 + ); + } + } + } + + // CPI context details (if present) + if let Some(ctx) = &data.cpi_context { + let _ = writeln!(output, "CPI Context:"); + let _ = writeln!( + output, + " mode: {}", + if ctx.first_set_context { + "first_set_context" + } else if ctx.set_context { + "set_context" + } else { + "read" + } + ); + let _ = writeln!(output, " in_tree: packed[{}]", ctx.in_tree_index); + let _ = writeln!(output, " in_queue: packed[{}]", ctx.in_queue_index); + let _ = writeln!(output, " out_queue: packed[{}]", ctx.out_queue_index); + if ctx.token_out_queue_index > 0 { + let _ = writeln!( + output, + " token_out_queue: packed[{}]", + ctx.token_out_queue_index + ); + } + let _ = writeln!( + output, + " address_tree: {}", + bs58::encode(&ctx.address_tree_pubkey).into_string() + ); + } + + output +} + +/// Compressed Token (CToken) program instructions. +/// +/// The CToken program uses non-sequential 1-byte discriminators. +/// Each variant has an explicit #[discriminator = N] attribute. +/// +/// Field definitions show the base required fields; max_top_up is optional. 
+#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m", + program_name = "Light Token", + discriminator_size = 1 +)] +pub enum CTokenInstruction { + /// Transfer compressed tokens (discriminator 3) + /// Data: amount (u64) [+ max_top_up (u16)] + #[discriminator = 3] + #[instruction_decoder(account_names = ["source", "destination", "authority"])] + Transfer { amount: u64 }, + + /// Approve delegate for compressed tokens (discriminator 4) + /// Data: amount (u64) [+ max_top_up (u16)] + #[discriminator = 4] + #[instruction_decoder(account_names = ["source", "delegate", "owner"])] + Approve { amount: u64 }, + + /// Revoke delegate authority (discriminator 5) + /// Data: [max_top_up (u16)] + #[discriminator = 5] + #[instruction_decoder(account_names = ["source", "owner"])] + Revoke, + + /// Mint compressed tokens to an account (discriminator 7) + /// Data: amount (u64) [+ max_top_up (u16)] + #[discriminator = 7] + #[instruction_decoder(account_names = ["cmint", "destination", "authority"])] + MintTo { amount: u64 }, + + /// Burn compressed tokens (discriminator 8) + /// Data: amount (u64) [+ max_top_up (u16)] + #[discriminator = 8] + #[instruction_decoder(account_names = ["source", "cmint", "authority"])] + Burn { amount: u64 }, + + /// Close a compressed token account (discriminator 9) + #[discriminator = 9] + #[instruction_decoder(account_names = ["account", "destination", "authority"])] + CloseTokenAccount, + + /// Freeze a compressed token account (discriminator 10) + #[discriminator = 10] + #[instruction_decoder(account_names = ["account", "mint", "authority"])] + FreezeAccount, + + /// Thaw a frozen compressed token account (discriminator 11) + #[discriminator = 11] + #[instruction_decoder(account_names = ["account", "mint", "authority"])] + ThawAccount, + + /// Transfer compressed tokens with decimals check (discriminator 12) + /// Data: amount (u64) + decimals (u8) [+ max_top_up (u16)] + 
#[discriminator = 12] + #[instruction_decoder(account_names = ["source", "mint", "destination", "authority"])] + TransferChecked { amount: u64, decimals: u8 }, + + /// Mint compressed tokens with decimals check (discriminator 14) + /// Data: amount (u64) + decimals (u8) [+ max_top_up (u16)] + #[discriminator = 14] + #[instruction_decoder(account_names = ["cmint", "destination", "authority"])] + MintToChecked { amount: u64, decimals: u8 }, + + /// Burn compressed tokens with decimals check (discriminator 15) + /// Data: amount (u64) + decimals (u8) [+ max_top_up (u16)] + #[discriminator = 15] + #[instruction_decoder(account_names = ["source", "cmint", "authority"])] + BurnChecked { amount: u64, decimals: u8 }, + + /// Create a new compressed token account (discriminator 18) + #[discriminator = 18] + #[instruction_decoder(account_names = ["token_account", "mint", "payer", "config", "system_program", "rent_payer"])] + CreateTokenAccount, + + /// Create an associated compressed token account (discriminator 100) + #[discriminator = 100] + #[instruction_decoder(account_names = ["owner", "mint", "fee_payer", "ata", "system_program", "config", "rent_payer"])] + CreateAssociatedTokenAccount, + + /// Transfer v2 with additional options (discriminator 101) + #[discriminator = 101] + #[instruction_decoder( + account_names = ["fee_payer", "authority", "registered_program_pda", "noop_program", "account_compression_authority", "account_compression_program", "self_program", "cpi_signer", "light_system_program", "system_program"], + params = CompressedTokenInstructionDataTransfer2, + pretty_formatter = crate::programs::ctoken::format_transfer2 + )] + Transfer2, + + /// Create associated token account idempotently (discriminator 102) + #[discriminator = 102] + #[instruction_decoder(account_names = ["owner", "mint", "fee_payer", "ata", "system_program", "config", "rent_payer"])] + CreateAssociatedTokenAccountIdempotent, + + /// Mint action for compressed tokens (discriminator 103) + 
#[discriminator = 103] + #[instruction_decoder( + account_names = ["fee_payer", "authority", "registered_program_pda", "noop_program", "account_compression_authority", "account_compression_program", "self_program", "cpi_signer", "light_system_program", "system_program"], + params = MintActionCompressedInstructionData, + pretty_formatter = crate::programs::ctoken::format_mint_action + )] + MintAction, + + /// Claim compressed tokens (discriminator 104) + #[discriminator = 104] + #[instruction_decoder(account_names = ["forester", "ctoken_account", "rent_recipient", "config"])] + Claim, + + /// Withdraw from funding pool (discriminator 105) + #[discriminator = 105] + #[instruction_decoder(account_names = ["authority", "rent_recipient", "config", "destination"])] + WithdrawFundingPool, +} diff --git a/sdk-libs/instruction-decoder/src/programs/light_system.rs b/sdk-libs/instruction-decoder/src/programs/light_system.rs new file mode 100644 index 0000000000..d65b7316c8 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/light_system.rs @@ -0,0 +1,758 @@ +//! Light System Program instruction decoder. +//! +//! This module provides a macro-derived decoder for the Light System Program, +//! which uses 8-byte discriminators for compressed account operations. +//! +//! ## Instructions +//! +//! - `Invoke`: Direct invocation of Light System (has 4-byte Anchor prefix after discriminator) +//! - `InvokeCpi`: CPI invocation from another program (has 4-byte Anchor prefix after discriminator) +//! - `InvokeCpiWithReadOnly`: CPI with read-only accounts (no prefix, borsh-only) +//! 
- `InvokeCpiWithAccountInfo`: CPI with full account info (no prefix, borsh-only) + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use borsh::BorshDeserialize; +use light_compressed_account::instruction_data::{ + data::InstructionDataInvoke, invoke_cpi::InstructionDataInvokeCpi, + with_account_info::InstructionDataInvokeCpiWithAccountInfo, + with_readonly::InstructionDataInvokeCpiWithReadOnly, +}; +use light_instruction_decoder_derive::InstructionDecoder; +use solana_instruction::AccountMeta; +use solana_pubkey::Pubkey; + +/// System program ID string for account resolution +const SYSTEM_PROGRAM_ID: &str = "11111111111111111111111111111111"; + +// ============================================================================ +// Helper Functions for Deduplicating Formatter Code +// ============================================================================ + +/// Format input compressed accounts section for Invoke/InvokeCpi. 
+/// +/// Formats `PackedCompressedAccountWithMerkleContext` accounts with: +/// owner, address, lamports, data_hash, discriminator, merkle_tree, leaf_index, root_index +#[cfg(not(target_os = "solana"))] +fn format_input_accounts_section( + output: &mut String, + accounts: &[light_compressed_account::compressed_account::PackedCompressedAccountWithMerkleContext], + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if accounts.is_empty() { + return; + } + + let _ = writeln!(output, "Input Accounts ({}):", accounts.len()); + for (i, acc) in accounts.iter().enumerate() { + let _ = writeln!(output, " [{}]", i); + let _ = writeln!( + output, + " owner: {}", + Pubkey::new_from_array(acc.compressed_account.owner.to_bytes()) + ); + if let Some(addr) = acc.compressed_account.address { + let _ = writeln!(output, " address: {:?}", addr); + } + let _ = writeln!( + output, + " lamports: {}", + acc.compressed_account.lamports + ); + if let Some(ref acc_data) = acc.compressed_account.data { + let _ = writeln!(output, " data_hash: {:?}", acc_data.data_hash); + let _ = writeln!(output, " discriminator: {:?}", acc_data.discriminator); + } + let tree_idx = Some(acc.merkle_context.merkle_tree_pubkey_index); + let queue_idx = Some(acc.merkle_context.queue_pubkey_index); + let (tree_pubkey, _queue_pubkey) = + resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, queue_idx); + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " merkle_tree_pubkey (index {}): {}", + acc.merkle_context.merkle_tree_pubkey_index, tp + ); + } + let _ = writeln!( + output, + " leaf_index: {}", + acc.merkle_context.leaf_index + ); + let _ = writeln!(output, " root_index: {}", acc.root_index); + } +} + +/// Format input compressed accounts section for InvokeCpiWithReadOnly. +/// +/// Formats `InAccount` accounts with a shared owner from `invoking_program_id`. 
+#[cfg(not(target_os = "solana"))] +fn format_readonly_input_accounts_section( + output: &mut String, + accounts: &[light_compressed_account::instruction_data::with_readonly::InAccount], + invoking_program_id: &light_compressed_account::pubkey::Pubkey, + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if accounts.is_empty() { + return; + } + + let _ = writeln!(output, "Input Accounts ({}):", accounts.len()); + for (i, acc) in accounts.iter().enumerate() { + let _ = writeln!(output, " [{}]", i); + let _ = writeln!( + output, + " owner: {}", + Pubkey::new_from_array(invoking_program_id.to_bytes()) + ); + if let Some(addr) = acc.address { + let _ = writeln!(output, " address: {:?}", addr); + } + let _ = writeln!(output, " lamports: {}", acc.lamports); + let _ = writeln!(output, " data_hash: {:?}", acc.data_hash); + let _ = writeln!(output, " discriminator: {:?}", acc.discriminator); + let tree_idx = Some(acc.merkle_context.merkle_tree_pubkey_index); + let queue_idx = Some(acc.merkle_context.queue_pubkey_index); + let (tree_pubkey, _queue_pubkey) = + resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, queue_idx); + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " merkle_tree_pubkey (index {}): {}", + acc.merkle_context.merkle_tree_pubkey_index, tp + ); + } + let _ = writeln!( + output, + " leaf_index: {}", + acc.merkle_context.leaf_index + ); + let _ = writeln!(output, " root_index: {}", acc.root_index); + } +} + +/// Format output compressed accounts section. 
+/// +/// Formats `OutputCompressedAccountWithPackedContext` accounts with: +/// owner, address, lamports, data_hash, discriminator, data, merkle_tree +#[cfg(not(target_os = "solana"))] +fn format_output_accounts_section( + output: &mut String, + accounts: &[light_compressed_account::instruction_data::data::OutputCompressedAccountWithPackedContext], + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if accounts.is_empty() { + return; + } + + let _ = writeln!(output, "Output Accounts ({}):", accounts.len()); + for (i, acc) in accounts.iter().enumerate() { + let _ = writeln!(output, " [{}]", i); + let _ = writeln!( + output, + " owner: {}", + Pubkey::new_from_array(acc.compressed_account.owner.to_bytes()) + ); + if let Some(addr) = acc.compressed_account.address { + let _ = writeln!(output, " address: {:?}", addr); + } + let _ = writeln!( + output, + " lamports: {}", + acc.compressed_account.lamports + ); + if let Some(ref acc_data) = acc.compressed_account.data { + let _ = writeln!(output, " data_hash: {:?}", acc_data.data_hash); + let _ = writeln!(output, " discriminator: {:?}", acc_data.discriminator); + let _ = writeln!( + output, + " data ({} bytes): {:?}", + acc_data.data.len(), + acc_data.data + ); + } + let tree_idx = Some(acc.merkle_tree_index); + let (tree_pubkey, _) = resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, None); + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " merkle_tree_pubkey (index {}): {}", + acc.merkle_tree_index, tp + ); + } + } +} + +/// Format output compressed accounts section for InvokeCpiWithReadOnly. +/// +/// Uses `invoking_program_id` as owner instead of per-account owner. 
+#[cfg(not(target_os = "solana"))] +fn format_readonly_output_accounts_section( + output: &mut String, + accounts: &[light_compressed_account::instruction_data::data::OutputCompressedAccountWithPackedContext], + invoking_program_id: &light_compressed_account::pubkey::Pubkey, + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if accounts.is_empty() { + return; + } + + let _ = writeln!(output, "Output Accounts ({}):", accounts.len()); + for (i, acc) in accounts.iter().enumerate() { + let _ = writeln!(output, " [{}]", i); + let _ = writeln!( + output, + " owner: {}", + Pubkey::new_from_array(invoking_program_id.to_bytes()) + ); + if let Some(addr) = acc.compressed_account.address { + let _ = writeln!(output, " address: {:?}", addr); + } + let _ = writeln!( + output, + " lamports: {}", + acc.compressed_account.lamports + ); + if let Some(ref acc_data) = acc.compressed_account.data { + let _ = writeln!(output, " data_hash: {:?}", acc_data.data_hash); + let _ = writeln!(output, " discriminator: {:?}", acc_data.discriminator); + let _ = writeln!( + output, + " data ({} bytes): {:?}", + acc_data.data.len(), + acc_data.data + ); + } + let tree_idx = Some(acc.merkle_tree_index); + let (tree_pubkey, _) = resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, None); + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " merkle_tree_pubkey (index {}): {}", + acc.merkle_tree_index, tp + ); + } + } +} + +/// Format new address params section for Invoke/InvokeCpi. 
+/// +/// Formats `NewAddressParamsPacked` with: seed, queue, tree +#[cfg(not(target_os = "solana"))] +fn format_new_address_params_section( + output: &mut String, + params: &[light_compressed_account::instruction_data::data::NewAddressParamsPacked], + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if params.is_empty() { + return; + } + + let _ = writeln!(output, "New Addresses ({}):", params.len()); + for (i, param) in params.iter().enumerate() { + let _ = writeln!(output, " [{}] seed: {:?}", i, param.seed); + let tree_idx = Some(param.address_merkle_tree_account_index); + let queue_idx = Some(param.address_queue_account_index); + let (tree_pubkey, queue_pubkey) = + resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, queue_idx); + if let Some(qp) = queue_pubkey { + let _ = writeln!( + output, + " queue[{}]: {}", + param.address_queue_account_index, qp + ); + } + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " tree[{}]: {}", + param.address_merkle_tree_account_index, tp + ); + } + } +} + +/// Format new address params section with assignment info. 
+/// +/// Formats `NewAddressParamsAssignedPacked` with: seed, queue, tree, assigned +#[cfg(not(target_os = "solana"))] +fn format_new_address_params_assigned_section( + output: &mut String, + params: &[light_compressed_account::instruction_data::data::NewAddressParamsAssignedPacked], + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if params.is_empty() { + return; + } + + let _ = writeln!(output, "New Addresses ({}):", params.len()); + for (i, param) in params.iter().enumerate() { + let _ = writeln!(output, " [{}] seed: {:?}", i, param.seed); + let tree_idx = Some(param.address_merkle_tree_account_index); + let queue_idx = Some(param.address_queue_account_index); + let (tree_pubkey, queue_pubkey) = + resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, queue_idx); + if let Some(qp) = queue_pubkey { + let _ = writeln!( + output, + " queue[{}]: {}", + param.address_queue_account_index, qp + ); + } + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " tree[{}]: {}", + param.address_merkle_tree_account_index, tp + ); + } + let assigned = if param.assigned_to_account { + format!("account[{}]", param.assigned_account_index) + } else { + "None".to_string() + }; + let _ = writeln!(output, " assigned: {}", assigned); + } +} + +/// Format read-only addresses section. 
+/// +/// Formats `PackedReadOnlyAddress` with: address, tree +#[cfg(not(target_os = "solana"))] +fn format_read_only_addresses_section( + output: &mut String, + addresses: &[light_compressed_account::instruction_data::data::PackedReadOnlyAddress], + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if addresses.is_empty() { + return; + } + + let _ = writeln!(output, "Read-Only Addresses ({}):", addresses.len()); + for (i, addr) in addresses.iter().enumerate() { + let _ = writeln!(output, " [{}] address: {:?}", i, addr.address); + let tree_idx = Some(addr.address_merkle_tree_account_index); + let (tree_pubkey, _) = resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, None); + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " tree[{}]: {}", + addr.address_merkle_tree_account_index, tp + ); + } + } +} + +/// Format compress/decompress and relay fee section for Invoke/InvokeCpi. +#[cfg(not(target_os = "solana"))] +fn format_fee_section( + output: &mut String, + compress_or_decompress_lamports: Option<u64>, + is_compress: bool, + relay_fee: Option<u64>, +) { + use std::fmt::Write; + + if let Some(lamports) = compress_or_decompress_lamports { + let _ = writeln!( + output, + "Compress/Decompress: {} lamports (is_compress: {})", + lamports, is_compress + ); + } + + if let Some(fee) = relay_fee { + let _ = writeln!(output, "Relay fee: {} lamports", fee); + } +} + +/// Format compress/decompress section for ReadOnly/AccountInfo variants. +/// +/// Uses u64 directly instead of Option<u64>. +#[cfg(not(target_os = "solana"))] +fn format_compress_decompress_section( + output: &mut String, + compress_or_decompress_lamports: u64, + is_compress: bool, +) { + use std::fmt::Write; + + if compress_or_decompress_lamports > 0 { + let _ = writeln!( + output, + "Compress/Decompress: {} lamports (is_compress: {})", + compress_or_decompress_lamports, is_compress + ); + } +} + +/// Format account infos section for InvokeCpiWithAccountInfo. 
+/// +/// Formats `CompressedAccountInfo` with combined input/output per account. +#[cfg(not(target_os = "solana"))] +fn format_account_infos_section( + output: &mut String, + account_infos: &[light_compressed_account::instruction_data::with_account_info::CompressedAccountInfo], + instruction_accounts: &[AccountMeta], +) { + use std::fmt::Write; + + if account_infos.is_empty() { + return; + } + + let _ = writeln!(output, "Account Infos ({}):", account_infos.len()); + for (i, account_info) in account_infos.iter().enumerate() { + let _ = writeln!(output, " [{}]", i); + if let Some(addr) = account_info.address { + let _ = writeln!(output, " address: {:?}", addr); + } + + if let Some(ref input) = account_info.input { + let _ = writeln!(output, " Input:"); + let _ = writeln!(output, " lamports: {}", input.lamports); + let _ = writeln!(output, " data_hash: {:?}", input.data_hash); + let _ = writeln!(output, " discriminator: {:?}", input.discriminator); + let _ = writeln!( + output, + " leaf_index: {}", + input.merkle_context.leaf_index + ); + let _ = writeln!(output, " root_index: {}", input.root_index); + } + + if let Some(ref out) = account_info.output { + let _ = writeln!(output, " Output:"); + let _ = writeln!(output, " lamports: {}", out.lamports); + let _ = writeln!(output, " data_hash: {:?}", out.data_hash); + let _ = writeln!(output, " discriminator: {:?}", out.discriminator); + if !out.data.is_empty() { + let _ = writeln!( + output, + " data ({} bytes): {:?}", + out.data.len(), + out.data + ); + } + let tree_idx = Some(out.output_merkle_tree_index); + let (tree_pubkey, _) = + resolve_tree_and_queue_pubkeys(instruction_accounts, tree_idx, None); + if let Some(tp) = tree_pubkey { + let _ = writeln!( + output, + " merkle_tree_pubkey (index {}): {}", + out.output_merkle_tree_index, tp + ); + } + } + } +} + +/// Helper to resolve merkle tree and queue pubkeys from instruction accounts. +/// Tree accounts start 2 positions after the system program account. 
+fn resolve_tree_and_queue_pubkeys( + accounts: &[AccountMeta], + merkle_tree_index: Option<u8>, + nullifier_queue_index: Option<u8>, +) -> (Option<Pubkey>, Option<Pubkey>) { + let mut tree_pubkey = None; + let mut queue_pubkey = None; + + // Find the system program account position + let mut system_program_pos = None; + for (i, account) in accounts.iter().enumerate() { + if account.pubkey.to_string() == SYSTEM_PROGRAM_ID { + system_program_pos = Some(i); + break; + } + } + + if let Some(system_pos) = system_program_pos { + // Tree accounts start 2 positions after system program + let tree_accounts_start = system_pos + 2; + + if let Some(tree_idx) = merkle_tree_index { + let tree_account_pos = tree_accounts_start + tree_idx as usize; + if tree_account_pos < accounts.len() { + tree_pubkey = Some(accounts[tree_account_pos].pubkey); + } + } + + if let Some(queue_idx) = nullifier_queue_index { + let queue_account_pos = tree_accounts_start + queue_idx as usize; + if queue_account_pos < accounts.len() { + queue_pubkey = Some(accounts[queue_account_pos].pubkey); + } + } + } + + (tree_pubkey, queue_pubkey) +} + +/// Format InvokeCpiWithReadOnly instruction data. +/// +/// Note: This instruction does NOT have the 4-byte Anchor prefix - it uses pure borsh. 
+#[cfg(not(target_os = "solana"))] +pub fn format_invoke_cpi_readonly( + data: &InstructionDataInvokeCpiWithReadOnly, + accounts: &[AccountMeta], +) -> String { + use std::fmt::Write; + let mut output = String::new(); + + let _ = writeln!( + output, + "Accounts: in: {}, out: {}", + data.input_compressed_accounts.len(), + data.output_compressed_accounts.len() + ); + let _ = writeln!(output, "Proof: Validity proof"); + + format_readonly_input_accounts_section( + &mut output, + &data.input_compressed_accounts, + &data.invoking_program_id, + accounts, + ); + format_readonly_output_accounts_section( + &mut output, + &data.output_compressed_accounts, + &data.invoking_program_id, + accounts, + ); + format_new_address_params_assigned_section(&mut output, &data.new_address_params, accounts); + format_read_only_addresses_section(&mut output, &data.read_only_addresses, accounts); + format_compress_decompress_section( + &mut output, + data.compress_or_decompress_lamports, + data.is_compress, + ); + + output +} + +/// Format InvokeCpiWithAccountInfo instruction data. +/// +/// Note: This instruction does NOT have the 4-byte Anchor prefix - it uses pure borsh. 
+#[cfg(not(target_os = "solana"))] +pub fn format_invoke_cpi_account_info( + data: &InstructionDataInvokeCpiWithAccountInfo, + accounts: &[AccountMeta], +) -> String { + use std::fmt::Write; + let mut output = String::new(); + + let input_count = data + .account_infos + .iter() + .filter(|a| a.input.is_some()) + .count(); + let output_count = data + .account_infos + .iter() + .filter(|a| a.output.is_some()) + .count(); + + let _ = writeln!( + output, + "Accounts: in: {}, out: {}", + input_count, output_count + ); + let _ = writeln!(output, "Proof: Validity proof"); + + // Account infos with input/output (unique structure, kept inline) + format_account_infos_section(&mut output, &data.account_infos, accounts); + + format_new_address_params_assigned_section(&mut output, &data.new_address_params, accounts); + format_read_only_addresses_section(&mut output, &data.read_only_addresses, accounts); + format_compress_decompress_section( + &mut output, + data.compress_or_decompress_lamports, + data.is_compress, + ); + + output +} + +/// Wrapper type for Invoke instruction that handles the 4-byte Anchor prefix. +/// +/// The derive macro's borsh deserialization expects the data immediately after +/// the discriminator, but Invoke/InvokeCpi have a 4-byte vec length prefix. +/// This wrapper type's deserialize implementation skips those 4 bytes. +#[derive(Debug)] +pub struct InvokeWrapper(pub InstructionDataInvoke); + +impl BorshDeserialize for InvokeWrapper { + fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> { + // Skip 4-byte Anchor vec length prefix + let mut prefix = [0u8; 4]; + reader.read_exact(&mut prefix)?; + // Deserialize the actual data + let inner = InstructionDataInvoke::deserialize_reader(reader)?; + Ok(InvokeWrapper(inner)) + } +} + +/// Wrapper type for InvokeCpi instruction that handles the 4-byte Anchor prefix. 
+#[derive(Debug)] +pub struct InvokeCpiWrapper(pub InstructionDataInvokeCpi); + +impl BorshDeserialize for InvokeCpiWrapper { + fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> { + // Skip 4-byte Anchor vec length prefix + let mut prefix = [0u8; 4]; + reader.read_exact(&mut prefix)?; + // Deserialize the actual data + let inner = InstructionDataInvokeCpi::deserialize_reader(reader)?; + Ok(InvokeCpiWrapper(inner)) + } +} + +/// Formatter wrapper that takes raw bytes and handles the prefix skip internally. +#[cfg(not(target_os = "solana"))] +pub fn format_invoke_wrapper(data: &InvokeWrapper, accounts: &[AccountMeta]) -> String { + // We already have the parsed data, format it directly + format_invoke_inner(&data.0, accounts) +} + +/// Formatter wrapper that takes raw bytes and handles the prefix skip internally. +#[cfg(not(target_os = "solana"))] +pub fn format_invoke_cpi_wrapper(data: &InvokeCpiWrapper, accounts: &[AccountMeta]) -> String { + // We already have the parsed data, format it directly + format_invoke_cpi_inner(&data.0, accounts) +} + +/// Format InstructionDataInvoke (internal helper). 
+#[cfg(not(target_os = "solana"))] +fn format_invoke_inner(data: &InstructionDataInvoke, accounts: &[AccountMeta]) -> String { + use std::fmt::Write; + let mut output = String::new(); + + let _ = writeln!( + output, + "Accounts: in: {}, out: {}", + data.input_compressed_accounts_with_merkle_context.len(), + data.output_compressed_accounts.len() + ); + + if data.proof.is_some() { + let _ = writeln!(output, "Proof: Validity proof"); + } + + format_input_accounts_section( + &mut output, + &data.input_compressed_accounts_with_merkle_context, + accounts, + ); + format_output_accounts_section(&mut output, &data.output_compressed_accounts, accounts); + format_new_address_params_section(&mut output, &data.new_address_params, accounts); + format_fee_section( + &mut output, + data.compress_or_decompress_lamports, + data.is_compress, + data.relay_fee, + ); + + output +} + +/// Format InstructionDataInvokeCpi (internal helper). +#[cfg(not(target_os = "solana"))] +fn format_invoke_cpi_inner(data: &InstructionDataInvokeCpi, accounts: &[AccountMeta]) -> String { + use std::fmt::Write; + let mut output = String::new(); + + let _ = writeln!( + output, + "Accounts: in: {}, out: {}", + data.input_compressed_accounts_with_merkle_context.len(), + data.output_compressed_accounts.len() + ); + + if data.proof.is_some() { + let _ = writeln!(output, "Proof: Validity proof"); + } + + format_input_accounts_section( + &mut output, + &data.input_compressed_accounts_with_merkle_context, + accounts, + ); + format_output_accounts_section(&mut output, &data.output_compressed_accounts, accounts); + format_new_address_params_section(&mut output, &data.new_address_params, accounts); + format_fee_section( + &mut output, + data.compress_or_decompress_lamports, + data.is_compress, + data.relay_fee, + ); + + output +} + +/// Light System Program instructions. +/// +/// The Light System Program uses 8-byte discriminators for compressed account operations. 
+/// Each instruction has an explicit discriminator attribute. +#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7", + program_name = "Light System Program", + discriminator_size = 8 +)] +pub enum LightSystemInstruction { + /// Direct invocation of Light System - creates/modifies compressed accounts. + /// Has 4-byte Anchor vec length prefix after discriminator. + #[discriminator(26, 16, 169, 7, 21, 202, 242, 25)] + #[instruction_decoder( + account_names = ["fee_payer", "authority", "registered_program_pda", "noop_program", "account_compression_authority", "account_compression_program", "self_program"], + params = InvokeWrapper, + pretty_formatter = crate::programs::light_system::format_invoke_wrapper + )] + Invoke, + + /// CPI invocation from another program. + /// Has 4-byte Anchor vec length prefix after discriminator. + #[discriminator(49, 212, 191, 129, 39, 194, 43, 196)] + #[instruction_decoder( + account_names = ["fee_payer", "authority", "registered_program_pda", "noop_program", "account_compression_authority", "account_compression_program", "invoking_program", "cpi_signer"], + params = InvokeCpiWrapper, + pretty_formatter = crate::programs::light_system::format_invoke_cpi_wrapper + )] + InvokeCpi, + + /// CPI with read-only compressed accounts. + /// Uses pure borsh serialization (no 4-byte prefix). + #[discriminator(86, 47, 163, 166, 21, 223, 92, 8)] + #[instruction_decoder( + account_names = ["fee_payer", "authority", "registered_program_pda", "noop_program", "account_compression_authority", "account_compression_program", "invoking_program", "cpi_signer"], + params = InstructionDataInvokeCpiWithReadOnly, + pretty_formatter = crate::programs::light_system::format_invoke_cpi_readonly + )] + InvokeCpiWithReadOnly, + + /// CPI with full account info for each compressed account. + /// Uses pure borsh serialization (no 4-byte prefix). 
+ #[discriminator(228, 34, 128, 84, 47, 139, 86, 240)] + #[instruction_decoder( + account_names = ["fee_payer", "authority", "registered_program_pda", "noop_program", "account_compression_authority", "account_compression_program", "invoking_program", "cpi_signer"], + params = InstructionDataInvokeCpiWithAccountInfo, + pretty_formatter = crate::programs::light_system::format_invoke_cpi_account_info + )] + InvokeCpiWithAccountInfo, +} diff --git a/sdk-libs/instruction-decoder/src/programs/mod.rs b/sdk-libs/instruction-decoder/src/programs/mod.rs new file mode 100644 index 0000000000..b205fa17b9 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/mod.rs @@ -0,0 +1,37 @@ +//! Native Solana program decoders using macro-derived implementations. +//! +//! This module contains instruction decoders for native Solana programs +//! that use various discriminator sizes: +//! - 1-byte: SPL Token, Token 2022, Compute Budget, Light Token (CToken) +//! - 4-byte: System Program +//! - 8-byte: Anchor programs (Light Registry, Account Compression, Light System) + +// Generic Solana program decoders (always available) +pub mod compute_budget; +pub mod spl_token; +pub mod system; +pub mod token_2022; + +pub use compute_budget::ComputeBudgetInstructionDecoder; +pub use spl_token::SplTokenInstructionDecoder; +pub use system::SystemInstructionDecoder; +pub use token_2022::Token2022InstructionDecoder; + +// Light Protocol program decoders (requires light-protocol feature) +#[cfg(feature = "light-protocol")] +pub mod account_compression; +#[cfg(feature = "light-protocol")] +pub mod ctoken; +#[cfg(feature = "light-protocol")] +pub mod light_system; +#[cfg(feature = "light-protocol")] +pub mod registry; + +#[cfg(feature = "light-protocol")] +pub use account_compression::AccountCompressionInstructionDecoder; +#[cfg(feature = "light-protocol")] +pub use ctoken::CTokenInstructionDecoder; +#[cfg(feature = "light-protocol")] +pub use light_system::LightSystemInstructionDecoder; 
+#[cfg(feature = "light-protocol")] +pub use registry::RegistryInstructionDecoder; diff --git a/sdk-libs/instruction-decoder/src/programs/registry.rs b/sdk-libs/instruction-decoder/src/programs/registry.rs new file mode 100644 index 0000000000..0d5cb9c877 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/registry.rs @@ -0,0 +1,193 @@ +//! Light Registry program instruction decoder. +//! +//! This module provides a macro-derived decoder for the Light Registry program, +//! which uses 8-byte Anchor discriminators. +//! +//! The Registry program manages: +//! - Protocol configuration +//! - Forester registration and epochs +//! - Merkle tree initialization and operations +//! - Rollover operations +//! - Compressible config management + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use light_instruction_decoder_derive::InstructionDecoder; + +/// Light Registry program instructions. +/// +/// The Registry program uses 8-byte Anchor discriminators computed from +/// sha256("global:"). 
+#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "Lighton6oQpVkeewmo2mcPTQQp7kYHr4fWpAgJyEmDX", + program_name = "Light Registry", + discriminator_size = 8 +)] +pub enum RegistryInstruction { + // ======================================================================== + // Protocol Config + // ======================================================================== + /// Initialize the protocol configuration + #[instruction_decoder(account_names = ["fee_payer", "authority", "protocol_config_pda", "system_program", "self_program"])] + InitializeProtocolConfig { bump: u8 }, + + /// Update the protocol configuration + #[instruction_decoder(account_names = ["fee_payer", "authority", "protocol_config_pda", "new_authority"])] + UpdateProtocolConfig, + + // ======================================================================== + // Forester Management + // ======================================================================== + /// Register a new forester + #[instruction_decoder(account_names = ["fee_payer", "authority", "protocol_config_pda", "forester_pda", "system_program"])] + RegisterForester { bump: u8 }, + + /// Update a forester PDA + #[instruction_decoder(account_names = ["authority", "forester_pda", "new_authority"])] + UpdateForesterPda, + + /// Update a forester's weight + #[instruction_decoder(account_names = ["authority", "protocol_config_pda", "forester_pda"])] + UpdateForesterPdaWeight { new_weight: u64 }, + + // ======================================================================== + // Epoch Management + // ======================================================================== + /// Register a forester for an epoch + #[instruction_decoder(account_names = ["fee_payer", "authority", "forester_pda", "forester_epoch_pda", "protocol_config", "epoch_pda", "system_program"])] + RegisterForesterEpoch { epoch: u64 }, + + /// Finalize forester registration + #[instruction_decoder(account_names = ["authority", "forester_epoch_pda", 
"epoch_pda"])] + FinalizeRegistration, + + /// Report work done by forester + #[instruction_decoder(account_names = ["authority", "forester_epoch_pda", "epoch_pda"])] + ReportWork, + + // ======================================================================== + // System Program Registration + // ======================================================================== + /// Register a system program + #[instruction_decoder(account_names = ["authority", "cpi_authority", "program_to_be_registered", "registered_program_pda", "group_pda", "account_compression_program", "system_program"])] + RegisterSystemProgram { bump: u8 }, + + /// Deregister a system program + #[instruction_decoder(account_names = ["authority", "cpi_authority", "registered_program_pda", "group_pda", "account_compression_program"])] + DeregisterSystemProgram { bump: u8 }, + + // ======================================================================== + // Tree Initialization + // ======================================================================== + /// Initialize an address Merkle tree + #[instruction_decoder(account_names = ["authority", "merkle_tree", "queue", "registered_program_pda", "cpi_authority", "account_compression_program", "protocol_config_pda", "cpi_context_account", "light_system_program"])] + InitializeAddressMerkleTree { bump: u8 }, + + /// Initialize a state Merkle tree + #[instruction_decoder(account_names = ["authority", "merkle_tree", "queue", "registered_program_pda", "cpi_authority", "account_compression_program", "protocol_config_pda", "cpi_context_account", "light_system_program"])] + InitializeStateMerkleTree { bump: u8 }, + + /// Initialize a batched state Merkle tree + #[instruction_decoder(account_names = ["authority", "merkle_tree", "queue", "registered_program_pda", "cpi_authority", "account_compression_program", "protocol_config_pda", "cpi_context_account", "light_system_program"])] + InitializeBatchedStateMerkleTree { bump: u8 }, + + /// Initialize a batched 
address Merkle tree + #[instruction_decoder(account_names = ["authority", "merkle_tree", "registered_program_pda", "cpi_authority", "account_compression_program", "protocol_config_pda"])] + InitializeBatchedAddressMerkleTree { bump: u8 }, + + // ======================================================================== + // Tree Operations + // ======================================================================== + /// Nullify a leaf in the tree + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "log_wrapper", "merkle_tree", "nullifier_queue"])] + Nullify { bump: u8 }, + + /// Update an address Merkle tree + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "log_wrapper", "merkle_tree", "queue"])] + UpdateAddressMerkleTree { bump: u8 }, + + /// Batch nullify leaves + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "log_wrapper", "merkle_tree"])] + BatchNullify { bump: u8 }, + + /// Batch append to output queue + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "log_wrapper", "merkle_tree", "output_queue"])] + BatchAppend { bump: u8 }, + + /// Batch update an address tree + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "log_wrapper", "merkle_tree"])] + BatchUpdateAddressTree { bump: u8 }, + + // ======================================================================== + // Rollover Operations + // ======================================================================== + /// Rollover address Merkle tree and queue + #[instruction_decoder(account_names = 
["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "new_merkle_tree", "new_queue", "old_merkle_tree", "old_queue"])] + RolloverAddressMerkleTreeAndQueue { bump: u8 }, + + /// Rollover state Merkle tree and queue + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "new_merkle_tree", "new_queue", "old_merkle_tree", "old_queue", "cpi_context_account", "light_system_program", "protocol_config_pda"])] + RolloverStateMerkleTreeAndQueue { bump: u8 }, + + /// Rollover batched address Merkle tree + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "new_address_merkle_tree", "old_address_merkle_tree"])] + RolloverBatchedAddressMerkleTree { bump: u8 }, + + /// Rollover batched state Merkle tree + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "new_state_merkle_tree", "old_state_merkle_tree", "new_output_queue", "old_output_queue", "cpi_context_account", "registered_program_pda", "cpi_authority", "account_compression_program", "protocol_config_pda", "light_system_program"])] + RolloverBatchedStateMerkleTree { bump: u8 }, + + // ======================================================================== + // Migration + // ======================================================================== + /// Migrate state + #[instruction_decoder(account_names = ["registered_forester_pda", "authority", "cpi_authority", "registered_program_pda", "account_compression_program", "merkle_tree"])] + MigrateState { bump: u8 }, + + // ======================================================================== + // Compressible Config + // ======================================================================== + /// Create a config counter + #[instruction_decoder(account_names = ["fee_payer", 
"authority", "protocol_config_pda", "config_counter", "system_program"])] + CreateConfigCounter, + + /// Create a compressible config + #[instruction_decoder(account_names = ["fee_payer", "authority", "protocol_config_pda", "config_counter", "compressible_config", "system_program"])] + CreateCompressibleConfig, + + /// Update a compressible config + #[instruction_decoder(account_names = ["update_authority", "compressible_config", "new_update_authority", "new_withdrawal_authority"])] + UpdateCompressibleConfig, + + /// Pause a compressible config (only requires update_authority and compressible_config) + #[instruction_decoder(account_names = ["update_authority", "compressible_config"])] + PauseCompressibleConfig, + + /// Unpause a compressible config (only requires update_authority and compressible_config) + #[instruction_decoder(account_names = ["update_authority", "compressible_config"])] + UnpauseCompressibleConfig, + + /// Deprecate a compressible config (only requires update_authority and compressible_config) + #[instruction_decoder(account_names = ["update_authority", "compressible_config"])] + DeprecateCompressibleConfig, + + // ======================================================================== + // Token Operations + // ======================================================================== + /// Withdraw from funding pool + #[instruction_decoder(account_names = ["fee_payer", "withdrawal_authority", "compressible_config", "rent_sponsor", "compression_authority", "destination", "system_program", "compressed_token_program"])] + WithdrawFundingPool { amount: u64 }, + + /// Claim compressed tokens + #[instruction_decoder(account_names = ["authority", "registered_forester_pda", "rent_sponsor", "compression_authority", "compressible_config", "compressed_token_program"])] + Claim, + + /// Compress and close token account + #[instruction_decoder(account_names = ["authority", "registered_forester_pda", "compression_authority", "compressible_config"])] + 
CompressAndClose { + authority_index: u8, + destination_index: u8, + }, +} diff --git a/sdk-libs/instruction-decoder/src/programs/spl_token.rs b/sdk-libs/instruction-decoder/src/programs/spl_token.rs new file mode 100644 index 0000000000..9292aa75e9 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/spl_token.rs @@ -0,0 +1,130 @@ +//! SPL Token program instruction decoder. +//! +//! This module provides a macro-derived decoder for the SPL Token program, +//! which uses single-byte discriminators based on variant indices. + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use light_instruction_decoder_derive::InstructionDecoder; + +/// SPL Token program instructions. +/// +/// The SPL Token program uses a 1-byte discriminator (variant index). +/// Each variant's discriminator is its position in this enum (0, 1, 2, ...). +/// +/// Note: Complex types (Pubkey, COption) are not fully parsed; +/// only primitive fields are extracted. 
+#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA", + program_name = "SPL Token", + discriminator_size = 1 +)] +pub enum SplTokenInstruction { + /// Initialize a new mint (index 0) + /// Fields: decimals: u8, mint_authority: Pubkey, freeze_authority: COption + #[instruction_decoder(account_names = ["mint", "rent"])] + InitializeMint { decimals: u8 }, + + /// Initialize a new token account (index 1) + #[instruction_decoder(account_names = ["account", "mint", "owner", "rent"])] + InitializeAccount, + + /// Initialize a multisig account (index 2) + #[instruction_decoder(account_names = ["multisig", "rent"])] + InitializeMultisig { m: u8 }, + + /// Transfer tokens (index 3) + #[instruction_decoder(account_names = ["source", "destination", "authority"])] + Transfer { amount: u64 }, + + /// Approve a delegate (index 4) + #[instruction_decoder(account_names = ["source", "delegate", "owner"])] + Approve { amount: u64 }, + + /// Revoke delegate authority (index 5) + #[instruction_decoder(account_names = ["source", "owner"])] + Revoke, + + /// Set a new authority (index 6) + /// Fields: authority_type: u8, new_authority: COption + #[instruction_decoder(account_names = ["account_or_mint", "current_authority"])] + SetAuthority { authority_type: u8 }, + + /// Mint new tokens (index 7) + #[instruction_decoder(account_names = ["mint", "destination", "authority"])] + MintTo { amount: u64 }, + + /// Burn tokens (index 8) + #[instruction_decoder(account_names = ["source", "mint", "authority"])] + Burn { amount: u64 }, + + /// Close a token account (index 9) + #[instruction_decoder(account_names = ["account", "destination", "authority"])] + CloseAccount, + + /// Freeze a token account (index 10) + #[instruction_decoder(account_names = ["account", "mint", "authority"])] + FreezeAccount, + + /// Thaw a frozen token account (index 11) + #[instruction_decoder(account_names = ["account", "mint", "authority"])] + ThawAccount, 
+ + /// Transfer tokens with decimals check (index 12) + #[instruction_decoder(account_names = ["source", "mint", "destination", "authority"])] + TransferChecked { amount: u64, decimals: u8 }, + + /// Approve delegate with decimals check (index 13) + #[instruction_decoder(account_names = ["source", "mint", "delegate", "owner"])] + ApproveChecked { amount: u64, decimals: u8 }, + + /// Mint tokens with decimals check (index 14) + #[instruction_decoder(account_names = ["mint", "destination", "authority"])] + MintToChecked { amount: u64, decimals: u8 }, + + /// Burn tokens with decimals check (index 15) + #[instruction_decoder(account_names = ["source", "mint", "authority"])] + BurnChecked { amount: u64, decimals: u8 }, + + /// Initialize account with owner in data (index 16) + /// Fields: owner: Pubkey (32 bytes) + #[instruction_decoder(account_names = ["account", "mint", "rent"])] + InitializeAccount2, + + /// Sync native SOL balance (index 17) + #[instruction_decoder(account_names = ["account"])] + SyncNative, + + /// Initialize account without rent sysvar (index 18) + /// Fields: owner: Pubkey (32 bytes) + #[instruction_decoder(account_names = ["account", "mint"])] + InitializeAccount3, + + /// Initialize multisig without rent sysvar (index 19) + #[instruction_decoder(account_names = ["multisig"])] + InitializeMultisig2 { m: u8 }, + + /// Initialize mint without rent sysvar (index 20) + /// Fields: decimals: u8, mint_authority: Pubkey, freeze_authority: COption + #[instruction_decoder(account_names = ["mint"])] + InitializeMint2 { decimals: u8 }, + + /// Get required account size (index 21) + #[instruction_decoder(account_names = ["mint"])] + GetAccountDataSize, + + /// Initialize immutable owner extension (index 22) + #[instruction_decoder(account_names = ["account"])] + InitializeImmutableOwner, + + /// Convert amount to UI amount string (index 23) + #[instruction_decoder(account_names = ["mint"])] + AmountToUiAmount { amount: u64 }, + + /// Convert UI amount 
string to amount (index 24) + /// Fields: ui_amount: &str (variable length) + #[instruction_decoder(account_names = ["mint"])] + UiAmountToAmount, +} diff --git a/sdk-libs/instruction-decoder/src/programs/system.rs b/sdk-libs/instruction-decoder/src/programs/system.rs new file mode 100644 index 0000000000..0cbf08d456 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/system.rs @@ -0,0 +1,84 @@ +//! Solana System Program instruction decoder. +//! +//! This module provides a macro-derived decoder for the Solana System Program, +//! which uses 4-byte (u32) discriminators for instruction types. + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use light_instruction_decoder_derive::InstructionDecoder; + +/// Solana System Program instructions. +/// +/// The System Program uses a 4-byte discriminator (u32 little-endian). +/// Each variant's discriminator is its position in this enum (0, 1, 2, ...). +#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "11111111111111111111111111111111", + program_name = "System Program", + discriminator_size = 4 +)] +pub enum SystemInstruction { + /// Create a new account (index 0) + /// Data: lamports (u64) + space (u64) + owner (Pubkey) + #[instruction_decoder(account_names = ["funding_account", "new_account"])] + CreateAccount { lamports: u64, space: u64 }, + + /// Assign account to a program (index 1) + /// Data: owner (Pubkey) + #[instruction_decoder(account_names = ["account"])] + Assign, + + /// Transfer lamports (index 2) + /// Data: lamports (u64) + #[instruction_decoder(account_names = ["from", "to"])] + Transfer { lamports: u64 }, + + /// Create account with seed (index 3) + /// Data: base (Pubkey) + seed (String) + lamports (u64) + space (u64) + owner (Pubkey) + #[instruction_decoder(account_names = ["funding_account", "created_account", "base_account"])] + CreateAccountWithSeed { lamports: u64, space: u64 }, + + /// Advance 
nonce account (index 4) + #[instruction_decoder(account_names = ["nonce_account", "recent_blockhashes_sysvar", "nonce_authority"])] + AdvanceNonceAccount, + + /// Withdraw from nonce account (index 5) + /// Data: lamports (u64) + #[instruction_decoder(account_names = ["nonce_account", "recipient", "recent_blockhashes_sysvar", "rent_sysvar", "nonce_authority"])] + WithdrawNonceAccount { lamports: u64 }, + + /// Initialize nonce account (index 6) + /// Data: authority (Pubkey) + #[instruction_decoder(account_names = ["nonce_account", "recent_blockhashes_sysvar", "rent_sysvar"])] + InitializeNonceAccount, + + /// Authorize nonce account (index 7) + /// Data: new_authority (Pubkey) + #[instruction_decoder(account_names = ["nonce_account", "nonce_authority"])] + AuthorizeNonceAccount, + + /// Allocate space for account (index 8) + /// Data: space (u64) + #[instruction_decoder(account_names = ["account"])] + Allocate { space: u64 }, + + /// Allocate space with seed (index 9) + /// Data: base (Pubkey) + seed (String) + space (u64) + owner (Pubkey) + #[instruction_decoder(account_names = ["account", "base_account"])] + AllocateWithSeed { space: u64 }, + + /// Assign account with seed (index 10) + /// Data: base (Pubkey) + seed (String) + owner (Pubkey) + #[instruction_decoder(account_names = ["account", "base_account"])] + AssignWithSeed, + + /// Transfer with seed (index 11) + /// Data: lamports (u64) + from_seed (String) + from_owner (Pubkey) + #[instruction_decoder(account_names = ["funding_account", "base_account", "recipient"])] + TransferWithSeed { lamports: u64 }, + + /// Upgrade nonce account (index 12) + #[instruction_decoder(account_names = ["nonce_account"])] + UpgradeNonceAccount, +} diff --git a/sdk-libs/instruction-decoder/src/programs/token_2022.rs b/sdk-libs/instruction-decoder/src/programs/token_2022.rs new file mode 100644 index 0000000000..95bb20de6f --- /dev/null +++ b/sdk-libs/instruction-decoder/src/programs/token_2022.rs @@ -0,0 +1,209 @@ +//! 
Token 2022 (Token Extensions) program instruction decoder. +//! +//! This module provides a macro-derived decoder for the Token 2022 program, +//! which uses single-byte discriminators based on variant indices. + +// Allow the macro-generated code to reference types from this crate +extern crate self as light_instruction_decoder; + +use light_instruction_decoder_derive::InstructionDecoder; + +/// Token 2022 program instructions. +/// +/// The Token 2022 program uses a 1-byte discriminator (variant index). +/// Each variant's discriminator is its position in this enum (0, 1, 2, ...). +/// +/// Token 2022 is a superset of SPL Token (indices 0-24 are compatible). +/// Indices 25+ are Token Extensions specific instructions. +/// +/// Note: Complex types (Pubkey, COption, Vec) are not +/// fully parsed; only primitive fields are extracted. +#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb", + program_name = "Token 2022", + discriminator_size = 1 +)] +pub enum Token2022Instruction { + // ===== SPL Token compatible instructions (0-24) ===== + /// Initialize a new mint (index 0) + #[instruction_decoder(account_names = ["mint", "rent"])] + InitializeMint { decimals: u8 }, + + /// Initialize a new token account (index 1) + #[instruction_decoder(account_names = ["account", "mint", "owner", "rent"])] + InitializeAccount, + + /// Initialize a multisig account (index 2) + #[instruction_decoder(account_names = ["multisig", "rent"])] + InitializeMultisig { m: u8 }, + + /// Transfer tokens - DEPRECATED, use TransferChecked (index 3) + #[instruction_decoder(account_names = ["source", "destination", "authority"])] + Transfer { amount: u64 }, + + /// Approve a delegate (index 4) + #[instruction_decoder(account_names = ["source", "delegate", "owner"])] + Approve { amount: u64 }, + + /// Revoke delegate authority (index 5) + #[instruction_decoder(account_names = ["source", "owner"])] + Revoke, + + /// Set a new authority 
(index 6) + #[instruction_decoder(account_names = ["account_or_mint", "current_authority"])] + SetAuthority { authority_type: u8 }, + + /// Mint new tokens (index 7) + #[instruction_decoder(account_names = ["mint", "destination", "authority"])] + MintTo { amount: u64 }, + + /// Burn tokens (index 8) + #[instruction_decoder(account_names = ["source", "mint", "authority"])] + Burn { amount: u64 }, + + /// Close a token account (index 9) + #[instruction_decoder(account_names = ["account", "destination", "authority"])] + CloseAccount, + + /// Freeze a token account (index 10) + #[instruction_decoder(account_names = ["account", "mint", "authority"])] + FreezeAccount, + + /// Thaw a frozen token account (index 11) + #[instruction_decoder(account_names = ["account", "mint", "authority"])] + ThawAccount, + + /// Transfer tokens with decimals check (index 12) + #[instruction_decoder(account_names = ["source", "mint", "destination", "authority"])] + TransferChecked { amount: u64, decimals: u8 }, + + /// Approve delegate with decimals check (index 13) + #[instruction_decoder(account_names = ["source", "mint", "delegate", "owner"])] + ApproveChecked { amount: u64, decimals: u8 }, + + /// Mint tokens with decimals check (index 14) + #[instruction_decoder(account_names = ["mint", "destination", "authority"])] + MintToChecked { amount: u64, decimals: u8 }, + + /// Burn tokens with decimals check (index 15) + #[instruction_decoder(account_names = ["source", "mint", "authority"])] + BurnChecked { amount: u64, decimals: u8 }, + + /// Initialize account with owner in data (index 16) + #[instruction_decoder(account_names = ["account", "mint", "rent"])] + InitializeAccount2, + + /// Sync native SOL balance (index 17) + #[instruction_decoder(account_names = ["account"])] + SyncNative, + + /// Initialize account without rent sysvar (index 18) + #[instruction_decoder(account_names = ["account", "mint"])] + InitializeAccount3, + + /// Initialize multisig without rent sysvar (index 19) + 
#[instruction_decoder(account_names = ["multisig"])] + InitializeMultisig2 { m: u8 }, + + /// Initialize mint without rent sysvar (index 20) + #[instruction_decoder(account_names = ["mint"])] + InitializeMint2 { decimals: u8 }, + + /// Get required account size (index 21) + #[instruction_decoder(account_names = ["mint"])] + GetAccountDataSize, + + /// Initialize immutable owner extension (index 22) + #[instruction_decoder(account_names = ["account"])] + InitializeImmutableOwner, + + /// Convert amount to UI amount string (index 23) + #[instruction_decoder(account_names = ["mint"])] + AmountToUiAmount { amount: u64 }, + + /// Convert UI amount string to amount (index 24) + #[instruction_decoder(account_names = ["mint"])] + UiAmountToAmount, + + // ===== Token Extensions specific instructions (25+) ===== + /// Initialize mint close authority extension (index 25) + #[instruction_decoder(account_names = ["mint"])] + InitializeMintCloseAuthority, + + /// Transfer fee extension instruction prefix (index 26) + #[instruction_decoder(account_names = ["mint"])] + TransferFeeExtension, + + /// Confidential transfer extension instruction prefix (index 27) + #[instruction_decoder(account_names = ["account"])] + ConfidentialTransferExtension, + + /// Default account state extension instruction prefix (index 28) + #[instruction_decoder(account_names = ["mint"])] + DefaultAccountStateExtension, + + /// Reallocate account for extensions (index 29) + #[instruction_decoder(account_names = ["account", "payer", "system_program"])] + Reallocate, + + /// Memo transfer extension instruction prefix (index 30) + #[instruction_decoder(account_names = ["account", "owner"])] + MemoTransferExtension, + + /// Create the native mint (index 31) + #[instruction_decoder(account_names = ["mint", "funding_account", "system_program"])] + CreateNativeMint, + + /// Initialize non-transferable mint extension (index 32) + #[instruction_decoder(account_names = ["mint"])] + InitializeNonTransferableMint, + + 
/// Interest bearing mint extension instruction prefix (index 33) + #[instruction_decoder(account_names = ["mint"])] + InterestBearingMintExtension, + + /// CPI guard extension instruction prefix (index 34) + #[instruction_decoder(account_names = ["account", "owner"])] + CpiGuardExtension, + + /// Initialize permanent delegate extension (index 35) + #[instruction_decoder(account_names = ["mint"])] + InitializePermanentDelegate, + + /// Transfer hook extension instruction prefix (index 36) + #[instruction_decoder(account_names = ["mint"])] + TransferHookExtension, + + /// Confidential transfer fee extension instruction prefix (index 37) + #[instruction_decoder(account_names = ["mint"])] + ConfidentialTransferFeeExtension, + + /// Withdraw excess lamports from token account (index 38) + #[instruction_decoder(account_names = ["source", "destination", "authority"])] + WithdrawExcessLamports, + + /// Metadata pointer extension instruction prefix (index 39) + #[instruction_decoder(account_names = ["mint"])] + MetadataPointerExtension, + + /// Group pointer extension instruction prefix (index 40) + #[instruction_decoder(account_names = ["mint"])] + GroupPointerExtension, + + /// Group member pointer extension instruction prefix (index 41) + #[instruction_decoder(account_names = ["mint"])] + GroupMemberPointerExtension, + + /// Confidential mint/burn extension instruction prefix (index 42) + #[instruction_decoder(account_names = ["mint"])] + ConfidentialMintBurnExtension, + + /// Scaled UI amount extension instruction prefix (index 43) + #[instruction_decoder(account_names = ["mint"])] + ScaledUiAmountExtension, + + /// Pausable extension instruction prefix (index 44) + #[instruction_decoder(account_names = ["mint"])] + PausableExtension, +} diff --git a/sdk-libs/instruction-decoder/src/registry.rs b/sdk-libs/instruction-decoder/src/registry.rs new file mode 100644 index 0000000000..f04b411011 --- /dev/null +++ b/sdk-libs/instruction-decoder/src/registry.rs @@ -0,0 +1,96 
@@
+//! Instruction decoder registry for Light Protocol and common Solana programs
+
+use std::collections::HashMap;
+
+use solana_instruction::AccountMeta;
+use solana_pubkey::Pubkey;
+
+use crate::{DecodedInstruction, InstructionDecoder};
+
+// ============================================================================
+// Trait-based Decoder Registry
+// ============================================================================
+
+/// Registry of instruction decoders
+pub struct DecoderRegistry {
+    decoders: HashMap<Pubkey, Box<dyn InstructionDecoder>>,
+}
+
+impl std::fmt::Debug for DecoderRegistry {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("DecoderRegistry")
+            .field("decoder_count", &self.decoders.len())
+            .field("program_ids", &self.decoders.keys().collect::<Vec<_>>())
+            .finish()
+    }
+}
+
+impl DecoderRegistry {
+    /// Create a new registry with built-in decoders
+    pub fn new() -> Self {
+        let mut registry = Self {
+            decoders: HashMap::new(),
+        };
+
+        // Register generic Solana program decoders (always available)
+        registry.register(Box::new(crate::programs::ComputeBudgetInstructionDecoder));
+        registry.register(Box::new(crate::programs::SplTokenInstructionDecoder));
+        registry.register(Box::new(crate::programs::Token2022InstructionDecoder));
+        registry.register(Box::new(crate::programs::SystemInstructionDecoder));
+
+        // Register Light Protocol decoders (requires light-protocol feature)
+        #[cfg(feature = "light-protocol")]
+        {
+            registry.register(Box::new(crate::programs::LightSystemInstructionDecoder));
+            registry.register(Box::new(
+                crate::programs::AccountCompressionInstructionDecoder,
+            ));
+            registry.register(Box::new(crate::programs::CTokenInstructionDecoder));
+            registry.register(Box::new(crate::programs::RegistryInstructionDecoder));
+        }
+
+        registry
+    }
+
+    /// Register a custom decoder
+    pub fn register(&mut self, decoder: Box<dyn InstructionDecoder>) {
+        self.decoders.insert(decoder.program_id(), decoder);
+    }
+
+    /// Register multiple decoders from a Vec
+    pub fn register_all(&mut self, decoders: Vec<Box<dyn InstructionDecoder>>) {
+        for decoder in decoders {
+            self.register(decoder);
+        }
+    }
+
+    /// Decode an instruction using registered decoders
+    pub fn decode(
+        &self,
+        program_id: &Pubkey,
+        data: &[u8],
+        accounts: &[AccountMeta],
+    ) -> Option<(DecodedInstruction, &dyn InstructionDecoder)> {
+        self.decoders.get(program_id).and_then(|decoder| {
+            decoder
+                .decode(data, accounts)
+                .map(|d| (d, decoder.as_ref()))
+        })
+    }
+
+    /// Get a decoder by program ID
+    pub fn get_decoder(&self, program_id: &Pubkey) -> Option<&dyn InstructionDecoder> {
+        self.decoders.get(program_id).map(|d| d.as_ref())
+    }
+
+    /// Check if a decoder exists for a program ID
+    pub fn has_decoder(&self, program_id: &Pubkey) -> bool {
+        self.decoders.contains_key(program_id)
+    }
+}
+
+impl Default for DecoderRegistry {
+    fn default() -> Self {
+        Self::new()
+    }
+}
diff --git a/sdk-libs/instruction-decoder/src/types.rs b/sdk-libs/instruction-decoder/src/types.rs
new file mode 100644
index 0000000000..423535c8d8
--- /dev/null
+++ b/sdk-libs/instruction-decoder/src/types.rs
@@ -0,0 +1,224 @@
+//! Type definitions for enhanced logging
+//!
+//! This module contains all the data types used for instruction decoding
+//! and transaction logging. These types are independent of any test framework
+//! (LiteSVM, etc.) and can be used in standalone tools.
+
+use solana_instruction::AccountMeta;
+use solana_pubkey::Pubkey;
+use solana_signature::Signature;
+
+use crate::{DecodedInstruction, DecoderRegistry, EnhancedLoggingConfig};
+
+/// Enhanced transaction log containing all formatting information
+#[derive(Debug, Clone)]
+pub struct EnhancedTransactionLog {
+    pub signature: Signature,
+    pub slot: u64,
+    pub status: TransactionStatus,
+    pub fee: u64,
+    pub compute_used: u64,
+    pub compute_total: u64,
+    pub instructions: Vec<EnhancedInstructionLog>,
+    pub account_changes: Vec<AccountChange>,
+    pub program_logs_pretty: String,
+    pub light_events: Vec<LightProtocolEvent>,
+}
+
+impl EnhancedTransactionLog {
+    /// Create a new empty transaction log with basic info
+    pub fn new(signature: Signature, slot: u64) -> Self {
+        Self {
+            signature,
+            slot,
+            status: TransactionStatus::Unknown,
+            fee: 0,
+            compute_used: 0,
+            compute_total: 1_400_000,
+            instructions: Vec::new(),
+            account_changes: Vec::new(),
+            program_logs_pretty: String::new(),
+            light_events: Vec::new(),
+        }
+    }
+}
+
+/// Transaction execution status
+#[derive(Debug, Clone)]
+pub enum TransactionStatus {
+    Success,
+    Failed(String),
+    Unknown,
+}
+
+impl TransactionStatus {
+    pub fn text(&self) -> String {
+        match self {
+            TransactionStatus::Success => "Success".to_string(),
+            TransactionStatus::Failed(err) => format!("Failed: {}", err),
+            TransactionStatus::Unknown => "Unknown".to_string(),
+        }
+    }
+}
+
+/// Enhanced instruction log with hierarchy and parsing
+#[derive(Debug, Clone)]
+pub struct EnhancedInstructionLog {
+    pub index: usize,
+    pub program_id: Pubkey,
+    pub program_name: String,
+    pub instruction_name: Option<String>,
+    pub accounts: Vec<AccountMeta>,
+    pub data: Vec<u8>,
+    /// Decoded instruction from custom decoder (if available)
+    pub decoded_instruction: Option<DecodedInstruction>,
+    pub inner_instructions: Vec<EnhancedInstructionLog>,
+    pub compute_consumed: Option<u64>,
+    pub success: bool,
+    pub depth: usize,
+}
+
+impl EnhancedInstructionLog {
+    /// Create a new instruction log
+    pub fn new(index: usize, program_id: Pubkey, program_name: String) -> Self {
+        Self {
+            index,
+            program_id,
+            program_name,
+            instruction_name: None,
+            accounts: Vec::new(),
+            data: Vec::new(),
+            decoded_instruction: None,
+            inner_instructions: Vec::new(),
+            compute_consumed: None,
+            success: true,
+            depth: 0,
+        }
+    }
+
+    /// Decode this instruction using the provided config's decoder registry
+    pub fn decode(&mut self, config: &EnhancedLoggingConfig) {
+        if !config.decode_light_instructions {
+            return;
+        }
+
+        // Try the decoder registry (includes custom decoders)
+        if let Some(registry) = config.decoder_registry() {
+            if let Some((decoded, decoder)) =
+                registry.decode(&self.program_id, &self.data, &self.accounts)
+            {
+                self.instruction_name = Some(decoded.name.clone());
+                self.decoded_instruction = Some(decoded);
+                self.program_name = decoder.program_name().to_string();
+            }
+        }
+    }
+
+    /// Find parent instruction at target depth for nesting
+    pub fn find_parent_for_instruction(
+        instructions: &mut [EnhancedInstructionLog],
+        target_depth: usize,
+    ) -> Option<&mut EnhancedInstructionLog> {
+        for instruction in instructions.iter_mut().rev() {
+            if instruction.depth == target_depth {
+                return Some(instruction);
+            }
+            if let Some(parent) =
+                Self::find_parent_for_instruction(&mut instruction.inner_instructions, target_depth)
+            {
+                return Some(parent);
+            }
+        }
+        None
+    }
+}
+
+/// Account state changes during transaction
+#[derive(Debug, Clone)]
+pub struct AccountChange {
+    pub pubkey: Pubkey,
+    pub account_type: String,
+    pub access: AccountAccess,
+    pub account_index: usize,
+    pub lamports_before: u64,
+    pub lamports_after: u64,
+    pub data_len_before: usize,
+    pub data_len_after: usize,
+    pub owner: Pubkey,
+    pub executable: bool,
+    pub rent_epoch: u64,
+}
+
+/// Account access pattern during transaction
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum AccountAccess {
+    Readonly,
+    Writable,
+    Signer,
+    SignerWritable,
+}
+
+impl AccountAccess {
+    pub fn symbol(&self, index: usize) -> String {
+        format!("#{}", index)
+    }
+
+    pub fn text(&self) -> &'static str {
+        match self {
+            AccountAccess::Readonly => "readonly",
+            AccountAccess::Writable => "writable",
+            AccountAccess::Signer => "signer",
+            AccountAccess::SignerWritable => "signer+writable",
+        }
+    }
+}
+
+/// Light Protocol specific events
+#[derive(Debug, Clone)]
+pub struct LightProtocolEvent {
+    pub event_type: String,
+    pub compressed_accounts: Vec<CompressedAccountInfo>,
+    pub merkle_tree_changes: Vec<MerkleTreeChange>,
+    pub nullifiers: Vec<String>,
+}
+
+/// Compressed account information
+#[derive(Debug, Clone)]
+pub struct CompressedAccountInfo {
+    pub hash: String,
+    pub owner: Pubkey,
+    pub lamports: u64,
+    pub data: Option<Vec<u8>>,
+    pub address: Option<String>,
+}
+
+/// Merkle tree state change
+#[derive(Debug, Clone)]
+pub struct MerkleTreeChange {
+    pub tree_pubkey: Pubkey,
+    pub tree_type: String,
+    pub sequence_number: u64,
+    pub leaf_index: u64,
+}
+
+/// Get human-readable program name from pubkey
+///
+/// First consults the decoder registry if provided, then falls back to hardcoded mappings.
+pub fn get_program_name(program_id: &Pubkey, registry: Option<&DecoderRegistry>) -> String {
+    // First try to get the name from the decoder registry
+    if let Some(reg) = registry {
+        if let Some(decoder) = reg.get_decoder(program_id) {
+            return decoder.program_name().to_string();
+        }
+    }
+
+    // Fall back to hardcoded mappings for programs without decoders
+    match program_id.to_string().as_str() {
+        "11111111111111111111111111111111" => "System Program".to_string(),
+        "ComputeBudget111111111111111111111111111111" => "Compute Budget".to_string(),
+        "SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7" => "Light System Program".to_string(),
+        "compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq" => "Account Compression".to_string(),
+        "cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m" => "Compressed Token Program".to_string(),
+        _ => format!("Unknown Program ({})", program_id),
+    }
+}
diff --git a/sdk-libs/program-test/Cargo.toml b/sdk-libs/program-test/Cargo.toml
index 9a23add219..c672640c05 100644
--- 
a/sdk-libs/program-test/Cargo.toml +++ b/sdk-libs/program-test/Cargo.toml @@ -53,6 +53,7 @@ solana-sdk = { workspace = true } solana-banks-client = { workspace = true } solana-pubkey = { workspace = true } solana-instruction = { workspace = true } +light-instruction-decoder = { workspace = true } solana-account = { workspace = true } solana-compute-budget = { workspace = true } rand = { workspace = true } diff --git a/sdk-libs/program-test/src/logging/decoder.rs b/sdk-libs/program-test/src/logging/decoder.rs deleted file mode 100644 index 91706c189b..0000000000 --- a/sdk-libs/program-test/src/logging/decoder.rs +++ /dev/null @@ -1,1043 +0,0 @@ -//! Instruction decoder for Light Protocol and common Solana programs - -use borsh::BorshDeserialize; -use light_compressed_account::instruction_data::{ - data::InstructionDataInvoke, invoke_cpi::InstructionDataInvokeCpi, - with_account_info::InstructionDataInvokeCpiWithAccountInfo, - with_readonly::InstructionDataInvokeCpiWithReadOnly, -}; -use solana_sdk::{instruction::AccountMeta, pubkey::Pubkey, system_program}; - -use super::types::ParsedInstructionData; - -/// Helper to resolve merkle tree and queue pubkeys from instruction accounts -/// For InvokeCpi instructions, tree accounts start 2 positions after the system program -fn resolve_tree_and_queue_pubkeys( - accounts: &[AccountMeta], - merkle_tree_index: Option, - nullifier_queue_index: Option, -) -> (Option, Option) { - let mut tree_pubkey = None; - let mut queue_pubkey = None; - - // Find the system program account position - let mut system_program_pos = None; - for (i, account) in accounts.iter().enumerate() { - if account.pubkey == system_program::ID { - system_program_pos = Some(i); - break; - } - } - - if let Some(system_pos) = system_program_pos { - // Tree accounts start 2 positions after system program - let tree_accounts_start = system_pos + 2; - - if let Some(tree_idx) = merkle_tree_index { - let tree_account_pos = tree_accounts_start + tree_idx as usize; - 
if tree_account_pos < accounts.len() { - tree_pubkey = Some(accounts[tree_account_pos].pubkey); - } - } - - if let Some(queue_idx) = nullifier_queue_index { - let queue_account_pos = tree_accounts_start + queue_idx as usize; - if queue_account_pos < accounts.len() { - queue_pubkey = Some(accounts[queue_account_pos].pubkey); - } - } - } - - (tree_pubkey, queue_pubkey) -} - -/// Decode instruction data for known programs -pub fn decode_instruction( - program_id: &Pubkey, - data: &[u8], - accounts: &[AccountMeta], -) -> Option { - match program_id.to_string().as_str() { - // Light System Program - "SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7" => { - decode_light_system_instruction(data, accounts, program_id) - } - - // Compute Budget Program - "ComputeBudget111111111111111111111111111111" => decode_compute_budget_instruction(data), - - // System Program - id if id == system_program::ID.to_string() => decode_system_instruction(data), - - // Account Compression Program - "compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq" => decode_compression_instruction(data), - - // Compressed Token Program - "cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m" => decode_compressed_token_instruction(data), - - _ => Some(ParsedInstructionData::Unknown { - program_name: get_program_name(program_id), - data_preview: bs58::encode(&data[..data.len().min(16)]).into_string(), - }), - } -} - -/// Decode Light System Program instructions -fn decode_light_system_instruction( - data: &[u8], - accounts: &[AccountMeta], - program_id: &Pubkey, -) -> Option { - if data.is_empty() { - return None; - } - - // Light System Program uses 8-byte discriminators - if data.len() < 8 { - return Some(ParsedInstructionData::LightSystemProgram { - instruction_type: "Invalid".to_string(), - compressed_accounts: None, - proof_info: None, - address_params: None, - fee_info: None, - input_account_data: None, - output_account_data: None, - }); - } - - // Extract the 8-byte discriminator - let discriminator: [u8; 8] = 
data[0..8].try_into().unwrap(); - - // Light Protocol discriminators from compressed-account/src/discriminators.rs - let ( - instruction_type, - compressed_accounts, - proof_info, - address_params, - fee_info, - input_account_data, - output_account_data, - ) = match discriminator { - [26, 16, 169, 7, 21, 202, 242, 25] => { - // DISCRIMINATOR_INVOKE - match parse_invoke_instruction(&data[8..], accounts) { - Ok(parsed) => parsed, - Err(_) => ( - "Invoke (parse error)".to_string(), - None, - None, - None, - None, - None, - None, - ), - } - } - [49, 212, 191, 129, 39, 194, 43, 196] => { - // DISCRIMINATOR_INVOKE_CPI - match parse_invoke_cpi_instruction(&data[8..], accounts) { - Ok(parsed) => parsed, - Err(_) => ( - "InvokeCpi (parse error)".to_string(), - None, - None, - None, - None, - None, - None, - ), - } - } - [86, 47, 163, 166, 21, 223, 92, 8] => { - // DISCRIMINATOR_INVOKE_CPI_WITH_READ_ONLY - match parse_invoke_cpi_readonly_instruction(&data[8..], accounts) { - Ok(parsed) => parsed, - Err(_) => ( - "InvokeCpiWithReadOnly (parse error)".to_string(), - None, - None, - None, - None, - None, - None, - ), - } - } - [228, 34, 128, 84, 47, 139, 86, 240] => { - // INVOKE_CPI_WITH_ACCOUNT_INFO_INSTRUCTION - match parse_invoke_cpi_account_info_instruction(&data[8..], accounts, program_id) { - Ok(parsed) => parsed, - Err(_) => ( - "InvokeCpiWithAccountInfo (parse error)".to_string(), - None, - None, - None, - None, - None, - None, - ), - } - } - _ => { - // Unknown discriminator - show the discriminator bytes for debugging - let discriminator_str = format!("{:?}", discriminator); - ( - format!("Unknown({})", discriminator_str), - None, - None, - None, - None, - None, - None, - ) - } - }; - - Some(ParsedInstructionData::LightSystemProgram { - instruction_type, - compressed_accounts, - proof_info, - address_params, - fee_info, - input_account_data, - output_account_data, - }) -} - -type InstructionParseResult = Result< - ( - String, - Option, - Option, - Option>, - Option, 
- Option>, - Option>, - ), - Box, ->; - -/// Parse Invoke instruction data - display data hashes directly -fn parse_invoke_instruction(data: &[u8], accounts: &[AccountMeta]) -> InstructionParseResult { - // Skip the 4-byte vec length prefix that Anchor adds - if data.len() < 4 { - return Err("Instruction data too short for Anchor prefix".into()); - } - let instruction_data = InstructionDataInvoke::try_from_slice(&data[4..])?; - - let compressed_accounts = Some(super::types::CompressedAccountSummary { - input_accounts: instruction_data - .input_compressed_accounts_with_merkle_context - .len(), - output_accounts: instruction_data.output_compressed_accounts.len(), - lamports_change: instruction_data - .compress_or_decompress_lamports - .map(|l| l as i64), - }); - - let proof_info = instruction_data - .proof - .as_ref() - .map(|_| super::types::ProofSummary { - proof_type: "Validity".to_string(), - has_validity_proof: true, - }); - - // Extract actual address parameters with values - let address_params = if !instruction_data.new_address_params.is_empty() { - Some( - instruction_data - .new_address_params - .iter() - .map(|param| { - let tree_idx = Some(param.address_merkle_tree_account_index); - let queue_idx = Some(param.address_queue_account_index); - let (tree_pubkey, queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, queue_idx); - - super::types::AddressParam { - seed: param.seed, - address_queue_index: queue_idx, - address_queue_pubkey: queue_pubkey, - merkle_tree_index: tree_idx, - address_merkle_tree_pubkey: tree_pubkey, - root_index: Some(param.address_merkle_tree_root_index), - derived_address: None, - assigned_account_index: super::types::AddressAssignment::V1, - } - }) - .collect(), - ) - } else { - None - }; - - // Extract input account data - let input_account_data = if !instruction_data - .input_compressed_accounts_with_merkle_context - .is_empty() - { - Some( - instruction_data - .input_compressed_accounts_with_merkle_context - .iter() 
- .map(|acc| { - let tree_idx = Some(acc.merkle_context.merkle_tree_pubkey_index); - let queue_idx = Some(acc.merkle_context.queue_pubkey_index); - let (tree_pubkey, queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, queue_idx); - - super::types::InputAccountData { - lamports: acc.compressed_account.lamports, - owner: Some(acc.compressed_account.owner.into()), - merkle_tree_index: tree_idx, - merkle_tree_pubkey: tree_pubkey, - queue_index: queue_idx, - queue_pubkey, - address: acc.compressed_account.address, - data_hash: if let Some(ref data) = acc.compressed_account.data { - data.data_hash.to_vec() - } else { - vec![] - }, - discriminator: if let Some(ref data) = acc.compressed_account.data { - data.discriminator.to_vec() - } else { - vec![] - }, - leaf_index: Some(acc.merkle_context.leaf_index), - root_index: Some(acc.root_index), - } - }) - .collect(), - ) - } else { - None - }; - - // Extract output account data - let output_account_data = if !instruction_data.output_compressed_accounts.is_empty() { - Some( - instruction_data - .output_compressed_accounts - .iter() - .map(|acc| { - let tree_idx = Some(acc.merkle_tree_index); - let (tree_pubkey, _queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, None); - - super::types::OutputAccountData { - lamports: acc.compressed_account.lamports, - data: acc.compressed_account.data.as_ref().map(|d| d.data.clone()), - owner: Some(acc.compressed_account.owner.into()), - merkle_tree_index: tree_idx, - merkle_tree_pubkey: tree_pubkey, - queue_index: None, - queue_pubkey: None, - address: acc.compressed_account.address, - data_hash: if let Some(ref data) = acc.compressed_account.data { - data.data_hash.to_vec() - } else { - vec![] - }, - discriminator: if let Some(ref data) = acc.compressed_account.data { - data.discriminator.to_vec() - } else { - vec![] - }, - } - }) - .collect(), - ) - } else { - None - }; - - let fee_info = instruction_data - .relay_fee - .map(|fee| super::types::FeeSummary 
{ - relay_fee: Some(fee), - compression_fee: None, - }); - - Ok(( - "Invoke".to_string(), - compressed_accounts, - proof_info, - address_params, - fee_info, - input_account_data, - output_account_data, - )) -} - -/// Parse InvokeCpi instruction data - display data hashes directly -fn parse_invoke_cpi_instruction(data: &[u8], accounts: &[AccountMeta]) -> InstructionParseResult { - // Skip the 4-byte vec length prefix that Anchor adds - if data.len() < 4 { - return Err("Instruction data too short for Anchor prefix".into()); - } - let instruction_data = InstructionDataInvokeCpi::try_from_slice(&data[4..])?; - - let compressed_accounts = Some(super::types::CompressedAccountSummary { - input_accounts: instruction_data - .input_compressed_accounts_with_merkle_context - .len(), - output_accounts: instruction_data.output_compressed_accounts.len(), - lamports_change: instruction_data - .compress_or_decompress_lamports - .map(|l| l as i64), - }); - - let proof_info = instruction_data - .proof - .as_ref() - .map(|_| super::types::ProofSummary { - proof_type: "Validity".to_string(), - has_validity_proof: true, - }); - - // Extract actual address parameters with values - let address_params = if !instruction_data.new_address_params.is_empty() { - Some( - instruction_data - .new_address_params - .iter() - .map(|param| { - let tree_idx = Some(param.address_merkle_tree_account_index); - let queue_idx = Some(param.address_queue_account_index); - let (tree_pubkey, queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, queue_idx); - - super::types::AddressParam { - seed: param.seed, - address_queue_index: queue_idx, - address_queue_pubkey: queue_pubkey, - merkle_tree_index: tree_idx, - address_merkle_tree_pubkey: tree_pubkey, - root_index: Some(param.address_merkle_tree_root_index), - derived_address: None, - assigned_account_index: super::types::AddressAssignment::V1, - } - }) - .collect(), - ) - } else { - None - }; - - // Extract input account data - let 
input_account_data = if !instruction_data - .input_compressed_accounts_with_merkle_context - .is_empty() - { - Some( - instruction_data - .input_compressed_accounts_with_merkle_context - .iter() - .map(|acc| { - let tree_idx = Some(acc.merkle_context.merkle_tree_pubkey_index); - let queue_idx = Some(acc.merkle_context.queue_pubkey_index); - let (tree_pubkey, queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, queue_idx); - - super::types::InputAccountData { - lamports: acc.compressed_account.lamports, - owner: Some(acc.compressed_account.owner.into()), - merkle_tree_index: tree_idx, - merkle_tree_pubkey: tree_pubkey, - queue_index: queue_idx, - queue_pubkey, - address: acc.compressed_account.address, - data_hash: if let Some(ref data) = acc.compressed_account.data { - data.data_hash.to_vec() - } else { - vec![] - }, - discriminator: if let Some(ref data) = acc.compressed_account.data { - data.discriminator.to_vec() - } else { - vec![] - }, - leaf_index: Some(acc.merkle_context.leaf_index), - root_index: Some(acc.root_index), - } - }) - .collect(), - ) - } else { - None - }; - - // Extract output account data - let output_account_data = if !instruction_data.output_compressed_accounts.is_empty() { - Some( - instruction_data - .output_compressed_accounts - .iter() - .map(|acc| { - let tree_idx = Some(acc.merkle_tree_index); - let (tree_pubkey, _queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, None); - - super::types::OutputAccountData { - lamports: acc.compressed_account.lamports, - data: acc.compressed_account.data.as_ref().map(|d| d.data.clone()), - owner: Some(acc.compressed_account.owner.into()), - merkle_tree_index: tree_idx, - merkle_tree_pubkey: tree_pubkey, - queue_index: None, - queue_pubkey: None, - address: acc.compressed_account.address, - data_hash: if let Some(ref data) = acc.compressed_account.data { - data.data_hash.to_vec() - } else { - vec![] - }, - discriminator: if let Some(ref data) = 
acc.compressed_account.data { - data.discriminator.to_vec() - } else { - vec![] - }, - } - }) - .collect(), - ) - } else { - None - }; - - let fee_info = instruction_data - .relay_fee - .map(|fee| super::types::FeeSummary { - relay_fee: Some(fee), - compression_fee: None, - }); - - Ok(( - "InvokeCpi".to_string(), - compressed_accounts, - proof_info, - address_params, - fee_info, - input_account_data, - output_account_data, - )) -} - -/// Parse InvokeCpiWithReadOnly instruction data - display data hashes directly -fn parse_invoke_cpi_readonly_instruction( - data: &[u8], - accounts: &[AccountMeta], -) -> InstructionParseResult { - let instruction_data = InstructionDataInvokeCpiWithReadOnly::try_from_slice(data)?; - - let compressed_accounts = Some(super::types::CompressedAccountSummary { - input_accounts: instruction_data.input_compressed_accounts.len(), - output_accounts: instruction_data.output_compressed_accounts.len(), - lamports_change: if instruction_data.compress_or_decompress_lamports > 0 { - Some(instruction_data.compress_or_decompress_lamports as i64) - } else { - None - }, - }); - - let proof_info = Some(super::types::ProofSummary { - proof_type: "Validity".to_string(), - has_validity_proof: true, - }); - - // Extract actual address parameters with values - let mut address_params = Vec::new(); - - // Add new address parameters with actual values - for param in &instruction_data.new_address_params { - let tree_idx = Some(param.address_merkle_tree_account_index); - let queue_idx = Some(param.address_queue_account_index); - let (tree_pubkey, queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, queue_idx); - - address_params.push(super::types::AddressParam { - seed: param.seed, - address_queue_index: queue_idx, - address_queue_pubkey: queue_pubkey, - merkle_tree_index: tree_idx, - address_merkle_tree_pubkey: tree_pubkey, - root_index: Some(param.address_merkle_tree_root_index), - derived_address: None, - assigned_account_index: if 
param.assigned_to_account { - super::types::AddressAssignment::AssignedIndex(param.assigned_account_index) - } else { - super::types::AddressAssignment::None - }, - }); - } - - // Add readonly address parameters - for readonly_addr in &instruction_data.read_only_addresses { - let tree_idx = Some(readonly_addr.address_merkle_tree_account_index); - let (tree_pubkey, _queue_pubkey) = resolve_tree_and_queue_pubkeys(accounts, tree_idx, None); - - address_params.push(super::types::AddressParam { - seed: [0; 32], // ReadOnly addresses don't have seeds in the same way - address_queue_index: None, - address_queue_pubkey: None, - merkle_tree_index: tree_idx, - address_merkle_tree_pubkey: tree_pubkey, - root_index: Some(readonly_addr.address_merkle_tree_root_index), - derived_address: Some(readonly_addr.address), - assigned_account_index: super::types::AddressAssignment::None, - }); - } - - let address_params = if !address_params.is_empty() { - Some(address_params) - } else { - None - }; - - // Extract input account data - use data_hash from InAccount - let input_account_data = if !instruction_data.input_compressed_accounts.is_empty() { - Some( - instruction_data - .input_compressed_accounts - .iter() - .map(|acc| { - let tree_idx = Some(acc.merkle_context.merkle_tree_pubkey_index); - let queue_idx = Some(acc.merkle_context.queue_pubkey_index); - let (tree_pubkey, queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, queue_idx); - - super::types::InputAccountData { - lamports: acc.lamports, - owner: Some(instruction_data.invoking_program_id.into()), // Use invoking program as owner - merkle_tree_index: tree_idx, - merkle_tree_pubkey: tree_pubkey, - queue_index: queue_idx, - queue_pubkey, - address: acc.address, - data_hash: acc.data_hash.to_vec(), - discriminator: acc.discriminator.to_vec(), - leaf_index: Some(acc.merkle_context.leaf_index), - root_index: Some(acc.root_index), - } - }) - .collect(), - ) - } else { - None - }; - - // Extract output account data 
- let output_account_data = if !instruction_data.output_compressed_accounts.is_empty() { - Some( - instruction_data - .output_compressed_accounts - .iter() - .map(|acc| { - let tree_idx = Some(acc.merkle_tree_index); - let (tree_pubkey, _queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, None); - - super::types::OutputAccountData { - lamports: acc.compressed_account.lamports, - data: acc.compressed_account.data.as_ref().map(|d| d.data.clone()), - owner: Some(instruction_data.invoking_program_id.into()), // Use invoking program as owner for consistency - merkle_tree_index: tree_idx, - merkle_tree_pubkey: tree_pubkey, - queue_index: None, - queue_pubkey: None, - address: acc.compressed_account.address, - data_hash: if let Some(ref data) = acc.compressed_account.data { - data.data_hash.to_vec() - } else { - vec![] - }, - discriminator: if let Some(ref data) = acc.compressed_account.data { - data.discriminator.to_vec() - } else { - vec![] - }, - } - }) - .collect(), - ) - } else { - None - }; - - Ok(( - "InvokeCpiWithReadOnly".to_string(), - compressed_accounts, - proof_info, - address_params, - None, - input_account_data, - output_account_data, - )) -} - -/// Parse InvokeCpiWithAccountInfo instruction data - display data hashes directly -fn parse_invoke_cpi_account_info_instruction( - data: &[u8], - accounts: &[AccountMeta], - program_id: &Pubkey, -) -> InstructionParseResult { - let instruction_data = InstructionDataInvokeCpiWithAccountInfo::try_from_slice(data)?; - - let input_accounts = instruction_data - .account_infos - .iter() - .filter(|a| a.input.is_some()) - .count(); - let output_accounts = instruction_data - .account_infos - .iter() - .filter(|a| a.output.is_some()) - .count(); - - let compressed_accounts = Some(super::types::CompressedAccountSummary { - input_accounts, - output_accounts, - lamports_change: if instruction_data.compress_or_decompress_lamports > 0 { - Some(instruction_data.compress_or_decompress_lamports as i64) - } else { - 
None - }, - }); - - let proof_info = Some(super::types::ProofSummary { - proof_type: "Validity".to_string(), - has_validity_proof: true, - }); - - // Extract actual address parameters with values - let mut address_params = Vec::new(); - - // Add new address parameters with actual values - for param in &instruction_data.new_address_params { - let tree_idx = Some(param.address_merkle_tree_account_index); - let queue_idx = Some(param.address_queue_account_index); - let (tree_pubkey, queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, queue_idx); - - address_params.push(super::types::AddressParam { - seed: param.seed, - address_queue_index: queue_idx, - address_queue_pubkey: queue_pubkey, - merkle_tree_index: tree_idx, - address_merkle_tree_pubkey: tree_pubkey, - root_index: Some(param.address_merkle_tree_root_index), - derived_address: None, - assigned_account_index: if param.assigned_to_account { - super::types::AddressAssignment::AssignedIndex(param.assigned_account_index) - } else { - super::types::AddressAssignment::None - }, - }); - } - - // Add readonly address parameters - for readonly_addr in &instruction_data.read_only_addresses { - let tree_idx = Some(readonly_addr.address_merkle_tree_account_index); - let (tree_pubkey, _queue_pubkey) = resolve_tree_and_queue_pubkeys(accounts, tree_idx, None); - - address_params.push(super::types::AddressParam { - seed: [0; 32], // ReadOnly addresses don't have seeds in the same way - address_queue_index: None, - address_queue_pubkey: None, - merkle_tree_index: tree_idx, - address_merkle_tree_pubkey: tree_pubkey, - root_index: Some(readonly_addr.address_merkle_tree_root_index), - derived_address: Some(readonly_addr.address), - assigned_account_index: super::types::AddressAssignment::None, - }); - } - - let address_params = if !address_params.is_empty() { - Some(address_params) - } else { - None - }; - - // Extract input account data from account_infos - let input_account_data = { - let mut input_data = 
Vec::new(); - for account_info in &instruction_data.account_infos { - if let Some(ref input) = account_info.input { - input_data.push(super::types::InputAccountData { - lamports: input.lamports, - owner: Some(*program_id), // Use invoking program as owner - merkle_tree_index: None, // Note: merkle tree context not available in CompressedAccountInfo - merkle_tree_pubkey: None, - queue_index: None, - queue_pubkey: None, - address: account_info.address, // Use address from CompressedAccountInfo - data_hash: input.data_hash.to_vec(), - discriminator: input.discriminator.to_vec(), - leaf_index: Some(input.merkle_context.leaf_index), - root_index: Some(input.root_index), - }); - } - } - if !input_data.is_empty() { - Some(input_data) - } else { - None - } - }; - - // Extract output account data from account_infos - let output_account_data = { - let mut output_data = Vec::new(); - for account_info in &instruction_data.account_infos { - if let Some(ref output) = account_info.output { - let tree_idx = Some(output.output_merkle_tree_index); - let (tree_pubkey, _queue_pubkey) = - resolve_tree_and_queue_pubkeys(accounts, tree_idx, None); - - output_data.push(super::types::OutputAccountData { - lamports: output.lamports, - data: if !output.data.is_empty() { - Some(output.data.clone()) - } else { - None - }, - owner: Some(*program_id), // Use invoking program as owner - merkle_tree_index: tree_idx, - merkle_tree_pubkey: tree_pubkey, - queue_index: None, - queue_pubkey: None, - address: account_info.address, // Use address from CompressedAccountInfo - data_hash: output.data_hash.to_vec(), - discriminator: output.discriminator.to_vec(), - }); - } - } - if !output_data.is_empty() { - Some(output_data) - } else { - None - } - }; - - Ok(( - "InvokeCpiWithAccountInfo".to_string(), - compressed_accounts, - proof_info, - address_params, - None, - input_account_data, - output_account_data, - )) -} - -/// Decode Compute Budget Program instructions -fn 
decode_compute_budget_instruction(data: &[u8]) -> Option { - if data.len() < 4 { - return None; - } - - let instruction_discriminator = u32::from_le_bytes([data[0], data[1], data[2], data[3]]); - - match instruction_discriminator { - 0 => { - // RequestUnitsDeprecated - if data.len() >= 12 { - let units = u32::from_le_bytes([data[4], data[5], data[6], data[7]]) as u64; - let _additional_fee = - u32::from_le_bytes([data[8], data[9], data[10], data[11]]) as u64; - Some(ParsedInstructionData::ComputeBudget { - instruction_type: "RequestUnitsDeprecated".to_string(), - value: Some(units), - }) - } else { - None - } - } - 1 => { - // RequestHeapFrame - if data.len() >= 8 { - let bytes = u32::from_le_bytes([data[4], data[5], data[6], data[7]]) as u64; - Some(ParsedInstructionData::ComputeBudget { - instruction_type: "RequestHeapFrame".to_string(), - value: Some(bytes), - }) - } else { - None - } - } - 2 => { - // SetComputeUnitLimit - if data.len() >= 8 { - let units = u32::from_le_bytes([data[4], data[5], data[6], data[7]]) as u64; - Some(ParsedInstructionData::ComputeBudget { - instruction_type: "SetComputeUnitLimit".to_string(), - value: Some(units), - }) - } else { - None - } - } - 3 => { - // SetComputeUnitPrice - if data.len() >= 12 { - let price = u64::from_le_bytes([ - data[4], data[5], data[6], data[7], data[8], data[9], data[10], data[11], - ]); - Some(ParsedInstructionData::ComputeBudget { - instruction_type: "SetComputeUnitPrice".to_string(), - value: Some(price), - }) - } else { - None - } - } - _ => Some(ParsedInstructionData::ComputeBudget { - instruction_type: "Unknown".to_string(), - value: None, - }), - } -} - -/// Decode System Program instructions -fn decode_system_instruction(data: &[u8]) -> Option { - if data.len() < 4 { - return None; - } - - let instruction_type = u32::from_le_bytes([data[0], data[1], data[2], data[3]]); - - match instruction_type { - 0 => { - // CreateAccount - if data.len() >= 52 { - let lamports = u64::from_le_bytes([ - data[4], 
data[5], data[6], data[7], data[8], data[9], data[10], data[11], - ]); - let space = u64::from_le_bytes([ - data[12], data[13], data[14], data[15], data[16], data[17], data[18], data[19], - ]); - - Some(ParsedInstructionData::System { - instruction_type: "CreateAccount".to_string(), - lamports: Some(lamports), - space: Some(space), - new_account: None, - }) - } else { - None - } - } - 2 => { - // Transfer - if data.len() >= 12 { - let lamports = u64::from_le_bytes([ - data[4], data[5], data[6], data[7], data[8], data[9], data[10], data[11], - ]); - - Some(ParsedInstructionData::System { - instruction_type: "Transfer".to_string(), - lamports: Some(lamports), - space: None, - new_account: None, - }) - } else { - None - } - } - 8 => { - // Allocate - if data.len() >= 12 { - let space = u64::from_le_bytes([ - data[4], data[5], data[6], data[7], data[8], data[9], data[10], data[11], - ]); - - Some(ParsedInstructionData::System { - instruction_type: "Allocate".to_string(), - lamports: None, - space: Some(space), - new_account: None, - }) - } else { - None - } - } - _ => Some(ParsedInstructionData::System { - instruction_type: "Unknown".to_string(), - lamports: None, - space: None, - new_account: None, - }), - } -} - -/// Decode Account Compression Program instructions -fn decode_compression_instruction(data: &[u8]) -> Option { - // Return basic instruction info for account compression - let instruction_name = if data.len() >= 8 { - // Common account compression operations - "InsertIntoQueues" - } else { - "Unknown" - }; - - Some(ParsedInstructionData::Unknown { - program_name: "Account Compression".to_string(), - data_preview: format!("{}({}bytes)", instruction_name, data.len()), - }) -} - -/// Decode Compressed Token Program instructions -fn decode_compressed_token_instruction(data: &[u8]) -> Option { - // Return basic instruction info for compressed token operations - let instruction_name = if data.len() >= 8 { - // Common compressed token operations - "TokenOperation" 
- } else { - "Unknown" - }; - - Some(ParsedInstructionData::Unknown { - program_name: "Compressed Token".to_string(), - data_preview: format!("{}({}bytes)", instruction_name, data.len()), - }) -} - -/// Get human-readable program name -fn get_program_name(program_id: &Pubkey) -> String { - match program_id.to_string().as_str() { - id if id == system_program::ID.to_string() => "System Program".to_string(), - "ComputeBudget111111111111111111111111111111" => "Compute Budget".to_string(), - "SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7" => "Light System Program".to_string(), - "compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq" => "Account Compression".to_string(), - "FNt7byTHev1k5x2cXZLBr8TdWiC3zoP5vcnZR4P682Uy" => "Test Program".to_string(), - _ => { - let pubkey_str = program_id.to_string(); - format!("Program {}", &pubkey_str[..8]) - } - } -} - -/// Extract Light Protocol events from transaction logs and metadata -pub fn extract_light_events( - logs: &[String], - _events: &Option>, // Light Protocol events for future enhancement -) -> Vec { - let mut light_events = Vec::new(); - - // Parse events from logs - for log in logs { - if log.contains("PublicTransactionEvent") || log.contains("BatchPublicTransactionEvent") { - // Parse Light Protocol events from logs - light_events.push(super::types::LightProtocolEvent { - event_type: "PublicTransactionEvent".to_string(), - compressed_accounts: Vec::new(), - merkle_tree_changes: Vec::new(), - nullifiers: Vec::new(), - }); - } - } - - light_events -} diff --git a/sdk-libs/program-test/src/logging/formatter.rs b/sdk-libs/program-test/src/logging/formatter.rs deleted file mode 100644 index 3ad41c612d..0000000000 --- a/sdk-libs/program-test/src/logging/formatter.rs +++ /dev/null @@ -1,1288 +0,0 @@ -//! 
Transaction formatting utilities for explorer-style output - -use std::fmt::{self, Write}; - -use solana_sdk::system_program; -use tabled::{Table, Tabled}; - -use super::{ - config::{EnhancedLoggingConfig, LogVerbosity}, - types::{ - AccountAccess, AccountChange, EnhancedInstructionLog, EnhancedTransactionLog, - TransactionStatus, - }, -}; - -/// Row for account table display -#[derive(Tabled)] -struct AccountRow { - #[tabled(rename = "#")] - symbol: String, - #[tabled(rename = "Account")] - pubkey: String, - #[tabled(rename = "Type")] - access: String, - #[tabled(rename = "Name")] - name: String, -} - -/// Colors for terminal output -#[derive(Debug, Clone)] -pub struct Colors { - pub bold: String, - pub reset: String, - pub green: String, - pub red: String, - pub yellow: String, - pub blue: String, - pub cyan: String, - pub gray: String, -} - -impl Colors { - pub fn new(use_colors: bool) -> Self { - if use_colors { - Self { - bold: "\x1b[1m".to_string(), - reset: "\x1b[0m".to_string(), - green: "\x1b[32m".to_string(), - red: "\x1b[31m".to_string(), - yellow: "\x1b[33m".to_string(), - blue: "\x1b[34m".to_string(), - cyan: "\x1b[36m".to_string(), - gray: "\x1b[90m".to_string(), - } - } else { - Self { - bold: String::new(), - reset: String::new(), - green: String::new(), - red: String::new(), - yellow: String::new(), - blue: String::new(), - cyan: String::new(), - gray: String::new(), - } - } - } -} - -/// Transaction formatter with configurable output -pub struct TransactionFormatter { - config: EnhancedLoggingConfig, - colors: Colors, -} - -impl TransactionFormatter { - pub fn new(config: &EnhancedLoggingConfig) -> Self { - Self { - config: config.clone(), - colors: Colors::new(config.use_colors), - } - } - - /// Apply line breaks to long values in the complete output - fn apply_line_breaks(&self, text: &str) -> String { - let mut result = String::new(); - - for line in text.lines() { - // Look for patterns that need line breaking - if let Some(formatted_line) = 
self.format_line_if_needed(line) { - result.push_str(&formatted_line); - } else { - result.push_str(line); - } - result.push('\n'); - } - - result - } - - /// Format a line if it contains long values that need breaking - fn format_line_if_needed(&self, line: &str) -> Option { - // Extract leading whitespace/indentation and table characters - let leading_chars = line - .chars() - .take_while(|&c| c.is_whitespace() || "│├└┌┬┴┐┤─".contains(c)) - .collect::(); - - // Match patterns like "address: [0, 1, 2, 3, ...]" or "Raw instruction data (N bytes): [...]" - if line.contains(": [") && line.contains("]") { - // Handle byte arrays - if let Some(start) = line.find(": [") { - if let Some(end_pos) = line[start..].find(']') { - let end = start + end_pos; - let prefix = &line[..start + 2]; // Include ": " - let array_part = &line[start + 2..end + 1]; // The "[...]" part - let suffix = &line[end + 1..]; - - // For raw instruction data, use a shorter line length to better fit in terminal - let max_width = if line.contains("Raw instruction data") { - 80 // Wider for raw instruction data to fit more numbers per line - } else { - 50 // Keep existing width for other arrays - }; - - // Always format if it's raw instruction data or if it exceeds max_width - if line.contains("Raw instruction data") || array_part.len() > max_width { - let formatted_array = self.format_long_value_with_indent( - array_part, - max_width, - &leading_chars, - ); - return Some(format!("{}{}{}", prefix, formatted_array, suffix)); - } - } - } - } - - // Handle long base58 strings (44+ characters) in table cells - if line.contains('|') && !line.trim_start().starts_with('|') { - // This is a table content line, not a border - let mut new_line = String::new(); - let mut modified = false; - - // Split by table separators while preserving them - let parts: Vec<&str> = line.split('|').collect(); - for (i, part) in parts.iter().enumerate() { - if i > 0 { - new_line.push('|'); - } - - // Check if this cell contains a 
long value - for word in part.split_whitespace() { - if word.len() > 44 && word.chars().all(|c| c.is_alphanumeric()) { - let indent = " ".repeat(leading_chars.len() + 2); // Extra space for table formatting - let formatted_word = self.format_long_value_with_indent(word, 44, &indent); - new_line.push_str(&part.replace(word, &formatted_word)); - modified = true; - break; - } - } - - if !modified { - new_line.push_str(part); - } - } - - if modified { - return Some(new_line); - } - } - - None - } - - /// Format long value with proper indentation for continuation lines - fn format_long_value_with_indent(&self, value: &str, max_width: usize, indent: &str) -> String { - if value.len() <= max_width { - return value.to_string(); - } - - let mut result = String::new(); - - // Handle byte arrays specially by breaking at natural comma boundaries when possible - if value.starts_with('[') && value.ends_with(']') { - // This is a byte array - try to break at comma boundaries for better readability - let inner = &value[1..value.len() - 1]; // Remove [ and ] - let parts: Vec<&str> = inner.split(", ").collect(); - - result.push('['); - let mut current_line = String::new(); - let mut first_line = true; - - for (i, part) in parts.iter().enumerate() { - let addition = if i == 0 { - part.to_string() - } else { - format!(", {}", part) - }; - - // Check if adding this part would exceed the line width - if current_line.len() + addition.len() > max_width && !current_line.is_empty() { - // Add current line to result and start new line - if first_line { - result.push_str(¤t_line); - first_line = false; - } else { - result.push_str(&format!("\n{}{}", indent, current_line)); - } - current_line = part.to_string(); - } else { - current_line.push_str(&addition); - } - } - - // Add the last line - if !current_line.is_empty() { - if first_line { - result.push_str(¤t_line); - } else { - result.push_str(&format!("\n{}{}", indent, current_line)); - } - } - - result.push(']'); - } else { - // Fall back 
to character-based breaking for non-array values - let chars = value.chars().collect::>(); - let mut pos = 0; - - while pos < chars.len() { - let end = (pos + max_width).min(chars.len()); - let chunk: String = chars[pos..end].iter().collect(); - - if pos == 0 { - result.push_str(&chunk); - } else { - result.push_str(&format!("\n{}{}", indent, chunk)); - } - - pos = end; - } - } - - result - } - - /// Format complete transaction log - pub fn format(&self, log: &EnhancedTransactionLog, tx_number: usize) -> String { - let mut output = String::new(); - - // Transaction box header with number - writeln!(output, "{}┌───────────────────────────────────────── Transaction #{} ─────────────────────────────────────────────┐{}", self.colors.gray, tx_number, self.colors.reset).expect("Failed to write box header"); - - // Transaction header - self.write_transaction_header(&mut output, log) - .expect("Failed to write header"); - - // Instructions section - if !log.instructions.is_empty() { - self.write_instructions_section(&mut output, log) - .expect("Failed to write instructions"); - } - - // Account changes section - if self.config.show_account_changes && !log.account_changes.is_empty() { - self.write_account_changes_section(&mut output, log) - .expect("Failed to write account changes"); - } - - // Light Protocol events section - if !log.light_events.is_empty() { - self.write_light_events_section(&mut output, log) - .expect("Failed to write Light Protocol events"); - } - - // Program logs section (LiteSVM pretty logs) - if !log.program_logs_pretty.trim().is_empty() { - self.write_program_logs_section(&mut output, log) - .expect("Failed to write program logs"); - } - - // Transaction box footer - writeln!(output, "{}└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘{}", self.colors.gray, self.colors.reset).expect("Failed to write box footer"); - - // Apply line breaks for long values in the complete output 
- self.apply_line_breaks(&output) - } - - /// Write transaction header with status, fee, and compute units - fn write_transaction_header( - &self, - output: &mut String, - log: &EnhancedTransactionLog, - ) -> fmt::Result { - writeln!( - output, - "{}│{} {}Transaction: {}{} | Slot: {} | Status: {}{}", - self.colors.gray, - self.colors.reset, - self.colors.bold, - self.colors.cyan, - log.signature, - log.slot, - self.status_color(&log.status), - log.status.text(), - )?; - - writeln!( - output, - "{}│{} Fee: {}{:.6} SOL | Compute Used: {}{}/{} CU{}", - self.colors.gray, - self.colors.reset, - self.colors.yellow, - log.fee as f64 / 1_000_000_000.0, - self.colors.blue, - log.compute_used, - log.compute_total, - self.colors.reset - )?; - - writeln!(output, "{}│{}", self.colors.gray, self.colors.reset)?; - Ok(()) - } - - /// Write instructions hierarchy - fn write_instructions_section( - &self, - output: &mut String, - log: &EnhancedTransactionLog, - ) -> fmt::Result { - writeln!( - output, - "{}│{} {}Instructions ({}):{}", - self.colors.gray, - self.colors.reset, - self.colors.bold, - log.instructions.len(), - self.colors.reset - )?; - writeln!(output, "{}│{}", self.colors.gray, self.colors.reset)?; - - for (i, instruction) in log.instructions.iter().enumerate() { - self.write_instruction(output, instruction, 0, i + 1)?; - } - - Ok(()) - } - - /// Write single instruction with proper indentation and hierarchy - fn write_instruction( - &self, - output: &mut String, - instruction: &EnhancedInstructionLog, - depth: usize, - number: usize, - ) -> fmt::Result { - let indent = self.get_tree_indent(depth); - let prefix = if depth == 0 { "├─" } else { "└─" }; - - // Instruction header - let inner_count = if instruction.inner_instructions.is_empty() { - String::new() - } else { - format!(".{}", instruction.inner_instructions.len()) - }; - - write!( - output, - "{}{} {}#{}{} {}{} ({}{}{})", - indent, - prefix, - self.colors.bold, - number, - inner_count, - self.colors.blue, - 
instruction.program_id, - self.colors.cyan, - instruction.program_name, - self.colors.reset - )?; - - // Add instruction name if parsed - if let Some(ref name) = instruction.instruction_name { - write!( - output, - " - {}{}{}", - self.colors.yellow, name, self.colors.reset - )?; - } - - // Add compute units if available and requested - if self.config.show_compute_units { - if let Some(compute) = instruction.compute_consumed { - write!( - output, - " {}({}{}CU{})", - self.colors.gray, self.colors.blue, compute, self.colors.gray - )?; - } - } - - writeln!(output, "{}", self.colors.reset)?; - - // Show instruction details based on verbosity - match self.config.verbosity { - LogVerbosity::Detailed | LogVerbosity::Full => { - if let Some(ref parsed) = instruction.parsed_data { - self.write_parsed_instruction_data( - output, - parsed, - &instruction.data, - depth + 1, - )?; - } else if !instruction.data.is_empty() { - // Show raw instruction data for unparseable instructions with chunking - // Skip instruction data for account compression program unless explicitly configured - let should_show_data = if instruction.program_name == "Account Compression" { - self.config.show_compression_instruction_data - } else { - true - }; - - if should_show_data { - let indent = self.get_tree_indent(depth + 1); - writeln!( - output, - "{}{}Raw instruction data ({} bytes): {}[", - indent, - self.colors.gray, - instruction.data.len(), - self.colors.cyan - )?; - - // Chunk the data into 32-byte groups for better readability - for (i, chunk) in instruction.data.chunks(32).enumerate() { - write!(output, "{} ", indent)?; - for (j, byte) in chunk.iter().enumerate() { - if j > 0 { - write!(output, ", ")?; - } - write!(output, "{}", byte)?; - } - if i < instruction.data.chunks(32).len() - 1 { - writeln!(output, ",")?; - } else { - writeln!(output, "]{}", self.colors.reset)?; - } - } - } - } - } - _ => {} - } - - // Show accounts if verbose - if self.config.verbosity == LogVerbosity::Full && 
!instruction.accounts.is_empty() { - let accounts_indent = self.get_tree_indent(depth + 1); - writeln!( - output, - "{}{}Accounts ({}):{}", - accounts_indent, - self.colors.gray, - instruction.accounts.len(), - self.colors.reset - )?; - - // Create a table for better account formatting - let mut account_rows: Vec = Vec::new(); - - for (idx, account) in instruction.accounts.iter().enumerate() { - let access = if account.is_signer && account.is_writable { - AccountAccess::SignerWritable - } else if account.is_signer { - AccountAccess::Signer - } else if account.is_writable { - AccountAccess::Writable - } else { - AccountAccess::Readonly - }; - - let account_name = self.get_account_name(&account.pubkey); - account_rows.push(AccountRow { - symbol: access.symbol(idx + 1), - pubkey: account.pubkey.to_string(), - access: access.text().to_string(), - name: account_name, - }); - } - - if !account_rows.is_empty() { - let table = Table::new(account_rows) - .to_string() - .lines() - .map(|line| format!("{}{}", accounts_indent, line)) - .collect::>() - .join("\n"); - writeln!(output, "{}", table)?; - } - } - - // Write inner instructions recursively - for (i, inner) in instruction.inner_instructions.iter().enumerate() { - if depth < self.config.max_inner_instruction_depth { - self.write_instruction(output, inner, depth + 1, i + 1)?; - } - } - - Ok(()) - } - - /// Write parsed instruction data - fn write_parsed_instruction_data( - &self, - output: &mut String, - parsed: &super::types::ParsedInstructionData, - instruction_data: &[u8], - depth: usize, - ) -> fmt::Result { - let indent = self.get_tree_indent(depth); - - match parsed { - super::types::ParsedInstructionData::LightSystemProgram { - instruction_type, - compressed_accounts, - proof_info, - address_params, - fee_info, - input_account_data, - output_account_data, - } => { - writeln!( - output, - "{}{}Light System: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - instruction_type, - self.colors.reset - )?; - 
- if let Some(compressed_accounts) = compressed_accounts { - writeln!( - output, - "{}{}Accounts: {}in: {}, out: {}{}", - indent, - self.colors.gray, - self.colors.cyan, - compressed_accounts.input_accounts, - compressed_accounts.output_accounts, - self.colors.reset - )?; - } - - if let Some(proof_info) = proof_info { - if proof_info.has_validity_proof { - writeln!( - output, - "{}{}Proof: {}{} proof{}", - indent, - self.colors.gray, - self.colors.cyan, - proof_info.proof_type, - self.colors.reset - )?; - } - } - - // Display input account data - if let Some(ref input_accounts) = input_account_data { - writeln!( - output, - "{}{}Input Accounts ({}):{}", - indent, - self.colors.gray, - input_accounts.len(), - self.colors.reset - )?; - for (i, acc_data) in input_accounts.iter().enumerate() { - writeln!( - output, - "{} {}[{}]{}", - indent, self.colors.gray, i, self.colors.reset - )?; - writeln!( - output, - "{} {}owner: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - acc_data - .owner - .map(|o| o.to_string()) - .unwrap_or("None".to_string()), - self.colors.reset - )?; - if let Some(ref address) = acc_data.address { - writeln!( - output, - "{} {}address: {}{:?}{}", - indent, - self.colors.gray, - self.colors.cyan, - address, - self.colors.reset - )?; - } - writeln!( - output, - "{} {}lamports: {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - acc_data.lamports, - self.colors.reset - )?; - if !acc_data.data_hash.is_empty() { - writeln!( - output, - "{} {}data_hash: {}{:?}{}", - indent, - self.colors.gray, - self.colors.cyan, - acc_data.data_hash, - self.colors.reset - )?; - } - if !acc_data.discriminator.is_empty() { - writeln!( - output, - "{} {}discriminator: {}{:?}{}", - indent, - self.colors.gray, - self.colors.cyan, - acc_data.discriminator, - self.colors.reset - )?; - } - if let Some(tree_idx) = acc_data.merkle_tree_index { - if let Some(tree_pubkey) = acc_data.merkle_tree_pubkey { - writeln!( - output, - "{} {}merkle_tree_pubkey (index 
{}{}{}): {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - tree_idx, - self.colors.gray, - self.colors.yellow, - tree_pubkey, - self.colors.reset - )?; - } else { - writeln!( - output, - "{} {}merkle_tree_index: {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - tree_idx, - self.colors.reset - )?; - } - } else if let Some(tree_pubkey) = acc_data.merkle_tree_pubkey { - writeln!( - output, - "{} {}merkle_tree_pubkey: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - tree_pubkey, - self.colors.reset - )?; - } - if let Some(queue_idx) = acc_data.queue_index { - if let Some(queue_pubkey) = acc_data.queue_pubkey { - writeln!( - output, - "{} {}queue_pubkey (index {}{}{}): {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - queue_idx, - self.colors.gray, - self.colors.yellow, - queue_pubkey, - self.colors.reset - )?; - } else { - writeln!( - output, - "{} {}queue_index: {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - queue_idx, - self.colors.reset - )?; - } - } else if let Some(queue_pubkey) = acc_data.queue_pubkey { - writeln!( - output, - "{} {}queue_pubkey: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - queue_pubkey, - self.colors.reset - )?; - } - // Display leaf index after queue_pubkey - if let Some(leaf_idx) = acc_data.leaf_index { - writeln!( - output, - "{} {}leaf_index: {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - leaf_idx, - self.colors.reset - )?; - } - // Display root index after leaf index - if let Some(root_idx) = acc_data.root_index { - writeln!( - output, - "{} {}root_index: {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - root_idx, - self.colors.reset - )?; - } - } - } - - // Display output account data - if let Some(ref output_data) = output_account_data { - writeln!( - output, - "{}{}Output Accounts ({}):{}", - indent, - self.colors.gray, - output_data.len(), - self.colors.reset - )?; - for (i, acc_data) in output_data.iter().enumerate() { - writeln!( - 
output, - "{} {}[{}]{}", - indent, self.colors.gray, i, self.colors.reset - )?; - writeln!( - output, - "{} {}owner: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - acc_data - .owner - .map(|o| o.to_string()) - .unwrap_or("None".to_string()), - self.colors.reset - )?; - if let Some(ref address) = acc_data.address { - writeln!( - output, - "{} {}address: {}{:?}{}", - indent, - self.colors.gray, - self.colors.cyan, - address, - self.colors.reset - )?; - } - writeln!( - output, - "{} {}lamports: {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - acc_data.lamports, - self.colors.reset - )?; - if !acc_data.data_hash.is_empty() { - writeln!( - output, - "{} {}data_hash: {}{:?}{}", - indent, - self.colors.gray, - self.colors.cyan, - acc_data.data_hash, - self.colors.reset - )?; - } - if !acc_data.discriminator.is_empty() { - writeln!( - output, - "{} {}discriminator: {}{:?}{}", - indent, - self.colors.gray, - self.colors.cyan, - acc_data.discriminator, - self.colors.reset - )?; - } - if let Some(ref data) = acc_data.data { - writeln!( - output, - "{} {}data ({} bytes): {}{:?}{}", - indent, - self.colors.gray, - data.len(), - self.colors.cyan, - data, - self.colors.reset - )?; - } - if let Some(tree_idx) = acc_data.merkle_tree_index { - if let Some(tree_pubkey) = acc_data.merkle_tree_pubkey { - writeln!( - output, - "{} {}merkle_tree_pubkey (index {}{}{}): {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - tree_idx, - self.colors.gray, - self.colors.yellow, - tree_pubkey, - self.colors.reset - )?; - } else { - writeln!( - output, - "{} {}merkle_tree_index: {}{}{}", - indent, - self.colors.gray, - self.colors.cyan, - tree_idx, - self.colors.reset - )?; - } - } else if let Some(tree_pubkey) = acc_data.merkle_tree_pubkey { - writeln!( - output, - "{} {}merkle_tree_pubkey: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - tree_pubkey, - self.colors.reset - )?; - } - } - } - - // Display address parameters with actual values - if 
let Some(address_params) = address_params { - writeln!( - output, - "{}{}New Addresses ({}):{}", - indent, - self.colors.gray, - address_params.len(), - self.colors.reset - )?; - for (i, addr_param) in address_params.iter().enumerate() { - writeln!( - output, - "{} {}[{}] {}seed: {}{:?}{}", - indent, - self.colors.gray, - i, - self.colors.gray, - self.colors.cyan, - addr_param.seed, - self.colors.reset - )?; - - // Check if v2 by comparing tree and queue pubkeys - let is_v2 = addr_param.address_merkle_tree_pubkey - == addr_param.address_queue_pubkey; - - // Display address tree - if let Some(tree_pubkey) = addr_param.address_merkle_tree_pubkey { - writeln!( - output, - "{} {}tree[{}]: {}{}{}", - indent, - self.colors.gray, - addr_param.merkle_tree_index.unwrap_or(0), - self.colors.yellow, - tree_pubkey, - self.colors.reset - )?; - } - - // Only display queue for v1 trees (when different from tree) - if !is_v2 { - if let Some(queue_pubkey) = addr_param.address_queue_pubkey { - writeln!( - output, - "{} {}queue[{}]: {}{}{}", - indent, - self.colors.gray, - addr_param.address_queue_index.unwrap_or(0), - self.colors.yellow, - queue_pubkey, - self.colors.reset - )?; - } - } - - if let Some(ref derived_addr) = addr_param.derived_address { - writeln!( - output, - "{} {}address: {}{:?}{}", - indent, - self.colors.gray, - self.colors.cyan, - derived_addr, - self.colors.reset - )?; - } - let assignment_str = match addr_param.assigned_account_index { - super::types::AddressAssignment::AssignedIndex(idx) => { - format!("{}", idx) - } - super::types::AddressAssignment::None => "none".to_string(), - super::types::AddressAssignment::V1 => "n/a (v1)".to_string(), - }; - writeln!( - output, - "{} {}assigned: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - assignment_str, - self.colors.reset - )?; - } - } - - if let Some(fee_info) = fee_info { - if let Some(relay_fee) = fee_info.relay_fee { - writeln!( - output, - "{}{}Relay Fee: {}{} lamports{}", - indent, - 
self.colors.gray, - self.colors.yellow, - relay_fee, - self.colors.reset - )?; - } - if let Some(compression_fee) = fee_info.compression_fee { - writeln!( - output, - "{}{}Compression Fee: {}{} lamports{}", - indent, - self.colors.gray, - self.colors.yellow, - compression_fee, - self.colors.reset - )?; - } - } - } - super::types::ParsedInstructionData::ComputeBudget { - instruction_type, - value, - } => { - write!( - output, - "{}{}Compute Budget: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - instruction_type, - self.colors.reset - )?; - - if let Some(val) = value { - writeln!(output, " ({})", val)?; - } else { - writeln!(output)?; - } - } - super::types::ParsedInstructionData::System { - instruction_type, - lamports, - space: _, - new_account: _, - } => { - write!( - output, - "{}{}System: {}{}{}", - indent, - self.colors.gray, - self.colors.yellow, - instruction_type, - self.colors.reset - )?; - - if let Some(amount) = lamports { - writeln!(output, " ({} lamports)", amount)?; - } else { - writeln!(output)?; - } - } - super::types::ParsedInstructionData::Unknown { - program_name, - data_preview: _, - } => { - writeln!( - output, - "{}{}Program: {}{}{}", - indent, self.colors.gray, self.colors.yellow, program_name, self.colors.reset - )?; - - // Show raw instruction data for unknown programs with chunking - // Skip instruction data for account compression program unless explicitly configured - let should_show_data = if program_name == "Account Compression" { - self.config.show_compression_instruction_data - } else { - true - }; - - if !instruction_data.is_empty() && should_show_data { - writeln!( - output, - "{}{}Raw instruction data ({} bytes): {}[", - indent, - self.colors.gray, - instruction_data.len(), - self.colors.cyan - )?; - - // Chunk the data into 32-byte groups for better readability - for (i, chunk) in instruction_data.chunks(32).enumerate() { - write!(output, "{} ", indent)?; - for (j, byte) in chunk.iter().enumerate() { - if j > 0 { - 
write!(output, ", ")?; - } - write!(output, "{}", byte)?; - } - if i < instruction_data.chunks(32).len() - 1 { - writeln!(output, ",")?; - } else { - writeln!(output, "]{}", self.colors.reset)?; - } - } - } - } - } - - Ok(()) - } - - /// Write account changes section - fn write_account_changes_section( - &self, - output: &mut String, - log: &EnhancedTransactionLog, - ) -> fmt::Result { - writeln!(output)?; - writeln!( - output, - "{}Account Changes ({}):{}\n", - self.colors.bold, - log.account_changes.len(), - self.colors.reset - )?; - - for change in &log.account_changes { - self.write_account_change(output, change)?; - } - - Ok(()) - } - - /// Write single account change - fn write_account_change(&self, output: &mut String, change: &AccountChange) -> fmt::Result { - writeln!( - output, - "│ {}{} {} ({}) - {}{}{}", - change.access.symbol(change.account_index), - self.colors.cyan, - change.pubkey, - change.access.text(), - self.colors.yellow, - change.account_type, - self.colors.reset - )?; - - if change.lamports_before != change.lamports_after { - writeln!( - output, - "│ {}Lamports: {} → {}{}", - self.colors.gray, change.lamports_before, change.lamports_after, self.colors.reset - )?; - } - - Ok(()) - } - - /// Write Light Protocol events section - fn write_light_events_section( - &self, - output: &mut String, - log: &EnhancedTransactionLog, - ) -> fmt::Result { - writeln!(output)?; - writeln!( - output, - "{}Light Protocol Events ({}):{}\n", - self.colors.bold, - log.light_events.len(), - self.colors.reset - )?; - - for event in &log.light_events { - writeln!( - output, - "│ {}Event: {}{}{}", - self.colors.blue, self.colors.yellow, event.event_type, self.colors.reset - )?; - - if !event.compressed_accounts.is_empty() { - writeln!( - output, - "│ {}Compressed Accounts: {}{}", - self.colors.gray, - event.compressed_accounts.len(), - self.colors.reset - )?; - } - - if !event.merkle_tree_changes.is_empty() { - writeln!( - output, - "│ {}Merkle Tree Changes: {}{}", - 
self.colors.gray, - event.merkle_tree_changes.len(), - self.colors.reset - )?; - } - } - - Ok(()) - } - - /// Write program logs section using LiteSVM's pretty logs - fn write_program_logs_section( - &self, - output: &mut String, - log: &EnhancedTransactionLog, - ) -> fmt::Result { - writeln!(output)?; - writeln!( - output, - "{}│{} {}Program Logs:{}", - self.colors.gray, self.colors.reset, self.colors.bold, self.colors.reset - )?; - writeln!(output, "{}│{}", self.colors.gray, self.colors.reset)?; - - // Display LiteSVM's pretty formatted logs with proper indentation - for line in log.program_logs_pretty.lines() { - if !line.trim().is_empty() { - writeln!( - output, - "{}│{} {}", - self.colors.gray, self.colors.reset, line - )?; - } - } - - Ok(()) - } - - /// Get tree-style indentation for given depth - fn get_tree_indent(&self, depth: usize) -> String { - let border = format!("{}│{} ", self.colors.gray, self.colors.reset); - if depth == 0 { - border - } else { - format!("{}{}", border, "│ ".repeat(depth)) - } - } - - /// Get color for transaction status - fn status_color(&self, status: &TransactionStatus) -> &str { - match status { - TransactionStatus::Success => &self.colors.green, - TransactionStatus::Failed(_) => &self.colors.red, - TransactionStatus::Unknown => &self.colors.yellow, - } - } - - /// Get human-readable name for known accounts using constants and test accounts - fn get_account_name(&self, pubkey: &solana_sdk::pubkey::Pubkey) -> String { - let pubkey_bytes = pubkey.to_bytes(); - - // Light Protocol Programs and Accounts from constants - if pubkey_bytes == light_sdk_types::constants::LIGHT_SYSTEM_PROGRAM_ID { - return "light system program".to_string(); - } - if pubkey_bytes == light_sdk_types::constants::ACCOUNT_COMPRESSION_PROGRAM_ID { - return "account compression program".to_string(); - } - if pubkey_bytes == light_sdk_types::constants::REGISTERED_PROGRAM_PDA { - return "registered program pda".to_string(); - } - if pubkey_bytes == 
light_sdk_types::constants::ACCOUNT_COMPRESSION_AUTHORITY_PDA { - return "account compression authority".to_string(); - } - if pubkey_bytes == light_sdk_types::constants::NOOP_PROGRAM_ID { - return "noop program".to_string(); - } - if pubkey_bytes == light_sdk_types::constants::LIGHT_TOKEN_PROGRAM_ID { - return "compressed token program".to_string(); - } - if pubkey_bytes == light_sdk_types::constants::ADDRESS_TREE_V1 { - return "address tree v1".to_string(); - } - if pubkey_bytes == light_sdk_types::constants::ADDRESS_QUEUE_V1 { - return "address queue v1".to_string(); - } - if pubkey_bytes == light_sdk_types::constants::SOL_POOL_PDA { - return "sol pool pda".to_string(); - } - - // String-based matches for test accounts and other addresses - match pubkey.to_string().as_str() { - "FNt7byTHev1k5x2cXZLBr8TdWiC3zoP5vcnZR4P682Uy" => "test program".to_string(), - - // Test accounts from test_accounts.rs - Local Test Validator - "smt1NamzXdq4AMqS2fS2F1i5KTYPZRhoHgWx38d8WsT" => "v1 state merkle tree".to_string(), - "nfq1NvQDJ2GEgnS8zt9prAe8rjjpAW1zFkrvZoBR148" => "v1 nullifier queue".to_string(), - "cpi1uHzrEhBG733DoEJNgHCyRS3XmmyVNZx5fonubE4" => "v1 cpi context".to_string(), - "amt1Ayt45jfbdw5YSo7iz6WZxUmnZsQTYXy82hVwyC2" => "v1 address merkle tree".to_string(), - "aq1S9z4reTSQAdgWHGD2zDaS39sjGrAxbR31vxJ2F4F" => "v1 address queue".to_string(), - - // V2 State Trees and Queues (5 tree triples) - "bmt1LryLZUMmF7ZtqESaw7wifBXLfXHQYoE4GAmrahU" => "v2 state merkle tree 1".to_string(), - "oq1na8gojfdUhsfCpyjNt6h4JaDWtHf1yQj4koBWfto" => "v2 state output queue 1".to_string(), - "cpi15BoVPKgEPw5o8wc2T816GE7b378nMXnhH3Xbq4y" => "v2 cpi context 1".to_string(), - "bmt2UxoBxB9xWev4BkLvkGdapsz6sZGkzViPNph7VFi" => "v2 state merkle tree 2".to_string(), - "oq2UkeMsJLfXt2QHzim242SUi3nvjJs8Pn7Eac9H9vg" => "v2 state output queue 2".to_string(), - "cpi2yGapXUR3As5SjnHBAVvmApNiLsbeZpF3euWnW6B" => "v2 cpi context 2".to_string(), - "bmt3ccLd4bqSVZVeCJnH1F6C8jNygAhaDfxDwePyyGb" => "v2 state 
merkle tree 3".to_string(), - "oq3AxjekBWgo64gpauB6QtuZNesuv19xrhaC1ZM1THQ" => "v2 state output queue 3".to_string(), - "cpi3mbwMpSX8FAGMZVP85AwxqCaQMfEk9Em1v8QK9Rf" => "v2 cpi context 3".to_string(), - "bmt4d3p1a4YQgk9PeZv5s4DBUmbF5NxqYpk9HGjQsd8" => "v2 state merkle tree 4".to_string(), - "oq4ypwvVGzCUMoiKKHWh4S1SgZJ9vCvKpcz6RT6A8dq" => "v2 state output queue 4".to_string(), - "cpi4yyPDc4bCgHAnsenunGA8Y77j3XEDyjgfyCKgcoc" => "v2 cpi context 4".to_string(), - "bmt5yU97jC88YXTuSukYHa8Z5Bi2ZDUtmzfkDTA2mG2" => "v2 state merkle tree 5".to_string(), - "oq5oh5ZR3yGomuQgFduNDzjtGvVWfDRGLuDVjv9a96P" => "v2 state output queue 5".to_string(), - "cpi5ZTjdgYpZ1Xr7B1cMLLUE81oTtJbNNAyKary2nV6" => "v2 cpi context 5".to_string(), - - // V2 Address Trees (test accounts) - "amt2kaJA14v3urZbZvnc5v2np8jqvc4Z8zDep5wbtzx" => "v2 address merkle tree".to_string(), - - // CPI Authority (commonly used in tests) - "HZH7qSLcpAeDqCopVU4e5XkhT9j3JFsQiq8CmruY3aru" => "cpi authority pda".to_string(), - - // Solana Native Programs - id if id == system_program::ID.to_string() => "system program".to_string(), - "ComputeBudget111111111111111111111111111111" => "compute budget program".to_string(), - "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" => "token program".to_string(), - "ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL" => { - "associated token program".to_string() - } - - _ => { - // Check if it's a PDA or regular account - if pubkey.is_on_curve() { - "user account".to_string() - } else { - "pda account".to_string() - } - } - } - } -} diff --git a/sdk-libs/program-test/src/logging/mod.rs b/sdk-libs/program-test/src/logging/mod.rs index a1bd94b8bb..cfda331b47 100644 --- a/sdk-libs/program-test/src/logging/mod.rs +++ b/sdk-libs/program-test/src/logging/mod.rs @@ -1,22 +1,12 @@ -//! Enhanced logging system for light-program-test +//! LiteSVM integration for Light Protocol logging //! -//! This module provides Solana Explorer-like transaction logging with: -//! 
- Hierarchical instruction display with inner instructions -//! - Account changes tracking -//! - Light Protocol specific parsing and formatting -//! - Configurable verbosity levels -//! - Color-coded output +//! This module provides the glue layer between LiteSVM and the instruction-decoder crate. +//! All logging types, decoders, and formatting utilities are in `light-instruction-decoder`. //! -//! Logging behavior: -//! - File logging: Always enabled when `enhanced_logging.enabled = true` (default) -//! - Log file: Written to `target/light_program_test.log` -//! - Console output: Only when `RUST_BACKTRACE` is set AND `log_events = true` -//! - Log file is overwritten at session start, then appended for each transaction - -pub mod config; -pub mod decoder; -pub mod formatter; -pub mod types; +//! This module only contains: +//! - LiteSVM-specific transaction result parsing (`from_transaction_result`) +//! - Log file I/O functions +//! - Re-exports from instruction-decoder use std::{ fs::OpenOptions, @@ -26,13 +16,19 @@ use std::{ }; use chrono; -pub use config::{EnhancedLoggingConfig, LogVerbosity}; -pub use formatter::TransactionFormatter; +// Re-export everything from instruction-decoder +pub use light_instruction_decoder::{ + AccountAccess, AccountChange, AccountCompressionInstructionDecoder, CTokenInstructionDecoder, + Colors, CompressedAccountInfo, ComputeBudgetInstructionDecoder, DecodedField, + DecodedInstruction, DecoderRegistry, EnhancedInstructionLog, EnhancedLoggingConfig, + EnhancedTransactionLog, InstructionDecoder, LightProtocolEvent, LightSystemInstructionDecoder, + LogVerbosity, MerkleTreeChange, RegistryInstructionDecoder, SplTokenInstructionDecoder, + SystemInstructionDecoder, Token2022InstructionDecoder, TransactionFormatter, TransactionStatus, +}; use litesvm::types::TransactionResult; -use solana_sdk::{signature::Signature, transaction::Transaction}; -pub use types::{ - AccountChange, EnhancedInstructionLog, EnhancedTransactionLog, 
ParsedInstructionData, - TransactionStatus, +use solana_sdk::{ + inner_instruction::InnerInstruction, pubkey::Pubkey, signature::Signature, + transaction::Transaction, }; use crate::program_test::config::ProgramTestConfig; @@ -41,7 +37,6 @@ static SESSION_STARTED: std::sync::Once = std::sync::Once::new(); /// Get the log file path in target directory fn get_log_file_path() -> PathBuf { - // Always use cargo workspace target directory use std::process::Command; if let Ok(output) = Command::new("cargo") .arg("metadata") @@ -62,7 +57,6 @@ fn get_log_file_path() -> PathBuf { } } - // Fallback to current directory's target let mut path = PathBuf::from("target"); path.push("light_program_test.log"); path @@ -77,14 +71,12 @@ fn initialize_log_file() { .unwrap_or_default() .as_secs(); - // Create new log file with session header if let Ok(mut file) = OpenOptions::new() .create(true) .write(true) .truncate(true) .open(&log_path) { - // Format timestamp as readable date let datetime = chrono::DateTime::from_timestamp(timestamp as i64, 0).unwrap_or(chrono::Utc::now()); let formatted_date = datetime.format("%Y-%m-%d %H:%M:%S UTC"); @@ -100,13 +92,11 @@ fn initialize_log_file() { /// Strip ANSI escape codes from string for plain text log files fn strip_ansi_codes(text: &str) -> String { - // Simple regex-free approach to remove ANSI escape sequences let mut result = String::with_capacity(text.len()); let mut chars = text.chars(); while let Some(ch) = chars.next() { if ch == '\x1b' { - // Found escape character, skip until we find 'm' (end of color code) for next_ch in chars.by_ref() { if next_ch == 'm' { break; @@ -122,20 +112,16 @@ fn strip_ansi_codes(text: &str) -> String { /// Write log entry to file (append to existing session log) fn write_to_log_file(content: &str) { - // Ensure session is initialized initialize_log_file(); let log_path = get_log_file_path(); - // Ensure parent directory exists if let Some(parent) = log_path.parent() { let _ = 
std::fs::create_dir_all(parent); } - // Strip ANSI color codes for file output let clean_content = strip_ansi_codes(content); - // Append transaction log to existing file if let Ok(mut file) = OpenOptions::new().create(true).append(true).open(&log_path) { let _ = writeln!(file, "{}", clean_content); } @@ -175,7 +161,7 @@ pub fn log_transaction_enhanced_with_console( return; } - let enhanced_log = EnhancedTransactionLog::from_transaction_result( + let enhanced_log = from_transaction_result( transaction, result, signature, @@ -186,10 +172,8 @@ pub fn log_transaction_enhanced_with_console( let formatter = TransactionFormatter::new(&config.enhanced_logging); let formatted_log = formatter.format(&enhanced_log, transaction_counter); - // Always write to log file when enhanced logging is enabled write_to_log_file(&formatted_log); - // Print to console if requested if print_to_console { println!("{}", formatted_log); } @@ -199,3 +183,180 @@ pub fn log_transaction_enhanced_with_console( pub fn should_use_enhanced_logging(config: &ProgramTestConfig) -> bool { config.enhanced_logging.enabled && !config.no_logs } + +// ============================================================================ +// LiteSVM-specific conversion functions +// ============================================================================ + +/// Get human-readable program name from pubkey +fn get_program_name(program_id: &Pubkey) -> String { + light_instruction_decoder::types::get_program_name( + &solana_pubkey::Pubkey::new_from_array(program_id.to_bytes()), + None, + ) +} + +/// Use LiteSVM's pretty logs instead of parsing raw logs +fn get_pretty_logs_string(result: &TransactionResult) -> String { + match result { + Ok(meta) => meta.pretty_logs(), + Err(failed) => failed.meta.pretty_logs(), + } +} + +/// Create EnhancedTransactionLog from LiteSVM transaction result +pub fn from_transaction_result( + transaction: &Transaction, + result: &TransactionResult, + signature: &Signature, + slot: u64, + 
config: &EnhancedLoggingConfig, +) -> EnhancedTransactionLog { + let (status, compute_consumed) = match result { + Ok(meta) => (TransactionStatus::Success, meta.compute_units_consumed), + Err(failed) => ( + TransactionStatus::Failed(format!("{:?}", failed.err)), + failed.meta.compute_units_consumed, + ), + }; + + let estimated_fee = (transaction.signatures.len() as u64) * 5000; + + // Build full instructions with accounts and data + let mut instructions: Vec = transaction + .message + .instructions + .iter() + .enumerate() + .map(|(index, ix)| { + let program_id = transaction.message.account_keys[ix.program_id_index as usize]; + let mut log = EnhancedInstructionLog::new( + index, + solana_pubkey::Pubkey::new_from_array(program_id.to_bytes()), + get_program_name(&program_id), + ); + log.accounts = ix + .accounts + .iter() + .map(|&idx| { + let pubkey = transaction.message.account_keys[idx as usize]; + solana_instruction::AccountMeta { + pubkey: solana_pubkey::Pubkey::new_from_array(pubkey.to_bytes()), + is_signer: transaction.message.is_signer(idx as usize), + is_writable: transaction.message.is_maybe_writable(idx as usize, None), + } + }) + .collect(); + log.data = ix.data.clone(); + log + }) + .collect(); + + // Extract inner instructions from LiteSVM metadata + let inner_instructions_list = match result { + Ok(meta) => &meta.inner_instructions, + Err(failed) => &failed.meta.inner_instructions, + }; + + // Apply decoder to instructions if enabled + if config.decode_light_instructions { + for instruction in instructions.iter_mut() { + instruction.decode(config); + } + + // Populate inner instructions for each top-level instruction + for (instruction_index, inner_list) in inner_instructions_list.iter().enumerate() { + if let Some(instruction) = instructions.get_mut(instruction_index) { + instruction.inner_instructions = parse_inner_instructions( + inner_list, + &transaction.message.account_keys, + &transaction.message, + 1, + config, + ); + } + } + } + + let 
pretty_logs_string = get_pretty_logs_string(result); + + let sig_bytes: [u8; 64] = signature.as_ref().try_into().unwrap_or([0u8; 64]); + let mut log = EnhancedTransactionLog::new( + light_instruction_decoder::solana_signature::Signature::from(sig_bytes), + slot, + ); + log.status = status; + log.fee = estimated_fee; + log.compute_used = compute_consumed; + log.instructions = instructions; + log.program_logs_pretty = pretty_logs_string; + log +} + +/// Parse inner instructions from Solana's InnerInstruction format with proper nesting +fn parse_inner_instructions( + inner_instructions: &[InnerInstruction], + account_keys: &[Pubkey], + message: &solana_sdk::message::Message, + base_depth: usize, + config: &EnhancedLoggingConfig, +) -> Vec { + let mut result = Vec::new(); + + for (index, inner_ix) in inner_instructions.iter().enumerate() { + let program_id = account_keys[inner_ix.instruction.program_id_index as usize]; + let program_name = get_program_name(&program_id); + + let accounts: Vec = inner_ix + .instruction + .accounts + .iter() + .map(|&idx| { + let account_index = idx as usize; + let pubkey = account_keys[account_index]; + + let is_signer = message.is_signer(account_index); + let is_writable = message.is_maybe_writable(account_index, None); + + solana_instruction::AccountMeta { + pubkey: solana_pubkey::Pubkey::new_from_array(pubkey.to_bytes()), + is_signer, + is_writable, + } + }) + .collect(); + + let instruction_depth = base_depth + (inner_ix.stack_height as usize).saturating_sub(1); + + let mut instruction_log = EnhancedInstructionLog::new( + index, + solana_pubkey::Pubkey::new_from_array(program_id.to_bytes()), + program_name, + ); + instruction_log.accounts = accounts; + instruction_log.data = inner_ix.instruction.data.clone(); + instruction_log.depth = instruction_depth; + + // Decode the instruction if enabled + if config.decode_light_instructions { + instruction_log.decode(config); + } + + // Find the correct parent for this instruction based on 
stack height + if inner_ix.stack_height <= 2 { + result.push(instruction_log); + } else { + let target_parent_depth = instruction_depth - 1; + if let Some(parent) = EnhancedInstructionLog::find_parent_for_instruction( + &mut result, + target_parent_depth, + ) { + parent.inner_instructions.push(instruction_log); + } else { + result.push(instruction_log); + } + } + } + + result +} diff --git a/sdk-libs/program-test/src/logging/types.rs b/sdk-libs/program-test/src/logging/types.rs deleted file mode 100644 index 29ec42dcc8..0000000000 --- a/sdk-libs/program-test/src/logging/types.rs +++ /dev/null @@ -1,484 +0,0 @@ -//! Type definitions for enhanced logging - -use solana_sdk::{ - inner_instruction::InnerInstruction, instruction::AccountMeta, pubkey::Pubkey, - signature::Signature, system_program, -}; - -use super::config::EnhancedLoggingConfig; - -/// Enhanced transaction log containing all formatting information -#[derive(Debug, Clone)] -pub struct EnhancedTransactionLog { - pub signature: Signature, - pub slot: u64, - pub status: TransactionStatus, - pub fee: u64, - pub compute_used: u64, - pub compute_total: u64, - pub instructions: Vec, - pub account_changes: Vec, - pub program_logs_pretty: String, - pub light_events: Vec, -} - -/// Transaction execution status -#[derive(Debug, Clone)] -pub enum TransactionStatus { - Success, - Failed(String), - Unknown, -} - -impl TransactionStatus { - pub fn text(&self) -> String { - match self { - TransactionStatus::Success => "Success".to_string(), - TransactionStatus::Failed(err) => format!("Failed: {}", err), - TransactionStatus::Unknown => "Unknown".to_string(), - } - } -} - -/// Enhanced instruction log with hierarchy and parsing -#[derive(Debug, Clone)] -pub struct EnhancedInstructionLog { - pub index: usize, - pub program_id: Pubkey, - pub program_name: String, - pub instruction_name: Option, - pub accounts: Vec, - pub data: Vec, - pub parsed_data: Option, - pub inner_instructions: Vec, - pub compute_consumed: Option, - 
pub success: bool, - pub depth: usize, -} - -/// Parsed instruction data for known programs -#[derive(Debug, Clone)] -pub enum ParsedInstructionData { - LightSystemProgram { - instruction_type: String, - compressed_accounts: Option, - proof_info: Option, - address_params: Option>, - fee_info: Option, - input_account_data: Option>, - output_account_data: Option>, - }, - ComputeBudget { - instruction_type: String, - value: Option, - }, - System { - instruction_type: String, - lamports: Option, - space: Option, - new_account: Option, - }, - Unknown { - program_name: String, - data_preview: String, - }, -} - -/// Summary of compressed accounts in a Light Protocol instruction -#[derive(Debug, Clone)] -pub struct CompressedAccountSummary { - pub input_accounts: usize, - pub output_accounts: usize, - pub lamports_change: Option, -} - -/// Summary of proof information -#[derive(Debug, Clone)] -pub struct ProofSummary { - pub proof_type: String, - pub has_validity_proof: bool, -} - -/// Summary of fee information -#[derive(Debug, Clone)] -pub struct FeeSummary { - pub relay_fee: Option, - pub compression_fee: Option, -} - -/// Address assignment state -#[derive(Debug, Clone)] -pub enum AddressAssignment { - /// V1 address param (no assignment tracking) - V1, - /// Not assigned to any output account - None, - /// Assigned to output account at index - AssignedIndex(u8), -} - -/// Address parameter information -#[derive(Debug, Clone)] -pub struct AddressParam { - pub seed: [u8; 32], - pub address_queue_index: Option, - pub address_queue_pubkey: Option, - pub merkle_tree_index: Option, - pub address_merkle_tree_pubkey: Option, - pub root_index: Option, - pub derived_address: Option<[u8; 32]>, - pub assigned_account_index: AddressAssignment, -} - -/// Input account data -#[derive(Debug, Clone)] -pub struct InputAccountData { - pub lamports: u64, - pub owner: Option, - pub merkle_tree_index: Option, - pub merkle_tree_pubkey: Option, - pub queue_index: Option, - pub queue_pubkey: 
Option, - pub address: Option<[u8; 32]>, - pub data_hash: Vec, - pub discriminator: Vec, - pub leaf_index: Option, - pub root_index: Option, -} - -/// Output account data -#[derive(Debug, Clone)] -pub struct OutputAccountData { - pub lamports: u64, - pub data: Option>, - pub owner: Option, - pub merkle_tree_index: Option, - pub merkle_tree_pubkey: Option, - pub queue_index: Option, - pub queue_pubkey: Option, - pub address: Option<[u8; 32]>, - pub data_hash: Vec, - pub discriminator: Vec, -} - -/// Account state changes during transaction -#[derive(Debug, Clone)] -pub struct AccountChange { - pub pubkey: Pubkey, - pub account_type: String, - pub access: AccountAccess, - pub account_index: usize, - pub lamports_before: u64, - pub lamports_after: u64, - pub data_len_before: usize, - pub data_len_after: usize, - pub owner: Pubkey, - pub executable: bool, - pub rent_epoch: u64, -} - -/// Account access pattern during transaction -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum AccountAccess { - Readonly, - Writable, - Signer, - SignerWritable, -} - -impl AccountAccess { - pub fn symbol(&self, index: usize) -> String { - format!("#{}", index) - } - - pub fn text(&self) -> &'static str { - match self { - AccountAccess::Readonly => "readonly", - AccountAccess::Writable => "writable", - AccountAccess::Signer => "signer", - AccountAccess::SignerWritable => "signer+writable", - } - } -} - -/// Light Protocol specific events -#[derive(Debug, Clone)] -pub struct LightProtocolEvent { - pub event_type: String, - pub compressed_accounts: Vec, - pub merkle_tree_changes: Vec, - pub nullifiers: Vec, -} - -/// Compressed account information -#[derive(Debug, Clone)] -pub struct CompressedAccountInfo { - pub hash: String, - pub owner: Pubkey, - pub lamports: u64, - pub data: Option>, - pub address: Option, -} - -/// Merkle tree state change -#[derive(Debug, Clone)] -pub struct MerkleTreeChange { - pub tree_pubkey: Pubkey, - pub tree_type: String, - pub sequence_number: u64, - 
pub leaf_index: u64, -} - -impl EnhancedTransactionLog { - /// Use LiteSVM's pretty logs instead of parsing raw logs - fn get_pretty_logs_string(result: &litesvm::types::TransactionResult) -> String { - match result { - Ok(meta) => meta.pretty_logs(), - Err(failed) => failed.meta.pretty_logs(), - } - } - - /// Create from LiteSVM transaction result - pub fn from_transaction_result( - transaction: &solana_sdk::transaction::Transaction, - result: &litesvm::types::TransactionResult, - signature: &Signature, - slot: u64, - config: &EnhancedLoggingConfig, - ) -> Self { - let (status, compute_consumed) = match result { - Ok(meta) => (TransactionStatus::Success, meta.compute_units_consumed), - Err(failed) => ( - TransactionStatus::Failed(format!("{:?}", failed.err)), - failed.meta.compute_units_consumed, - ), - }; - - // Calculate estimated fee (basic calculation: signatures * lamports_per_signature) - // Default Solana fee is 5000 lamports per signature - let estimated_fee = (transaction.signatures.len() as u64) * 5000; - - // Parse instructions - let instructions: Vec = transaction - .message - .instructions - .iter() - .enumerate() - .map(|(index, ix)| EnhancedInstructionLog { - index, - program_id: transaction.message.account_keys[ix.program_id_index as usize], - program_name: get_program_name( - &transaction.message.account_keys[ix.program_id_index as usize], - ), - instruction_name: None, // Will be filled by decoder - accounts: ix - .accounts - .iter() - .map(|&idx| AccountMeta { - pubkey: transaction.message.account_keys[idx as usize], - is_signer: transaction.message.is_signer(idx as usize), - is_writable: transaction.message.is_maybe_writable(idx as usize, None), - }) - .collect(), - data: ix.data.clone(), - parsed_data: None, // Will be filled by decoder - inner_instructions: Vec::new(), // Will be filled from meta - compute_consumed: None, - success: true, - depth: 0, - }) - .collect(); - - // Extract inner instructions from LiteSVM metadata - let 
inner_instructions_list = match result { - Ok(meta) => &meta.inner_instructions, - Err(failed) => &failed.meta.inner_instructions, - }; - - // Apply decoder to instructions if enabled and populate inner instructions - let mut instructions = instructions; - if config.decode_light_instructions { - // First, decode all top-level instructions - for instruction in instructions.iter_mut() { - instruction.parsed_data = super::decoder::decode_instruction( - &instruction.program_id, - &instruction.data, - &instruction.accounts, - ); - if let Some(ref parsed) = instruction.parsed_data { - instruction.instruction_name = match parsed { - ParsedInstructionData::LightSystemProgram { - instruction_type, .. - } => Some(instruction_type.clone()), - ParsedInstructionData::ComputeBudget { - instruction_type, .. - } => Some(instruction_type.clone()), - ParsedInstructionData::System { - instruction_type, .. - } => Some(instruction_type.clone()), - _ => None, - }; - } - } - - // Now populate inner instructions for each top-level instruction - for (instruction_index, inner_list) in inner_instructions_list.iter().enumerate() { - if let Some(instruction) = instructions.get_mut(instruction_index) { - instruction.inner_instructions = Self::parse_inner_instructions( - inner_list, // inner_list is already Vec - &transaction.message.account_keys, - &transaction.message, // Pass the full message for account access info - 1, // Start at depth 1 for inner instructions - config, - ); - } - } - } - - // Get LiteSVM's pretty formatted logs - let pretty_logs_string = Self::get_pretty_logs_string(result); - - Self { - signature: *signature, - slot, - status, - fee: estimated_fee, - compute_used: compute_consumed, - compute_total: 1_400_000, // Default compute limit - instructions, - account_changes: Vec::new(), // Will be filled if requested - program_logs_pretty: pretty_logs_string, - light_events: Vec::new(), - } - } - - /// Parse inner instructions from Solana's InnerInstruction format with proper 
nesting - fn parse_inner_instructions( - inner_instructions: &[InnerInstruction], - account_keys: &[Pubkey], - message: &solana_sdk::message::Message, - base_depth: usize, - config: &EnhancedLoggingConfig, - ) -> Vec { - let mut result = Vec::new(); - - for (index, inner_ix) in inner_instructions.iter().enumerate() { - let program_id = account_keys[inner_ix.instruction.program_id_index as usize]; - let program_name = get_program_name(&program_id); - - let accounts: Vec = inner_ix - .instruction - .accounts - .iter() - .map(|&idx| { - let account_index = idx as usize; - let pubkey = account_keys[account_index]; - - // Get the correct signer and writable information from the original transaction message - let is_signer = message.is_signer(account_index); - let is_writable = message.is_maybe_writable(account_index, None); - - AccountMeta { - pubkey, - is_signer, - is_writable, - } - }) - .collect(); - - let parsed_data = if config.decode_light_instructions { - super::decoder::decode_instruction( - &program_id, - &inner_ix.instruction.data, - &accounts, - ) - } else { - None - }; - - let instruction_name = parsed_data.as_ref().and_then(|parsed| match parsed { - ParsedInstructionData::LightSystemProgram { - instruction_type, .. - } => Some(instruction_type.clone()), - ParsedInstructionData::ComputeBudget { - instruction_type, .. - } => Some(instruction_type.clone()), - ParsedInstructionData::System { - instruction_type, .. - } => Some(instruction_type.clone()), - _ => None, - }); - - // Calculate the actual depth based on stack_height - // stack_height 2 = first level CPI (depth = base_depth + 1) - // stack_height 3 = second level CPI (depth = base_depth + 2), etc. 
- let instruction_depth = base_depth + (inner_ix.stack_height as usize).saturating_sub(1); - - let instruction_log = EnhancedInstructionLog { - index, - program_id, - program_name, - instruction_name, - accounts, - data: inner_ix.instruction.data.clone(), - parsed_data, - inner_instructions: Vec::new(), - compute_consumed: None, - success: true, // We assume inner instructions succeeded if we're parsing them - depth: instruction_depth, - }; - - // Find the correct parent for this instruction based on stack height - // Stack height 2 = direct CPI, should be at top level - // Stack height 3+ = nested CPI, should be child of previous instruction with stack_height - 1 - if inner_ix.stack_height <= 2 { - // Top-level CPI - add directly to result - result.push(instruction_log); - } else { - // Nested CPI - find the appropriate parent - // We need to traverse the result structure to find the right parent - let target_parent_depth = instruction_depth - 1; - if let Some(parent) = - Self::find_parent_for_instruction(&mut result, target_parent_depth) - { - parent.inner_instructions.push(instruction_log); - } else { - // Fallback: add to top level if we can't find appropriate parent - result.push(instruction_log); - } - } - } - - result - } - - /// Helper function to find the appropriate parent for nested instructions - fn find_parent_for_instruction( - instructions: &mut [EnhancedInstructionLog], - target_depth: usize, - ) -> Option<&mut EnhancedInstructionLog> { - for instruction in instructions.iter_mut().rev() { - if instruction.depth == target_depth { - return Some(instruction); - } - // Recursively search in inner instructions - if let Some(parent) = - Self::find_parent_for_instruction(&mut instruction.inner_instructions, target_depth) - { - return Some(parent); - } - } - None - } -} -/// Get human-readable program name from pubkey -fn get_program_name(program_id: &Pubkey) -> String { - match program_id.to_string().as_str() { - id if id == system_program::ID.to_string() 
=> "System Program".to_string(), - "ComputeBudget111111111111111111111111111111" => "Compute Budget".to_string(), - "SySTEM1eSU2p4BGQfQpimFEWWSC1XDFeun3Nqzz3rT7" => "Light System Program".to_string(), - "compr6CUsB5m2jS4Y3831ztGSTnDpnKJTKS95d64XVq" => "Account Compression".to_string(), - "cTokenmWW8bLPjZEBAUgYy3zKxQZW6VKi7bqNFEVv3m" => "Compressed Token Program".to_string(), - _ => format!("Unknown Program ({})", program_id), - } -} diff --git a/sdk-libs/program-test/src/program_test/config.rs b/sdk-libs/program-test/src/program_test/config.rs index 662431a453..03e7b62fe3 100644 --- a/sdk-libs/program-test/src/program_test/config.rs +++ b/sdk-libs/program-test/src/program_test/config.rs @@ -117,6 +117,33 @@ impl ProgramTestConfig { self.log_light_protocol_events = false; self } + + /// Register custom instruction decoders for enhanced logging + /// + /// This allows registering decoders generated by `#[derive(InstructionDecoder)]` + /// or `#[instruction_decoder]` macros from `light-instruction-decoder-derive`. + /// + /// ## Example + /// + /// ```rust,ignore + /// use light_instruction_decoder_derive::instruction_decoder; + /// + /// #[instruction_decoder(program_id = crate::ID)] + /// #[program] + /// pub mod my_program { + /// // ... 
+ /// } + /// + /// let config = ProgramTestConfig::new_v2(true, Some(vec![("my_program", program_id)])) + /// .with_decoders(vec![Box::new(MyProgramInstructionDecoder)]); + /// ``` + pub fn with_decoders( + mut self, + decoders: Vec>, + ) -> Self { + self.enhanced_logging = self.enhanced_logging.with_decoders(decoders); + self + } } impl Default for ProgramTestConfig { diff --git a/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml b/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml index 86c328e24b..f53cd9522b 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml +++ b/sdk-tests/csdk-anchor-full-derived-test/Cargo.toml @@ -29,6 +29,9 @@ solana-msg = { workspace = true } solana-account-info = { workspace = true } solana-pubkey = { workspace = true } light-macros = { workspace = true, features = ["solana"] } +light-instruction-decoder = { workspace = true } +light-instruction-decoder-derive = { workspace = true } +solana-instruction = { workspace = true } light-sdk-macros = { workspace = true } borsh = { workspace = true } light-compressed-account = { workspace = true, features = ["solana"] } @@ -55,6 +58,7 @@ solana-signer = { workspace = true } solana-keypair = { workspace = true } solana-account = { workspace = true } bincode = "1.3" +sha2 = { workspace = true } [lints.rust.unexpected_cfgs] level = "allow" diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/initialize.rs b/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/initialize.rs index 8aaa7fe1fa..0e42f67c34 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/initialize.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/initialize.rs @@ -19,7 +19,7 @@ use light_token::instruction::{ use super::states::*; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct InitializeParams { pub init_amount_0: u64, pub init_amount_1: u64, diff --git 
a/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/swap.rs b/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/swap.rs index 62ea324539..7d6a643e63 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/swap.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/amm_test/swap.rs @@ -14,7 +14,7 @@ use light_anchor_spl::token_interface::{Mint, TokenAccount, TokenInterface}; use super::states::*; /// Trade direction for swap -#[derive(AnchorSerialize, AnchorDeserialize, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, AnchorSerialize, AnchorDeserialize, Clone, Copy, PartialEq, Eq)] pub enum TradeDirection { /// Swap token_0 for token_1 ZeroForOne, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instruction_accounts.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instruction_accounts.rs index 87d73b9290..fcddddea33 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instruction_accounts.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instruction_accounts.rs @@ -4,7 +4,7 @@ use light_sdk_macros::LightAccounts; use crate::state::*; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct FullAutoWithMintParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -124,7 +124,7 @@ pub const VAULT_SEED: &[u8] = b"vault"; pub const MINT_SIGNER_A_SEED: &[u8] = b"mint_signer_a"; pub const MINT_SIGNER_B_SEED: &[u8] = b"mint_signer_b"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct CreateTwoMintsParams { pub create_accounts_proof: CreateAccountsProof, pub mint_signer_a_bump: u8, @@ -199,7 +199,7 @@ pub struct CreateTwoMints<'info> { pub const MINT_SIGNER_C_SEED: &[u8] = b"mint_signer_c"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct CreateThreeMintsParams { pub create_accounts_proof: CreateAccountsProof, 
pub mint_signer_a_bump: u8, @@ -292,7 +292,7 @@ pub struct CreateThreeMints<'info> { pub const METADATA_MINT_SIGNER_SEED: &[u8] = b"metadata_mint_signer"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct CreateMintWithMetadataParams { pub create_accounts_proof: CreateAccountsProof, pub mint_signer_bump: u8, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_ata.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_ata.rs index 5cc8171ce6..8f4634eb2c 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_ata.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_ata.rs @@ -12,7 +12,7 @@ use light_sdk_macros::LightAccounts; use light_sdk_types::LIGHT_TOKEN_PROGRAM_ID; use light_token::instruction::{COMPRESSIBLE_CONFIG_V1, RENT_SPONSOR as LIGHT_TOKEN_RENT_SPONSOR}; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D10SingleAtaParams { pub create_accounts_proof: CreateAccountsProof, /// Bump for the ATA PDA diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_vault.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_vault.rs index 68a30313f2..2bb0db6317 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_vault.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d10_token_accounts/single_vault.rs @@ -16,7 +16,7 @@ pub const D10_SINGLE_VAULT_AUTH_SEED: &[u8] = b"d10_single_vault_auth"; /// Seed for the vault token account PDA pub const D10_SINGLE_VAULT_SEED: &[u8] = b"d10_single_vault"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D10SingleVaultParams { 
pub create_accounts_proof: CreateAccountsProof, /// Bump for the d10_single_vault PDA (needed for invoke_signed) diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/all.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/all.rs index 79c0e4c27b..2e9d058428 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/all.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/all.rs @@ -13,7 +13,7 @@ use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; pub const D5_ALL_AUTH_SEED: &[u8] = b"d5_all_auth"; pub const D5_ALL_VAULT_SEED: &[u8] = b"d5_all_vault"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D5AllMarkersParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/light_token.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/light_token.rs index 75cad9494d..c8f24a3da1 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/light_token.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/light_token.rs @@ -11,7 +11,7 @@ use light_token::instruction::{COMPRESSIBLE_CONFIG_V1, RENT_SPONSOR as LIGHT_TOK pub const D5_VAULT_AUTH_SEED: &[u8] = b"d5_vault_auth"; pub const D5_VAULT_SEED: &[u8] = b"d5_vault"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D5LightTokenParams { pub create_accounts_proof: CreateAccountsProof, pub vault_bump: u8, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/rentfree_bare.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/rentfree_bare.rs index 732e9f1a25..ae8f33b424 100644 --- 
a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/rentfree_bare.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d5_markers/rentfree_bare.rs @@ -12,7 +12,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D5RentfreeBareParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/account.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/account.rs index 75ba46ea75..2b4b4c0407 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/account.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/account.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D6AccountParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/all.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/all.rs index 5c3181c6f8..610116fbaa 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/all.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/all.rs @@ -11,7 +11,7 @@ use crate::state::{ d2_compress_as::multiple::MultipleCompressAsRecord, }; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D6AllParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git 
a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/boxed.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/boxed.rs index 52721559df..9d61ff99e0 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/boxed.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d6_account_types/boxed.rs @@ -9,7 +9,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D6BoxedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/all.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/all.rs index ebb5e296d6..056b234e8f 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/all.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/all.rs @@ -12,7 +12,7 @@ use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; pub const D7_ALL_AUTH_SEED: &[u8] = b"d7_all_auth"; pub const D7_ALL_VAULT_SEED: &[u8] = b"d7_all_vault"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D7AllNamesParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/creator.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/creator.rs index f86bd5e1b8..95595f3c06 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/creator.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/creator.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use 
crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D7CreatorParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/light_token_config.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/light_token_config.rs index 79903b0216..ebc82702b4 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/light_token_config.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/light_token_config.rs @@ -10,7 +10,7 @@ use light_token::instruction::{COMPRESSIBLE_CONFIG_V1, RENT_SPONSOR as LIGHT_TOK pub const D7_LIGHT_TOKEN_AUTH_SEED: &[u8] = b"d7_light_token_auth"; pub const D7_LIGHT_TOKEN_VAULT_SEED: &[u8] = b"d7_light_token_vault"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D7LightTokenConfigParams { pub create_accounts_proof: CreateAccountsProof, } diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/payer.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/payer.rs index 88c43c3315..d572a092d5 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/payer.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d7_infra_names/payer.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D7PayerParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/all.rs 
b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/all.rs index 8eda4f81bf..03d9c6d0e8 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/all.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/all.rs @@ -11,7 +11,7 @@ use crate::state::{ d2_compress_as::multiple::MultipleCompressAsRecord, }; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D8AllParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/multi_rentfree.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/multi_rentfree.rs index 1d861f659f..b184a2a923 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/multi_rentfree.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/multi_rentfree.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D8MultiRentfreeParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/pda_only.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/pda_only.rs index 9dc29e9c8a..8ea1267fa8 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/pda_only.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d8_builder_paths/pda_only.rs @@ -9,7 +9,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, 
Debug)] pub struct D8PdaOnlyParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/all.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/all.rs index f23c228d94..6e1d0a3681 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/all.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/all.rs @@ -16,7 +16,7 @@ use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; pub const D9_ALL_SEED: &[u8] = b"d9_all_const"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9AllParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/array_bumps.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/array_bumps.rs index 4cf0c9880e..c548d4c02f 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/array_bumps.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/array_bumps.rs @@ -20,7 +20,7 @@ pub const D9_BUMP_STR: &str = "d9_bump_str"; // Test 1: Literal seed with bump // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9BumpLiteralParams { pub create_accounts_proof: CreateAccountsProof, } @@ -52,7 +52,7 @@ pub struct D9BumpLiteral<'info> { // Test 2: Constant seed with bump // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9BumpConstantParams { pub create_accounts_proof: CreateAccountsProof, } @@ -84,7 +84,7 @@ pub struct D9BumpConstant<'info> { // Test 3: 
Qualified path with .as_bytes() and bump // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9BumpQualifiedParams { pub create_accounts_proof: CreateAccountsProof, } @@ -116,7 +116,7 @@ pub struct D9BumpQualified<'info> { // Test 4: Param seed with bump // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9BumpParamParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -149,7 +149,7 @@ pub struct D9BumpParam<'info> { // Test 5: Ctx account seed with bump // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9BumpCtxParams { pub create_accounts_proof: CreateAccountsProof, } @@ -184,7 +184,7 @@ pub struct D9BumpCtx<'info> { // Test 6: Multiple seed types with bump // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9BumpMixedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/complex_mixed.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/complex_mixed.rs index ef9273b081..e03034c850 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/complex_mixed.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/complex_mixed.rs @@ -20,7 +20,7 @@ pub const D9_COMPLEX_NAMESPACE: &str = "namespace"; // Test 1: Three seeds - literal + constant + param // 
============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexThreeParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -53,7 +53,7 @@ pub struct D9ComplexThree<'info> { // Test 2: Four seeds - mixed types // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexFourParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -87,7 +87,7 @@ pub struct D9ComplexFour<'info> { // Test 3: Five seeds - ctx account included // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexFiveParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -124,7 +124,7 @@ pub struct D9ComplexFive<'info> { // Test 4: Qualified paths mixed with local // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexQualifiedMixParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -157,7 +157,7 @@ pub struct D9ComplexQualifiedMix<'info> { // Test 5: Function call + other seeds // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexFuncParams { pub create_accounts_proof: CreateAccountsProof, pub key_a: Pubkey, @@ -192,7 +192,7 @@ pub struct D9ComplexFunc<'info> { // Test 6: All qualified paths // 
============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexAllQualifiedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -229,7 +229,7 @@ pub struct D9ComplexAllQualified<'info> { // Test 7: Static function (program ID) as seed // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexProgramIdParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -262,7 +262,7 @@ pub struct D9ComplexProgramId<'info> { // Test 8: Static id() function call as seed // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexIdFuncParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/const_patterns.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/const_patterns.rs index 63ec146be4..8883b1a830 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/const_patterns.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/const_patterns.rs @@ -57,7 +57,7 @@ pub static STATIC_SEED: [u8; 11] = *b"static_seed"; // Test 1: Associated constant // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9AssocConstParams { pub create_accounts_proof: CreateAccountsProof, } @@ -89,7 +89,7 @@ pub struct D9AssocConst<'info> { // Test 2: Associated constant with method // 
============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9AssocConstMethodParams { pub create_accounts_proof: CreateAccountsProof, } @@ -121,7 +121,7 @@ pub struct D9AssocConstMethod<'info> { // Test 3: Multiple associated constants // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MultiAssocConstParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -154,7 +154,7 @@ pub struct D9MultiAssocConst<'info> { // Test 4: Const fn call // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ConstFnParams { pub create_accounts_proof: CreateAccountsProof, } @@ -186,7 +186,7 @@ pub struct D9ConstFn<'info> { // Test 5: Const fn with const generic // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ConstFnGenericParams { pub create_accounts_proof: CreateAccountsProof, } @@ -218,7 +218,7 @@ pub struct D9ConstFnGeneric<'info> { // Test 6: Trait associated constant (fully qualified) // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9TraitAssocConstParams { pub create_accounts_proof: CreateAccountsProof, } @@ -250,7 +250,7 @@ pub struct D9TraitAssocConst<'info> { // Test 7: Static variable // ============================================================================ -#[derive(AnchorSerialize, 
AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9StaticParams { pub create_accounts_proof: CreateAccountsProof, } @@ -282,7 +282,7 @@ pub struct D9Static<'info> { // Test 8: Qualified const fn (crate path) // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9QualifiedConstFnParams { pub create_accounts_proof: CreateAccountsProof, } @@ -314,7 +314,7 @@ pub struct D9QualifiedConstFn<'info> { // Test 9: Fully qualified associated constant // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9FullyQualifiedAssocParams { pub create_accounts_proof: CreateAccountsProof, } @@ -346,7 +346,7 @@ pub struct D9FullyQualifiedAssoc<'info> { // Test 10: Fully qualified trait associated constant // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9FullyQualifiedTraitParams { pub create_accounts_proof: CreateAccountsProof, } @@ -378,7 +378,7 @@ pub struct D9FullyQualifiedTrait<'info> { // Test 11: Fully qualified const fn with generics // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9FullyQualifiedGenericParams { pub create_accounts_proof: CreateAccountsProof, } @@ -410,7 +410,7 @@ pub struct D9FullyQualifiedGeneric<'info> { // Test 12: Combined patterns with full qualification // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] 
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ConstCombinedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/constant.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/constant.rs index e795083f4d..89a9e79a55 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/constant.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/constant.rs @@ -10,7 +10,7 @@ use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; pub const D9_CONSTANT_SEED: &[u8] = b"d9_constant"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ConstantParams { pub create_accounts_proof: CreateAccountsProof, } diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/ctx_account.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/ctx_account.rs index 68af8c335f..a6853278d9 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/ctx_account.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/ctx_account.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9CtxAccountParams { pub create_accounts_proof: CreateAccountsProof, } diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/edge_cases.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/edge_cases.rs index 5a1e73b356..03b449f26e 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/edge_cases.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/edge_cases.rs @@ -30,7 +30,7 @@ pub const 
D9_SINGLE_BYTE: &[u8] = b"x"; // Test 1: Minimal literal (single character) // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9EdgeEmptyParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -63,7 +63,7 @@ pub struct D9EdgeEmpty<'info> { // Test 2: Single byte constant // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9EdgeSingleByteParams { pub create_accounts_proof: CreateAccountsProof, } @@ -95,7 +95,7 @@ pub struct D9EdgeSingleByte<'info> { // Test 3: Single letter constant name // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9EdgeSingleLetterParams { pub create_accounts_proof: CreateAccountsProof, } @@ -127,7 +127,7 @@ pub struct D9EdgeSingleLetter<'info> { // Test 4: Constant name with digits // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9EdgeDigitsParams { pub create_accounts_proof: CreateAccountsProof, } @@ -159,7 +159,7 @@ pub struct D9EdgeDigits<'info> { // Test 5: Leading underscore constant // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9EdgeUnderscoreParams { pub create_accounts_proof: CreateAccountsProof, } @@ -191,7 +191,7 @@ pub struct D9EdgeUnderscore<'info> { // Test 6: Many literals in same seeds array // 
============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9EdgeManyLiteralsParams { pub create_accounts_proof: CreateAccountsProof, } @@ -223,7 +223,7 @@ pub struct D9EdgeManyLiterals<'info> { // Test 7: Mixed edge cases in one struct // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9EdgeMixedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/external_paths.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/external_paths.rs index acb9606ced..4b383add3d 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/external_paths.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/external_paths.rs @@ -15,7 +15,7 @@ use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; // Test 1: External crate constant (light_sdk_types) // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ExternalSdkTypesParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -48,7 +48,7 @@ pub struct D9ExternalSdkTypes<'info> { // Test 2: External crate constant (light_token_types) // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ExternalCtokenParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -81,7 +81,7 @@ pub struct D9ExternalCtoken<'info> { // Test 3: Multiple 
external crate constants mixed // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ExternalMixedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -121,7 +121,7 @@ pub struct D9ExternalMixed<'info> { /// Local constant to mix with external pub const D9_EXTERNAL_LOCAL: &[u8] = b"d9_ext_local"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ExternalWithLocalParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -154,7 +154,7 @@ pub struct D9ExternalWithLocal<'info> { // Test 5: External constant with bump // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ExternalBumpParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -190,7 +190,7 @@ pub struct D9ExternalBump<'info> { /// Re-export from external crate for path testing pub use light_sdk_types::constants::CPI_AUTHORITY_PDA_SEED as REEXPORTED_SEED; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ExternalReexportParams { pub create_accounts_proof: CreateAccountsProof, } diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/function_call.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/function_call.rs index ae88ccc298..e7a2c91762 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/function_call.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/function_call.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; 
-#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9FunctionCallParams { pub create_accounts_proof: CreateAccountsProof, pub key_a: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/instruction_data.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/instruction_data.rs index 9984da74ed..6064fee3f5 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/instruction_data.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/instruction_data.rs @@ -18,7 +18,7 @@ use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; // Test 1: Standard params with single Pubkey field // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9SinglePubkeyParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -51,7 +51,7 @@ pub struct D9InstrSinglePubkey<'info> { // Test 2: Params with u64 field requiring to_le_bytes // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9U64Params { pub create_accounts_proof: CreateAccountsProof, pub amount: u64, @@ -84,7 +84,7 @@ pub struct D9InstrU64<'info> { // Test 3: Multiple data fields in seeds // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MultiFieldParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -118,7 +118,7 @@ pub struct D9InstrMultiField<'info> { // Test 4: Mixed params and ctx account in seeds // 
============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MixedCtxParams { pub create_accounts_proof: CreateAccountsProof, pub data_key: Pubkey, @@ -153,7 +153,7 @@ pub struct D9InstrMixedCtx<'info> { // Test 5: Three data fields // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9TripleParams { pub create_accounts_proof: CreateAccountsProof, pub key_a: Pubkey, @@ -188,7 +188,7 @@ pub struct D9InstrTriple<'info> { // Test 6: to_be_bytes conversion // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9BigEndianParams { pub create_accounts_proof: CreateAccountsProof, pub value: u64, @@ -221,7 +221,7 @@ pub struct D9InstrBigEndian<'info> { // Test 7: Multiple u64 fields // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MultiU64Params { pub create_accounts_proof: CreateAccountsProof, pub id: u64, @@ -255,7 +255,7 @@ pub struct D9InstrMultiU64<'info> { // Test 8: Pubkey with as_ref chained // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ChainedAsRefParams { pub create_accounts_proof: CreateAccountsProof, pub key: Pubkey, @@ -291,7 +291,7 @@ pub struct D9InstrChainedAsRef<'info> { /// Local seed constant pub const D9_INSTR_SEED: &[u8] = b"d9_instr_const"; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] 
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ConstMixedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, @@ -324,7 +324,7 @@ pub struct D9InstrConstMixed<'info> { // Test 10: Complex mixed - literal + constant + ctx + params // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ComplexMixedParams { pub create_accounts_proof: CreateAccountsProof, pub data_owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/literal.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/literal.rs index 4f4e8dfcc9..2471e81dc1 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/literal.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/literal.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9LiteralParams { pub create_accounts_proof: CreateAccountsProof, } diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/method_chains.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/method_chains.rs index f3a60f0b4e..64823876bd 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/method_chains.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/method_chains.rs @@ -23,7 +23,7 @@ pub const D9_METHOD_BYTES: &[u8] = b"d9_method_bytes"; // Test 1: Constant with .as_ref() // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MethodAsRefParams { 
pub create_accounts_proof: CreateAccountsProof, } @@ -55,7 +55,7 @@ pub struct D9MethodAsRef<'info> { // Test 2: String constant with .as_bytes() // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MethodAsBytesParams { pub create_accounts_proof: CreateAccountsProof, } @@ -87,7 +87,7 @@ pub struct D9MethodAsBytes<'info> { // Test 3: Qualified path with .as_bytes() // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MethodQualifiedAsBytesParams { pub create_accounts_proof: CreateAccountsProof, } @@ -119,7 +119,7 @@ pub struct D9MethodQualifiedAsBytes<'info> { // Test 4: Param with .to_le_bytes().as_ref() // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MethodToLeBytesParams { pub create_accounts_proof: CreateAccountsProof, pub id: u64, @@ -152,7 +152,7 @@ pub struct D9MethodToLeBytes<'info> { // Test 5: Param with .to_be_bytes().as_ref() // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MethodToBeBytesParams { pub create_accounts_proof: CreateAccountsProof, pub id: u64, @@ -185,7 +185,7 @@ pub struct D9MethodToBeBytes<'info> { // Test 6: Mixed methods in same seeds array // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MethodMixedParams { pub create_accounts_proof: CreateAccountsProof, pub 
owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/mixed.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/mixed.rs index d8c517aee7..f2a1a6a6cd 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/mixed.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/mixed.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9MixedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/nested_seeds.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/nested_seeds.rs index b31101a3ce..401e17877f 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/nested_seeds.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/nested_seeds.rs @@ -16,14 +16,14 @@ use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; // ============================================================================ /// Inner nested struct -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct InnerNested { pub owner: Pubkey, pub id: u64, } /// Outer nested struct containing inner -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct OuterNested { pub array: [u8; 16], pub nested: InnerNested, @@ -33,7 +33,7 @@ pub struct OuterNested { // Test 1: Simple nested struct access // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct 
D9NestedSimpleParams { pub create_accounts_proof: CreateAccountsProof, pub nested: InnerNested, @@ -66,7 +66,7 @@ pub struct D9NestedSimple<'info> { // Test 2: Double nested struct access // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9NestedDoubleParams { pub create_accounts_proof: CreateAccountsProof, pub outer: OuterNested, @@ -99,7 +99,7 @@ pub struct D9NestedDouble<'info> { // Test 3: Nested array field access // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9NestedArrayFieldParams { pub create_accounts_proof: CreateAccountsProof, pub outer: OuterNested, @@ -132,7 +132,7 @@ pub struct D9NestedArrayField<'info> { // Test 4: Array indexing - params.arrays[2].as_slice() // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ArrayIndexParams { pub create_accounts_proof: CreateAccountsProof, /// 2D array: 10 arrays of 16 bytes each @@ -166,7 +166,7 @@ pub struct D9ArrayIndex<'info> { // Test 5: Combined nested struct + bytes conversion // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9NestedBytesParams { pub create_accounts_proof: CreateAccountsProof, pub nested: InnerNested, @@ -199,7 +199,7 @@ pub struct D9NestedBytes<'info> { // Test 6: Multiple nested seeds combined // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, 
Clone, Debug)] pub struct D9NestedCombinedParams { pub create_accounts_proof: CreateAccountsProof, pub outer: OuterNested, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param.rs index 91ebb686e5..afeee6f73b 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ParamParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param_bytes.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param_bytes.rs index 107d4384bd..fd121a64ff 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param_bytes.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/param_bytes.rs @@ -8,7 +8,7 @@ use light_sdk_macros::LightAccounts; use crate::state::d1_field_types::single_pubkey::SinglePubkeyRecord; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9ParamBytesParams { pub create_accounts_proof: CreateAccountsProof, pub id: u64, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/qualified_paths.rs b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/qualified_paths.rs index 5e5fb84bb1..95f214b27b 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/qualified_paths.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/instructions/d9_seeds/qualified_paths.rs @@ -22,7 +22,7 @@ pub const D9_QUALIFIED_CRATE: &[u8] = 
b"d9_qualified_crate"; // Test 1: Bare constant (no path prefix) // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9QualifiedBareParams { pub create_accounts_proof: CreateAccountsProof, } @@ -54,7 +54,7 @@ pub struct D9QualifiedBare<'info> { // Test 2: self:: prefix // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9QualifiedSelfParams { pub create_accounts_proof: CreateAccountsProof, } @@ -86,7 +86,7 @@ pub struct D9QualifiedSelf<'info> { // Test 3: crate:: prefix // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9QualifiedCrateParams { pub create_accounts_proof: CreateAccountsProof, } @@ -118,7 +118,7 @@ pub struct D9QualifiedCrate<'info> { // Test 4: Deep nested path // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9QualifiedDeepParams { pub create_accounts_proof: CreateAccountsProof, } @@ -150,7 +150,7 @@ pub struct D9QualifiedDeep<'info> { // Test 5: Mixed qualified and bare in same seeds array // ============================================================================ -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] +#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug)] pub struct D9QualifiedMixedParams { pub create_accounts_proof: CreateAccountsProof, pub owner: Pubkey, diff --git a/sdk-tests/csdk-anchor-full-derived-test/src/lib.rs b/sdk-tests/csdk-anchor-full-derived-test/src/lib.rs index 65446d194d..826aefb20c 100644 --- 
a/sdk-tests/csdk-anchor-full-derived-test/src/lib.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/src/lib.rs @@ -2,6 +2,7 @@ #![allow(clippy::useless_asref)] // Testing macro handling of .as_ref() patterns use anchor_lang::prelude::*; +use light_instruction_decoder_derive::instruction_decoder; use light_sdk::{derive_light_cpi_signer, derive_light_rent_sponsor_pda}; use light_sdk_macros::light_program; use light_sdk_types::CpiSigner; @@ -83,6 +84,7 @@ pub fn program_rent_sponsor() -> Pubkey { pub const GAME_SESSION_SEED: &str = "game_session"; +#[instruction_decoder] #[light_program] #[program] pub mod csdk_anchor_full_derived_test { @@ -1419,3 +1421,45 @@ pub mod csdk_anchor_full_derived_test { Ok(()) } } + +// ============================================================================= +// Custom Instruction Decoder with Account Names and Params Decoding +// ============================================================================= + +/// Custom instruction decoder enum that provides account names and decoded params. +/// This uses the enhanced `#[derive(InstructionDecoder)]` macro with variant-level attributes +/// that reference Anchor's generated types. 
+#[derive(light_instruction_decoder_derive::InstructionDecoder)] +#[instruction_decoder( + program_id = "FAMipfVEhN4hjCLpKCvjDXXfzLsoVTqQccXzePz1L1ah", + program_name = "Csdk Anchor Full Derived Test" +)] +pub enum CsdkTestInstruction { + /// Create PDAs and mint in auto mode + #[instruction_decoder( + accounts = instruction_accounts::CreatePdasAndMintAuto, + params = instruction_accounts::FullAutoWithMintParams + )] + CreatePdasAndMintAuto, + + /// Create two mints in a single transaction + #[instruction_decoder( + accounts = instruction_accounts::CreateTwoMints, + params = instruction_accounts::CreateTwoMintsParams + )] + CreateTwoMints, + + /// Create three mints in a single transaction + #[instruction_decoder( + accounts = instruction_accounts::CreateThreeMints, + params = instruction_accounts::CreateThreeMintsParams + )] + CreateThreeMints, + + /// Create mint with metadata + #[instruction_decoder( + accounts = instruction_accounts::CreateMintWithMetadata, + params = instruction_accounts::CreateMintWithMetadataParams + )] + CreateMintWithMetadata, +} diff --git a/sdk-tests/csdk-anchor-full-derived-test/tests/basic_test.rs b/sdk-tests/csdk-anchor-full-derived-test/tests/basic_test.rs index c5a9926726..8f1b1dde98 100644 --- a/sdk-tests/csdk-anchor-full-derived-test/tests/basic_test.rs +++ b/sdk-tests/csdk-anchor-full-derived-test/tests/basic_test.rs @@ -66,11 +66,15 @@ async fn test_create_pdas_and_mint_auto() { } let program_id = csdk_anchor_full_derived_test::ID; - let mut config = ProgramTestConfig::new_v2( + let config = ProgramTestConfig::new_v2( true, Some(vec![("csdk_anchor_full_derived_test", program_id)]), - ); - config = config.with_light_protocol_events(); + ) + .with_decoders(vec![ + Box::new(csdk_anchor_full_derived_test::CsdkTestInstructionDecoder), + Box::new(csdk_anchor_full_derived_test::CsdkAnchorFullDerivedTestInstructionDecoder), + ]) + .with_light_protocol_events(); let mut rpc = LightProgramTest::new(config).await.unwrap(); let payer = 
rpc.get_payer().insecure_clone(); @@ -515,11 +519,15 @@ async fn test_create_two_mints() { }; let program_id = csdk_anchor_full_derived_test::ID; - let mut config = ProgramTestConfig::new_v2( + let config = ProgramTestConfig::new_v2( true, Some(vec![("csdk_anchor_full_derived_test", program_id)]), - ); - config = config.with_light_protocol_events(); + ) + .with_decoders(vec![ + Box::new(csdk_anchor_full_derived_test::CsdkTestInstructionDecoder), + Box::new(csdk_anchor_full_derived_test::CsdkAnchorFullDerivedTestInstructionDecoder), + ]) + .with_light_protocol_events(); let mut rpc = LightProgramTest::new(config).await.unwrap(); let payer = rpc.get_payer().insecure_clone(); diff --git a/sdk-tests/csdk-anchor-full-derived-test/tests/instruction_decoder_test.rs b/sdk-tests/csdk-anchor-full-derived-test/tests/instruction_decoder_test.rs new file mode 100644 index 0000000000..2d4de9ffc1 --- /dev/null +++ b/sdk-tests/csdk-anchor-full-derived-test/tests/instruction_decoder_test.rs @@ -0,0 +1,496 @@ +//! Tests for the InstructionDecoder derive macro. +//! +//! This test demonstrates how to use the `#[derive(InstructionDecoder)]` macro +//! to generate instruction decoders for Anchor programs. 
+ +use anchor_lang::Discriminator; +use light_instruction_decoder_derive::InstructionDecoder; +use light_program_test::logging::{DecoderRegistry, InstructionDecoder as InstructionDecoderTrait}; +use solana_pubkey::Pubkey; + +/// Example instruction enum for testing the derive macro +#[derive(InstructionDecoder)] +#[instruction_decoder( + program_id = "FAMipfVEhN4hjCLpKCvjDXXfzLsoVTqQccXzePz1L1ah", + program_name = "Test Program" +)] +pub enum TestInstruction { + /// Initialize instruction with no fields + Initialize, + /// Create record with owner + CreateRecord { owner: Pubkey }, + /// Update record with score + UpdateRecord { score: u64 }, + /// Transfer with amount and destination + Transfer { amount: u64, destination: Pubkey }, +} + +#[test] +fn test_instruction_decoder_macro_generates_decoder() { + // The macro should have generated TestInstructionDecoder struct + let decoder = TestInstructionDecoder; + + // Test program ID + let expected_id: Pubkey = "FAMipfVEhN4hjCLpKCvjDXXfzLsoVTqQccXzePz1L1ah" + .parse() + .unwrap(); + assert_eq!(decoder.program_id(), expected_id); + + // Test program name + assert_eq!(decoder.program_name(), "Test Program"); +} + +#[test] +fn test_instruction_decoder_can_be_registered() { + let decoder = TestInstructionDecoder; + + // Create a registry and register our decoder + let mut registry = DecoderRegistry::new(); + registry.register(Box::new(decoder)); + + // Verify the decoder is registered + let program_id: Pubkey = "FAMipfVEhN4hjCLpKCvjDXXfzLsoVTqQccXzePz1L1ah" + .parse() + .unwrap(); + assert!(registry.has_decoder(&program_id)); +} + +#[test] +fn test_instruction_decoder_decodes_instructions() { + use sha2::{Digest, Sha256}; + + let decoder = TestInstructionDecoder; + + // Test decoding an instruction with valid discriminator + // Compute the expected discriminator for "initialize" + let hash = Sha256::digest(b"global:initialize"); + let discriminator: [u8; 8] = hash[..8].try_into().unwrap(); + + let data = 
discriminator.to_vec(); + // No additional data for Initialize + + let result = decoder.decode(&data, &[]); + assert!(result.is_some()); + let decoded = result.unwrap(); + assert_eq!(decoded.name, "Initialize"); +} + +#[test] +fn test_instruction_decoder_returns_none_for_unknown() { + let decoder = TestInstructionDecoder; + + // Test with invalid discriminator + let data = [0u8; 16]; + let result = decoder.decode(&data, &[]); + assert!(result.is_none()); +} + +#[test] +fn test_instruction_decoder_with_fields() { + use sha2::{Digest, Sha256}; + + let decoder = TestInstructionDecoder; + + // Test decoding CreateRecord instruction + let hash = Sha256::digest(b"global:create_record"); + let discriminator: [u8; 8] = hash[..8].try_into().unwrap(); + + let mut data = discriminator.to_vec(); + // Add dummy owner pubkey data (32 bytes) + data.extend_from_slice(&[1u8; 32]); + + let result = decoder.decode(&data, &[]); + assert!(result.is_some()); + let decoded = result.unwrap(); + assert_eq!(decoded.name, "CreateRecord"); + // Should have fields reported + assert!(!decoded.fields.is_empty()); +} + +// ============================================================================= +// Tests for enhanced InstructionDecoder with accounts and params attributes +// ============================================================================= + +/// Test that CsdkTestInstructionDecoder decodes CreateTwoMints with correct account names +#[test] +fn test_enhanced_decoder_account_names() { + use csdk_anchor_full_derived_test::CsdkTestInstructionDecoder; + + let decoder = CsdkTestInstructionDecoder; + + // Verify program ID and name + let expected_id: Pubkey = "FAMipfVEhN4hjCLpKCvjDXXfzLsoVTqQccXzePz1L1ah" + .parse() + .unwrap(); + assert_eq!(decoder.program_id(), expected_id); + assert_eq!(decoder.program_name(), "Csdk Anchor Full Derived Test"); + + // Use Anchor's generated discriminator for create_two_mints + let discriminator = 
csdk_anchor_full_derived_test::instruction::CreateTwoMints::DISCRIMINATOR; + + // Build minimal instruction data (discriminator + enough bytes for params) + let mut data = discriminator.to_vec(); + // Add dummy params data - enough to pass the discriminator check + data.extend_from_slice(&[0u8; 200]); + + let result = decoder.decode(&data, &[]); + assert!(result.is_some(), "Decoder should recognize CreateTwoMints"); + + let decoded = result.unwrap(); + assert_eq!(decoded.name, "CreateTwoMints"); + + // Verify account names are populated from the accounts struct + assert!( + !decoded.account_names.is_empty(), + "Account names should not be empty" + ); + + // Check specific account names from CreateTwoMints struct + let expected_account_names = [ + "fee_payer", + "authority", + "mint_signer_a", + "mint_signer_b", + "cmint_a", + "cmint_b", + "compression_config", + "light_token_compressible_config", + "rent_sponsor", + "light_token_program", + "light_token_cpi_authority", + "system_program", + ]; + + assert_eq!( + decoded.account_names.len(), + expected_account_names.len(), + "Should have {} account names, got {}", + expected_account_names.len(), + decoded.account_names.len() + ); + + for (i, expected_name) in expected_account_names.iter().enumerate() { + assert_eq!( + decoded.account_names[i], *expected_name, + "Account name at index {} should be '{}', got '{}'", + i, expected_name, decoded.account_names[i] + ); + } +} + +/// Test that CsdkTestInstructionDecoder decodes params with Debug output +#[test] +fn test_enhanced_decoder_params_decoding() { + use borsh::BorshSerialize; + use csdk_anchor_full_derived_test::{ + instruction_accounts::CreateTwoMintsParams, CsdkTestInstructionDecoder, + }; + use light_compressed_account::instruction_data::compressed_proof::ValidityProof; + use light_compressible::CreateAccountsProof; + use light_sdk_types::instruction::PackedAddressTreeInfo; + + let decoder = CsdkTestInstructionDecoder; + + // Use Anchor's generated discriminator 
for create_two_mints + let discriminator = csdk_anchor_full_derived_test::instruction::CreateTwoMints::DISCRIMINATOR; + + // Build instruction data with actual serialized params + let params = CreateTwoMintsParams { + create_accounts_proof: CreateAccountsProof { + proof: ValidityProof(None), + address_tree_info: PackedAddressTreeInfo { + address_merkle_tree_pubkey_index: 0, + address_queue_pubkey_index: 0, + root_index: 0, + }, + output_state_tree_index: 0, + state_tree_index: None, + }, + mint_signer_a_bump: 254, + mint_signer_b_bump: 255, + }; + + let mut data = discriminator.to_vec(); + params.serialize(&mut data).unwrap(); + + let result = decoder.decode(&data, &[]); + assert!(result.is_some(), "Decoder should recognize CreateTwoMints"); + + let decoded = result.unwrap(); + assert_eq!(decoded.name, "CreateTwoMints"); + + // Verify params are decoded + assert!( + !decoded.fields.is_empty(), + "Fields should contain decoded params" + ); + + // The params field has empty name for inline display + let params_field = decoded.fields.first(); + assert!(params_field.is_some(), "Should have a params field"); + + let params_value = ¶ms_field.unwrap().value; + assert!( + params_value.contains("mint_signer_a_bump: 254"), + "Params should contain 'mint_signer_a_bump: 254', got: {}", + params_value + ); + assert!( + params_value.contains("mint_signer_b_bump: 255"), + "Params should contain 'mint_signer_b_bump: 255', got: {}", + params_value + ); +} + +/// Test that unknown instructions return None (fallback behavior) +#[test] +fn test_enhanced_decoder_unknown_instruction() { + use csdk_anchor_full_derived_test::CsdkTestInstructionDecoder; + + let decoder = CsdkTestInstructionDecoder; + + // Use an invalid discriminator + let data = [0u8; 16]; + let result = decoder.decode(&data, &[]); + assert!(result.is_none(), "Unknown instruction should return None"); +} + +// ============================================================================= +// Tests for 
#[instruction_decoder] attribute macro +// ============================================================================= + +/// Test that CsdkAnchorFullDerivedTestInstructionDecoder (from attribute macro) works +/// This decoder is auto-generated by the #[instruction_decoder] attribute on the program module. +#[test] +fn test_attribute_macro_decoder() { + use csdk_anchor_full_derived_test::CsdkAnchorFullDerivedTestInstructionDecoder; + + let decoder = CsdkAnchorFullDerivedTestInstructionDecoder; + + // Verify program ID uses crate::ID + let expected_id: Pubkey = "FAMipfVEhN4hjCLpKCvjDXXfzLsoVTqQccXzePz1L1ah" + .parse() + .unwrap(); + assert_eq!(decoder.program_id(), expected_id); + + // Test decoding create_two_mints instruction using Anchor's generated discriminator + let discriminator = csdk_anchor_full_derived_test::instruction::CreateTwoMints::DISCRIMINATOR; + + let mut data = discriminator.to_vec(); + data.extend_from_slice(&[0u8; 100]); // dummy data + + let result = decoder.decode(&data, &[]); + assert!(result.is_some()); + + let decoded = result.unwrap(); + assert_eq!(decoded.name, "CreateTwoMints"); + + // Verify account names are populated + assert!( + !decoded.account_names.is_empty(), + "Should have account names" + ); + assert!(decoded.account_names.contains(&"fee_payer".to_string())); +} + +/// Test that attribute macro decoder has account names for all instructions +#[test] +fn test_attribute_macro_decoder_account_names() { + use csdk_anchor_full_derived_test::CsdkAnchorFullDerivedTestInstructionDecoder; + + let decoder = CsdkAnchorFullDerivedTestInstructionDecoder; + + // Test create_pdas_and_mint_auto using Anchor's generated discriminator + let discriminator = + csdk_anchor_full_derived_test::instruction::CreatePdasAndMintAuto::DISCRIMINATOR; + let mut data = discriminator.to_vec(); + data.extend_from_slice(&[0u8; 100]); + + let result = decoder.decode(&data, &[]); + assert!(result.is_some(), "Should decode create_pdas_and_mint_auto"); + let 
decoded = result.unwrap(); + + // Verify specific account names from CreatePdasAndMintAuto struct + let expected_accounts = [ + "fee_payer", + "authority", + "mint_authority", + "mint_signer", + "user_record", + "game_session", + "mint", + "vault", + "vault_authority", + "user_ata", + "compression_config", + "light_token_compressible_config", + "rent_sponsor", + "light_token_program", + "light_token_cpi_authority", + "system_program", + ]; + + assert_eq!( + decoded.account_names.len(), + expected_accounts.len(), + "create_pdas_and_mint_auto should have {} accounts, got {}", + expected_accounts.len(), + decoded.account_names.len() + ); + + for (i, expected) in expected_accounts.iter().enumerate() { + assert_eq!( + decoded.account_names[i], *expected, + "Account at index {} should be '{}', got '{}'", + i, expected, decoded.account_names[i] + ); + } +} + +/// Test that attribute macro decoder handles initialize_pool (AMM test) +#[test] +fn test_attribute_macro_decoder_initialize_pool() { + use csdk_anchor_full_derived_test::CsdkAnchorFullDerivedTestInstructionDecoder; + + let decoder = CsdkAnchorFullDerivedTestInstructionDecoder; + + // Use Anchor's generated discriminator for initialize_pool + let discriminator = csdk_anchor_full_derived_test::instruction::InitializePool::DISCRIMINATOR; + let mut data = discriminator.to_vec(); + data.extend_from_slice(&[0u8; 100]); + + let result = decoder.decode(&data, &[]); + assert!(result.is_some(), "Should decode initialize_pool"); + let decoded = result.unwrap(); + + assert_eq!(decoded.name, "InitializePool"); + assert!( + !decoded.account_names.is_empty(), + "InitializePool should have account names" + ); + + // Check for specific AMM accounts + assert!( + decoded.account_names.contains(&"creator".to_string()), + "Should have 'creator' account" + ); + assert!( + decoded.account_names.contains(&"pool_state".to_string()), + "Should have 'pool_state' account" + ); + assert!( + 
decoded.account_names.contains(&"token_0_vault".to_string()), + "Should have 'token_0_vault' account" + ); +} + +/// Test attribute macro decoder with actual serialized instruction data +#[test] +fn test_attribute_macro_decoder_with_instruction_data() { + use borsh::BorshSerialize; + use csdk_anchor_full_derived_test::{ + instruction_accounts::CreateTwoMintsParams, CsdkAnchorFullDerivedTestInstructionDecoder, + }; + use light_compressed_account::instruction_data::compressed_proof::ValidityProof; + use light_compressible::CreateAccountsProof; + use light_program_test::logging::InstructionDecoder; + use light_sdk_types::instruction::PackedAddressTreeInfo; + + let decoder = CsdkAnchorFullDerivedTestInstructionDecoder; + + // Use Anchor's generated discriminator for create_two_mints + let discriminator = csdk_anchor_full_derived_test::instruction::CreateTwoMints::DISCRIMINATOR; + + // Build instruction data with actual serialized params + let params = CreateTwoMintsParams { + create_accounts_proof: CreateAccountsProof { + proof: ValidityProof(None), + address_tree_info: PackedAddressTreeInfo { + address_merkle_tree_pubkey_index: 0, + address_queue_pubkey_index: 0, + root_index: 0, + }, + output_state_tree_index: 0, + state_tree_index: None, + }, + mint_signer_a_bump: 254, + mint_signer_b_bump: 255, + }; + + let mut data = discriminator.to_vec(); + params.serialize(&mut data).unwrap(); + + println!("Instruction data length: {} bytes", data.len()); + println!("Discriminator: {:?}", &data[0..8]); + + let result = decoder.decode(&data, &[]); + assert!(result.is_some(), "Should decode create_two_mints"); + + let decoded = result.unwrap(); + println!("Decoded instruction: {}", decoded.name); + println!("Account names: {:?}", decoded.account_names); + println!("Fields: {:?}", decoded.fields); + + assert_eq!(decoded.name, "CreateTwoMints"); + + // Verify account names are correct + assert_eq!(decoded.account_names.len(), 12); + assert_eq!(decoded.account_names[0], 
"fee_payer"); + assert_eq!(decoded.account_names[1], "authority"); + + // The attribute macro decodes params - requires Debug impl (compile error if missing) + assert_eq!(decoded.fields.len(), 1, "Should have 1 field (params)"); + // Field name is empty for inline params display + assert_eq!(decoded.fields[0].name, ""); + + // Verify params contain expected values + let params_value = &decoded.fields[0].value; + assert!( + params_value.contains("mint_signer_a_bump: 254"), + "Params should contain 'mint_signer_a_bump: 254', got: {}", + params_value + ); + assert!( + params_value.contains("mint_signer_b_bump: 255"), + "Params should contain 'mint_signer_b_bump: 255', got: {}", + params_value + ); +} + +/// Test that InstructionDecoder discriminators match Anchor's DISCRIMINATOR constants. +/// This validates consistency between the InstructionDecoder macro and Anchor's instruction generation. +#[test] +fn test_discriminators_match_anchor_constants() { + use sha2::{Digest, Sha256}; + + // Verify the sha256 computation matches Anchor's DISCRIMINATOR for each instruction + let instructions: &[(&str, &[u8])] = &[ + ( + "create_two_mints", + csdk_anchor_full_derived_test::instruction::CreateTwoMints::DISCRIMINATOR, + ), + ( + "create_three_mints", + csdk_anchor_full_derived_test::instruction::CreateThreeMints::DISCRIMINATOR, + ), + ( + "create_pdas_and_mint_auto", + csdk_anchor_full_derived_test::instruction::CreatePdasAndMintAuto::DISCRIMINATOR, + ), + ( + "initialize_pool", + csdk_anchor_full_derived_test::instruction::InitializePool::DISCRIMINATOR, + ), + ]; + + for (name, anchor_discriminator) in instructions { + let hash = Sha256::digest(format!("global:{}", name).as_bytes()); + let computed = &hash[..8]; + + assert_eq!( + computed, *anchor_discriminator, + "Discriminator mismatch for '{}': computed {:?} != anchor {:?}", + name, computed, anchor_discriminator + ); + } +} diff --git a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/Cargo.toml 
b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/Cargo.toml index 5676da6da1..cffe36d40a 100644 --- a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/Cargo.toml +++ b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/Cargo.toml @@ -26,8 +26,8 @@ light-hasher = { workspace = true, features = ["solana", "poseidon", "sha256", " anchor-lang = { workspace = true } light-sdk = { workspace = true, features = ["anchor", "v2", "poseidon"] } light-sdk-types = { workspace = true } +light-instruction-decoder = { workspace = true } serial_test = { workspace = true } -solana-pubkey = { workspace = true } [target.'cfg(not(target_os = "solana"))'.dependencies] solana-sdk = { workspace = true } diff --git a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs index db55f7f9da..52ffc64eba 100644 --- a/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs +++ b/sdk-tests/sdk-anchor-test/programs/sdk-anchor-test/src/lib.rs @@ -4,6 +4,7 @@ mod read_only; use anchor_lang::{prelude::*, Discriminator}; +use light_instruction_decoder::instruction_decoder; use light_sdk::{ // anchor test test poseidon LightAccount, native tests sha256 LightAccount account::LightAccount, @@ -26,6 +27,7 @@ declare_id!("2tzfijPBGbrR5PboyFUFKzfEoLTwdDSHUjANCw929wyt"); pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("2tzfijPBGbrR5PboyFUFKzfEoLTwdDSHUjANCw929wyt"); +#[instruction_decoder] #[program] pub mod sdk_anchor_test { diff --git a/sdk-tests/sdk-token-test/Cargo.toml b/sdk-tests/sdk-token-test/Cargo.toml index 64c79b2562..bd2c1c25e3 100644 --- a/sdk-tests/sdk-token-test/Cargo.toml +++ b/sdk-tests/sdk-token-test/Cargo.toml @@ -45,6 +45,9 @@ light-batched-merkle-tree = { workspace = true } light-token-interface = { workspace = true, features = ["anchor"] } light-zero-copy = { workspace = true } light-program-profiler = { workspace = true } +light-instruction-decoder = { workspace = true } 
+light-instruction-decoder-derive = { workspace = true } +solana-instruction = { workspace = true } [dev-dependencies] light-program-test = { workspace = true, features = ["devenv"] } diff --git a/sdk-tests/sdk-token-test/src/lib.rs b/sdk-tests/sdk-token-test/src/lib.rs index 9124c500e6..ca28952afb 100644 --- a/sdk-tests/sdk-token-test/src/lib.rs +++ b/sdk-tests/sdk-token-test/src/lib.rs @@ -3,6 +3,7 @@ #![allow(deprecated)] use anchor_lang::prelude::*; +use light_instruction_decoder::instruction_decoder; use light_sdk::{ instruction::{PackedAddressTreeInfo, ValidityProof as LightValidityProof}, PackedAddressTreeInfoExt, @@ -54,7 +55,7 @@ use light_sdk::{cpi::CpiSigner, derive_light_cpi_signer}; pub const LIGHT_CPI_SIGNER: CpiSigner = derive_light_cpi_signer!("5p1t1GAaKtK1FKCh5Hd2Gu8JCu3eREhJm4Q2qYfTEPYK"); -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Clone, AnchorSerialize, AnchorDeserialize, Debug)] pub struct TokenParams { pub deposit_amount: u64, pub depositing_token_metas: Vec, @@ -63,7 +64,7 @@ pub struct TokenParams { pub recipient_bump: u8, } -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Clone, AnchorSerialize, AnchorDeserialize, Debug)] pub struct PdaParams { pub account_meta: CompressedAccountMeta, pub existing_amount: u64, @@ -75,6 +76,8 @@ use crate::{ process_create_compressed_account::deposit_tokens, process_four_transfer2::FourTransfer2Params, process_update_deposit::process_update_deposit, }; + +#[instruction_decoder] #[program] pub mod sdk_token_test { diff --git a/sdk-tests/sdk-token-test/src/mint_compressed_tokens_cpi_write.rs b/sdk-tests/sdk-token-test/src/mint_compressed_tokens_cpi_write.rs index 968061a95b..6063f536f4 100644 --- a/sdk-tests/sdk-token-test/src/mint_compressed_tokens_cpi_write.rs +++ b/sdk-tests/sdk-token-test/src/mint_compressed_tokens_cpi_write.rs @@ -9,7 +9,7 @@ use light_token_interface::instructions::mint_action::{ use crate::Generic; -#[derive(AnchorSerialize, AnchorDeserialize, Clone)] 
+#[derive(Debug, AnchorSerialize, AnchorDeserialize, Clone)] pub struct MintCompressedTokensCpiWriteParams { pub compressed_mint_with_context: MintWithContext, pub recipients: Vec, diff --git a/sdk-tests/sdk-token-test/src/process_create_two_mints.rs b/sdk-tests/sdk-token-test/src/process_create_two_mints.rs index 304974a224..34c5f3b152 100644 --- a/sdk-tests/sdk-token-test/src/process_create_two_mints.rs +++ b/sdk-tests/sdk-token-test/src/process_create_two_mints.rs @@ -6,7 +6,7 @@ use light_token::{ /// Parameters for a single mint within a batch creation. /// Does not include proof since proof is shared across all mints. -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Clone, AnchorSerialize, AnchorDeserialize, Debug)] pub struct MintParams { pub decimals: u8, pub address_merkle_tree_root_index: u16, @@ -26,7 +26,7 @@ pub struct MintParams { /// Flow: /// - N=1: Single CPI (create + decompress) /// - N>1: 2N-1 CPIs (N-1 writes + 1 execute with decompress + N-1 decompress) -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Clone, AnchorSerialize, AnchorDeserialize, Debug)] pub struct CreateMintsParams { /// Parameters for each mint to create pub mints: Vec, diff --git a/sdk-tests/sdk-token-test/src/process_four_invokes.rs b/sdk-tests/sdk-token-test/src/process_four_invokes.rs index 0b4204ba76..7098976100 100644 --- a/sdk-tests/sdk-token-test/src/process_four_invokes.rs +++ b/sdk-tests/sdk-token-test/src/process_four_invokes.rs @@ -12,7 +12,7 @@ use light_token::compressed_token::{ use crate::{process_update_deposit::process_update_escrow_pda, PdaParams}; -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] pub struct TransferParams { pub mint: Pubkey, pub transfer_amount: u64, @@ -21,7 +21,7 @@ pub struct TransferParams { pub recipient_bump: u8, } -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] pub struct 
CompressParams { pub mint: Pubkey, pub amount: u64, @@ -30,7 +30,7 @@ pub struct CompressParams { pub token_account: Pubkey, } -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] pub struct FourInvokesParams { pub compress_1: CompressParams, pub transfer_2: TransferParams, diff --git a/sdk-tests/sdk-token-test/src/process_four_transfer2.rs b/sdk-tests/sdk-token-test/src/process_four_transfer2.rs index 29dd3cd58a..8fc5431eb7 100644 --- a/sdk-tests/sdk-token-test/src/process_four_transfer2.rs +++ b/sdk-tests/sdk-token-test/src/process_four_transfer2.rs @@ -19,14 +19,14 @@ use light_token_interface::instructions::transfer2::MultiInputTokenDataWithConte use crate::{process_update_deposit::CompressedEscrowPda, PdaParams, LIGHT_CPI_SIGNER}; -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] pub struct TransferParams { pub transfer_amount: u64, pub token_metas: Vec, pub recipient: u8, } -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] pub struct CompressParams { pub mint: u8, pub amount: u64, @@ -35,7 +35,7 @@ pub struct CompressParams { pub authority: u8, } -#[derive(Clone, AnchorSerialize, AnchorDeserialize)] +#[derive(Debug, Clone, AnchorSerialize, AnchorDeserialize)] pub struct FourTransfer2Params { pub compress_1: CompressParams, pub transfer_2: TransferParams, diff --git a/sdk-tests/sdk-token-test/tests/test.rs b/sdk-tests/sdk-token-test/tests/test.rs index 60614ae176..3e85bdd929 100644 --- a/sdk-tests/sdk-token-test/tests/test.rs +++ b/sdk-tests/sdk-token-test/tests/test.rs @@ -455,10 +455,12 @@ async fn decompress_compressed_tokens( #[tokio::test] async fn test_batch_compress() { // Initialize the test environment - let mut rpc = LightProgramTest::new(ProgramTestConfig::new_v2( - false, - Some(vec![("sdk_token_test", sdk_token_test::ID)]), - )) + let mut rpc = 
LightProgramTest::new( + ProgramTestConfig::new_v2(false, Some(vec![("sdk_token_test", sdk_token_test::ID)])) + .with_decoders(vec![Box::new( + sdk_token_test::SdkTokenTestInstructionDecoder, + )]), + ) .await .unwrap(); diff --git a/sdk-tests/sdk-token-test/tests/test_4_transfer2.rs b/sdk-tests/sdk-token-test/tests/test_4_transfer2.rs index 7bef0f5749..98bf56287c 100644 --- a/sdk-tests/sdk-token-test/tests/test_4_transfer2.rs +++ b/sdk-tests/sdk-token-test/tests/test_4_transfer2.rs @@ -30,10 +30,12 @@ use solana_sdk::{ #[tokio::test] async fn test_4_transfer2() { // Initialize the test environment - let mut rpc = LightProgramTest::new(ProgramTestConfig::new_v2( - false, - Some(vec![("sdk_token_test", sdk_token_test::ID)]), - )) + let mut rpc = LightProgramTest::new( + ProgramTestConfig::new_v2(false, Some(vec![("sdk_token_test", sdk_token_test::ID)])) + .with_decoders(vec![Box::new( + sdk_token_test::SdkTokenTestInstructionDecoder, + )]), + ) .await .unwrap(); diff --git a/sdk-tests/sdk-token-test/tests/test_create_two_mints.rs b/sdk-tests/sdk-token-test/tests/test_create_two_mints.rs index 8a7f23a014..e554689a60 100644 --- a/sdk-tests/sdk-token-test/tests/test_create_two_mints.rs +++ b/sdk-tests/sdk-token-test/tests/test_create_two_mints.rs @@ -26,10 +26,12 @@ async fn test_create_three_mints() { } async fn test_create_mints(n: usize) { - let mut rpc = LightProgramTest::new(ProgramTestConfig::new_v2( - false, - Some(vec![("sdk_token_test", sdk_token_test::ID)]), - )) + let mut rpc = LightProgramTest::new( + ProgramTestConfig::new_v2(false, Some(vec![("sdk_token_test", sdk_token_test::ID)])) + .with_decoders(vec![Box::new( + sdk_token_test::SdkTokenTestInstructionDecoder, + )]), + ) .await .unwrap(); diff --git a/sdk-tests/sdk-token-test/tests/test_instruction_decoder.rs b/sdk-tests/sdk-token-test/tests/test_instruction_decoder.rs new file mode 100644 index 0000000000..6602a05a4e --- /dev/null +++ b/sdk-tests/sdk-token-test/tests/test_instruction_decoder.rs @@ 
-0,0 +1,132 @@ +//! Tests for the InstructionDecoder generated by the #[instruction_decoder] attribute macro. + +use anchor_lang::InstructionData; +use light_instruction_decoder::InstructionDecoder; +use sdk_token_test::SdkTokenTestInstructionDecoder; +use solana_sdk::pubkey::Pubkey; + +#[test] +fn test_decode_batch_compress_tokens() { + let decoder = SdkTokenTestInstructionDecoder; + let instruction_data = sdk_token_test::instruction::BatchCompressTokens { + recipients: vec![], + token_pool_index: 0, + token_pool_bump: 255, + } + .data(); + + let decoded = decoder + .decode(&instruction_data, &[]) + .expect("Failed to decode BatchCompressTokens"); + + assert_eq!( + decoded.name, "BatchCompressTokens", + "Instruction name should match" + ); +} + +#[test] +fn test_decode_compress_tokens() { + let decoder = SdkTokenTestInstructionDecoder; + let instruction_data = sdk_token_test::instruction::CompressTokens { + output_tree_index: 0, + recipient: Pubkey::default(), + mint: Pubkey::default(), + amount: 1000, + } + .data(); + + let decoded = decoder + .decode(&instruction_data, &[]) + .expect("Failed to decode CompressTokens"); + + assert_eq!( + decoded.name, "CompressTokens", + "Instruction name should match" + ); +} + +#[test] +fn test_decode_transfer_tokens() { + let decoder = SdkTokenTestInstructionDecoder; + let instruction_data = sdk_token_test::instruction::TransferTokens { + validity_proof: light_token::ValidityProof::default(), + token_metas: vec![], + output_tree_index: 0, + mint: Pubkey::default(), + recipient: Pubkey::default(), + } + .data(); + + let decoded = decoder + .decode(&instruction_data, &[]) + .expect("Failed to decode TransferTokens"); + + assert_eq!( + decoded.name, "TransferTokens", + "Instruction name should match" + ); +} + +#[test] +fn test_decode_decompress_tokens() { + let decoder = SdkTokenTestInstructionDecoder; + let instruction_data = sdk_token_test::instruction::DecompressTokens { + validity_proof: 
light_token::ValidityProof::default(), + token_data: vec![], + output_tree_index: 0, + mint: Pubkey::default(), + } + .data(); + + let decoded = decoder + .decode(&instruction_data, &[]) + .expect("Failed to decode DecompressTokens"); + + assert_eq!( + decoded.name, "DecompressTokens", + "Instruction name should match" + ); +} + +#[test] +fn test_decoder_program_id() { + let decoder = SdkTokenTestInstructionDecoder; + assert_eq!( + decoder.program_id(), + sdk_token_test::ID, + "Program ID should match declared ID" + ); +} + +#[test] +fn test_decoder_program_name() { + let decoder = SdkTokenTestInstructionDecoder; + assert_eq!( + decoder.program_name(), + "Sdk Token Test", + "Program name should match module name in title case" + ); +} + +#[test] +fn test_decode_unknown_discriminator_returns_none() { + let decoder = SdkTokenTestInstructionDecoder; + let invalid_data = [0u8; 8]; + + assert!( + decoder.decode(&invalid_data, &[]).is_none(), + "Unknown discriminator should return None" + ); +} + +#[test] +fn test_decode_insufficient_data_returns_none() { + let decoder = SdkTokenTestInstructionDecoder; + let short_data = [0u8; 4]; + + assert!( + decoder.decode(&short_data, &[]).is_none(), + "Data shorter than 8 bytes should return None" + ); +}