+ );
+}
+```
+
+## Production considerations
+
+### Private key management
+
+The examples above hardcode private keys for clarity. In production:
+
+- **Never** ship private keys in frontend code
+- Use a backend service to sign state transitions, or
+- Prompt the user for their mnemonic/key at runtime and keep it in memory only
+- Consider the `wallet` namespace for key derivation from user-provided mnemonics
+
+```tsx
+import { wallet } from '@dashevo/evo-sdk';
+
+async function signWithUserMnemonic(mnemonic: string) {
+ const keyInfo = await wallet.deriveKeyFromSeedPhrase({
+ mnemonic,
+ network: 'testnet',
+ derivationPath: "m/9'/1'/0'/0/0",
+ });
+ return keyInfo.privateKeyWif;
+}
+```
+
+### Bundle size
+
+The WASM module adds ~2-4 MB (gzipped) to your bundle. To optimise:
+
+- Use **code splitting** — the SDK module only loads when `connect()` is called
+- Vite handles WASM lazy loading automatically
+- Consider loading the SDK only on pages that need it
+
+### Error boundaries
+
+Wrap SDK-dependent components in an error boundary to handle WASM
+initialization failures gracefully:
+
+```tsx
+import { ErrorBoundary } from 'react-error-boundary';
+
+Failed to load Dash SDK
}>
+
+
+
+
+```
+
+### Network switching
+
+To let users switch networks at runtime, key the provider on the network value:
+
+```tsx
+const [network, setNetwork] = useState<'testnet' | 'mainnet'>('testnet');
+
+
+
+
+```
+
+The `key` prop forces React to unmount and remount the provider, creating a
+fresh SDK connection for the new network.
diff --git a/book/src/evo-sdk/wallet-utilities.md b/book/src/evo-sdk/wallet-utilities.md
new file mode 100644
index 00000000000..b30ef759563
--- /dev/null
+++ b/book/src/evo-sdk/wallet-utilities.md
@@ -0,0 +1,124 @@
+# Wallet Utilities
+
+The Evo SDK exports a standalone `wallet` namespace with offline cryptographic
+utilities. These functions do **not** require a connected SDK instance — they
+initialise the WASM module on first call and work independently.
+
+```typescript
+import { wallet } from '@dashevo/evo-sdk';
+```
+
+## Mnemonic management
+
+```typescript
+// Generate a new 12-word mnemonic
+const mnemonic = await wallet.generateMnemonic();
+// "abandon ability able about above absent ..."
+
+// Validate an existing mnemonic
+const valid = await wallet.validateMnemonic(mnemonic);
+
+// Convert to seed bytes (with optional passphrase)
+const seed = await wallet.mnemonicToSeed(mnemonic, 'optional-passphrase');
+```
+
+## Key derivation
+
+### From seed phrase
+
+```typescript
+const keyInfo = await wallet.deriveKeyFromSeedPhrase({
+ mnemonic,
+ network: 'testnet',
+ derivationPath: "m/44'/1'/0'/0/0",
+});
+// keyInfo.privateKeyWif, keyInfo.publicKeyHex, keyInfo.address
+```
+
+### From seed with path
+
+```typescript
+const seed = await wallet.mnemonicToSeed(mnemonic);
+const key = await wallet.deriveKeyFromSeedWithPath({
+ seed,
+ network: 'testnet',
+ path: "m/44'/1'/0'/0/0",
+});
+```
+
+### Standard derivation paths
+
+The SDK provides helpers for Dash-specific derivation paths:
+
+```typescript
+// BIP-44 paths
+const bip44 = await wallet.derivationPathBip44Testnet(0, 0, 0);
+// "m/44'/1'/0'/0/0"
+
+// DIP-9 Platform paths (identity authentication keys)
+const dip9 = await wallet.derivationPathDip9Testnet(0, 0, 0);
+
+// DIP-13 DashPay paths (contact encryption keys)
+const dip13 = await wallet.derivationPathDip13Testnet(0);
+```
+
+### Extended public key operations
+
+```typescript
+// Convert xprv to xpub
+const xpub = await wallet.xprvToXpub(xprv);
+
+// Derive child public key
+const childPub = await wallet.deriveChildPublicKey(xpub, 0, false);
+```
+
+## Key pair generation
+
+```typescript
+// Generate a random key pair
+const keyPair = await wallet.generateKeyPair('testnet');
+// keyPair.privateKeyWif, keyPair.publicKeyHex, keyPair.address
+
+// Generate multiple key pairs
+const pairs = await wallet.generateKeyPairs('testnet', 5);
+
+// Import from WIF
+const imported = await wallet.keyPairFromWif('cPrivateKeyWif...');
+
+// Import from hex
+const fromHex = await wallet.keyPairFromHex('abcd1234...', 'testnet');
+```
+
+## Address utilities
+
+```typescript
+// Derive address from public key
+const address = await wallet.pubkeyToAddress(pubkeyHex, 'testnet');
+
+// Validate an address for a network
+const ok = await wallet.validateAddress('yWhatever...', 'testnet');
+```
+
+## Message signing
+
+```typescript
+const signature = await wallet.signMessage(
+ 'Hello Dash Platform',
+ privateKeyWif,
+);
+```
+
+## DashPay contact keys
+
+For DashPay encrypted messaging, derive contact-specific keys:
+
+```typescript
+const contactKey = await wallet.deriveDashpayContactKey({
+ mnemonic,
+ network: 'testnet',
+ senderIdentityId: '...',
+ recipientIdentityId: '...',
+ account: 0,
+ index: 0,
+});
+```
From 5381e7893c6a608c9ddd75fe3225eb5fff6d5dfc Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Thu, 2 Apr 2026 19:23:28 +0300
Subject: [PATCH 02/40] feat(swift-sdk): add ZK sync, local Docker support, and
account management (part 1) (#3393)
Co-authored-by: Claude Opus 4.6 (1M context)
---
Cargo.lock | 20 +-
Cargo.toml | 14 +-
packages/rs-sdk-ffi/src/sdk.rs | 54 ++-
.../Core/Models/HDWalletModels.swift | 3 +
.../SwiftDashSDK/Core/SPV/SPVClient.swift | 32 +-
.../Core/Services/WalletService.swift | 62 ++-
.../Core/Wallet/CoreWalletManager.swift | 219 ++++++++++-
.../KeyWallet/KeyWalletTypes.swift | 3 +
.../SwiftDashSDK/KeyWallet/Wallet.swift | 2 +-
.../KeyWallet/WalletManager.swift | 2 +-
.../swift-sdk/Sources/SwiftDashSDK/SDK.swift | 20 +-
.../SwiftExampleApp/AppState.swift | 51 +--
.../SwiftExampleApp/ContentView.swift | 13 +-
.../Core/Models/DashAddress.swift | 13 +-
.../Core/Services/ZKSyncService.swift | 164 ++++++++
.../Core/ViewModels/SendViewModel.swift | 125 ++++++-
.../Core/Views/AccountListView.swift | 30 +-
.../Core/Views/AddAccountView.swift | 319 ++++++++++++++++
.../Core/Views/CoreContentView.swift | 352 +++++++++++++-----
.../Core/Views/CreateWalletView.swift | 21 +-
.../Core/Views/ReceiveAddressView.swift | 98 +++++
.../Core/Views/SendTransactionView.swift | 100 ++++-
.../Core/Views/WalletsContentView.swift | 1 +
.../SwiftExampleApp/SwiftExampleAppApp.swift | 1 +
.../SwiftExampleApp/UnifiedAppState.swift | 51 +++
.../SwiftExampleApp/Views/OptionsView.swift | 38 +-
26 files changed, 1581 insertions(+), 227 deletions(-)
create mode 100644 packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Services/ZKSyncService.swift
create mode 100644 packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AddAccountView.swift
diff --git a/Cargo.lock b/Cargo.lock
index 560ff6e46b0..92fea276fcf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1607,7 +1607,7 @@ dependencies = [
[[package]]
name = "dash-spv"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"anyhow",
"async-trait",
@@ -1640,7 +1640,7 @@ dependencies = [
[[package]]
name = "dash-spv-ffi"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"cbindgen 0.29.2",
"clap",
@@ -1665,7 +1665,7 @@ dependencies = [
[[package]]
name = "dashcore"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"anyhow",
"base64-compat",
@@ -1690,12 +1690,12 @@ dependencies = [
[[package]]
name = "dashcore-private"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
[[package]]
name = "dashcore-rpc"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"dashcore-rpc-json",
"hex",
@@ -1708,7 +1708,7 @@ dependencies = [
[[package]]
name = "dashcore-rpc-json"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"bincode",
"dashcore",
@@ -1723,7 +1723,7 @@ dependencies = [
[[package]]
name = "dashcore_hashes"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"bincode",
"dashcore-private",
@@ -3829,7 +3829,7 @@ dependencies = [
[[package]]
name = "key-wallet"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"aes",
"async-trait",
@@ -3857,7 +3857,7 @@ dependencies = [
[[package]]
name = "key-wallet-ffi"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"cbindgen 0.29.2",
"dashcore",
@@ -3872,7 +3872,7 @@ dependencies = [
[[package]]
name = "key-wallet-manager"
version = "0.42.0"
-source = "git+https://github.com/dashpay/rust-dashcore?rev=5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a#5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a"
+source = "git+https://github.com/dashpay/rust-dashcore?rev=6638745c27119778d4c78959003955f00bad373c#6638745c27119778d4c78959003955f00bad373c"
dependencies = [
"async-trait",
"bincode",
diff --git a/Cargo.toml b/Cargo.toml
index 2a27563d527..80a21c2046e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -47,13 +47,13 @@ members = [
]
[workspace.dependencies]
-dashcore = { git = "https://github.com/dashpay/rust-dashcore", rev = "5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a" }
-dash-spv = { git = "https://github.com/dashpay/rust-dashcore", rev = "5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a" }
-dash-spv-ffi = { git = "https://github.com/dashpay/rust-dashcore", rev = "5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a" }
-key-wallet = { git = "https://github.com/dashpay/rust-dashcore", rev = "5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a" }
-key-wallet-ffi = { git = "https://github.com/dashpay/rust-dashcore", rev = "5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a" }
-key-wallet-manager = { git = "https://github.com/dashpay/rust-dashcore", rev = "5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a" }
-dashcore-rpc = { git = "https://github.com/dashpay/rust-dashcore", rev = "5db46b4d2bdc50b0fbc8d9acbebe72775bb4132a" }
+dashcore = { git = "https://github.com/dashpay/rust-dashcore", rev = "6638745c27119778d4c78959003955f00bad373c" }
+dash-spv = { git = "https://github.com/dashpay/rust-dashcore", rev = "6638745c27119778d4c78959003955f00bad373c" }
+dash-spv-ffi = { git = "https://github.com/dashpay/rust-dashcore", rev = "6638745c27119778d4c78959003955f00bad373c" }
+key-wallet = { git = "https://github.com/dashpay/rust-dashcore", rev = "6638745c27119778d4c78959003955f00bad373c" }
+key-wallet-ffi = { git = "https://github.com/dashpay/rust-dashcore", rev = "6638745c27119778d4c78959003955f00bad373c" }
+key-wallet-manager = { git = "https://github.com/dashpay/rust-dashcore", rev = "6638745c27119778d4c78959003955f00bad373c" }
+dashcore-rpc = { git = "https://github.com/dashpay/rust-dashcore", rev = "6638745c27119778d4c78959003955f00bad373c" }
# Optimize heavy crypto crates even in dev/test builds so that
# Halo 2 proof generation and verification run at near-release speed.
diff --git a/packages/rs-sdk-ffi/src/sdk.rs b/packages/rs-sdk-ffi/src/sdk.rs
index 4d60eeb0ec6..bfea1fe335c 100644
--- a/packages/rs-sdk-ffi/src/sdk.rs
+++ b/packages/rs-sdk-ffi/src/sdk.rs
@@ -331,21 +331,47 @@ pub unsafe extern "C" fn dash_sdk_create_trusted(config: *const DashSDKConfig) -
);
// Create trusted context provider
- let trusted_provider = match rs_sdk_trusted_context_provider::TrustedHttpContextProvider::new(
- network,
- None, // Use default quorum lookup endpoints
- std::num::NonZeroUsize::new(100).unwrap(), // Cache size
- ) {
- Ok(provider) => {
- info!("dash_sdk_create_trusted: trusted context provider created");
- Arc::new(provider)
+ // For local/regtest, use the quorum sidecar at localhost:22444 (dashmate Docker default)
+ let is_local = matches!(
+ config.network,
+ DashSDKNetwork::SDKLocal | DashSDKNetwork::SDKRegtest
+ );
+ let trusted_provider = if is_local {
+ info!("dash_sdk_create_trusted: using local quorum sidecar for regtest");
+ match rs_sdk_trusted_context_provider::TrustedHttpContextProvider::new_with_url(
+ network,
+ "http://127.0.0.1:22444".to_string(),
+ std::num::NonZeroUsize::new(100).unwrap(),
+ ) {
+ Ok(provider) => {
+ info!("dash_sdk_create_trusted: local trusted context provider created");
+ Arc::new(provider)
+ }
+ Err(e) => {
+ error!(error = %e, "dash_sdk_create_trusted: failed to create local context provider");
+ return DashSDKResult::error(DashSDKError::new(
+ DashSDKErrorCode::InternalError,
+ format!("Failed to create local context provider: {}", e),
+ ));
+ }
}
- Err(e) => {
- error!(error = %e, "dash_sdk_create_trusted: failed to create trusted context provider");
- return DashSDKResult::error(DashSDKError::new(
- DashSDKErrorCode::InternalError,
- format!("Failed to create trusted context provider: {}", e),
- ));
+ } else {
+ match rs_sdk_trusted_context_provider::TrustedHttpContextProvider::new(
+ network,
+ None, // Use default quorum lookup endpoints
+ std::num::NonZeroUsize::new(100).unwrap(), // Cache size
+ ) {
+ Ok(provider) => {
+ info!("dash_sdk_create_trusted: trusted context provider created");
+ Arc::new(provider)
+ }
+ Err(e) => {
+ error!(error = %e, "dash_sdk_create_trusted: failed to create trusted context provider");
+ return DashSDKResult::error(DashSDKError::new(
+ DashSDKErrorCode::InternalError,
+ format!("Failed to create trusted context provider: {}", e),
+ ));
+ }
}
};
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Models/HDWalletModels.swift b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Models/HDWalletModels.swift
index 070a9d2ae0d..fb2fa1d6b51 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Models/HDWalletModels.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Models/HDWalletModels.swift
@@ -21,6 +21,9 @@ public enum AccountCategory: Equatable, Hashable, Sendable {
case providerOwnerKeys
case providerOperatorKeys
case providerPlatformKeys
+ case dashPayReceivingFunds
+ case dashPayExternalAccount
+ case platformPayment
}
// MARK: - Account Info
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/Core/SPV/SPVClient.swift b/packages/swift-sdk/Sources/SwiftDashSDK/Core/SPV/SPVClient.swift
index 345fd6f7211..5f56aaa8dfd 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/Core/SPV/SPVClient.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/Core/SPV/SPVClient.swift
@@ -44,6 +44,9 @@ class SPVClient: @unchecked Sendable {
return dash_spv_ffi_config_mainnet()
case 1:
return dash_spv_ffi_config_testnet()
+ case 2:
+ // Regtest (local Docker)
+ return dash_spv_ffi_config_new(FFINetwork(rawValue: 2))
case 3:
// Map devnet to custom FFINetwork value 3
return dash_spv_ffi_config_new(FFINetwork(rawValue: 3))
@@ -58,6 +61,7 @@ class SPVClient: @unchecked Sendable {
// If requested, prefer local core peers (defaults to 127.0.0.1 with network default port)
let useLocalCore = UserDefaults.standard.bool(forKey: "useLocalhostCore")
+ || UserDefaults.standard.bool(forKey: "useDockerSetup")
// Only restrict to configured peers when using local core, if not, allow DNS discovery
let restrictToConfiguredPeers = useLocalCore
if useLocalCore {
@@ -65,6 +69,8 @@ class SPVClient: @unchecked Sendable {
if swiftLoggingEnabled {
print("[SPV][Config] Use Local Core enabled; peers=\(peers.joined(separator: ", "))")
}
+ // Clear default peers before adding custom Docker peers
+ dash_spv_ffi_config_clear_peers(configPtr)
// Add peers via FFI (supports "ip:port" or bare IP for network-default port)
for addr in peers {
addr.withCString { cstr in
@@ -135,9 +141,9 @@ class SPVClient: @unchecked Sendable {
}
private static func readLocalCorePeers() -> [String] {
- // If no override is set, default to 127.0.0.1 and let FFI pick port by network
+ // If no override is set, default to dashmate Docker Core P2P port
let raw = UserDefaults.standard.string(forKey: "corePeerAddresses")?.trimmingCharacters(in: .whitespacesAndNewlines)
- let list = (raw?.isEmpty == false ? raw! : "127.0.0.1")
+ let list = (raw?.isEmpty == false ? raw! : "127.0.0.1:20001")
return list
.split(separator: ",")
.map { $0.trimmingCharacters(in: .whitespaces) }
@@ -190,6 +196,25 @@ class SPVClient: @unchecked Sendable {
config = nil
}
+ // MARK: - Broadcast Transactions
+
+ func broadcastTransaction(_ transactionData: Data) throws {
+ try transactionData.withUnsafeBytes { (ptr: UnsafeRawBufferPointer) in
+ guard let txBytes = ptr.bindMemory(to: UInt8.self).baseAddress else {
+ throw SPVError.transactionBroadcastFailed("Invalid transaction data pointer")
+ }
+ let result = dash_spv_ffi_client_broadcast_transaction(
+ client,
+ txBytes,
+ UInt(transactionData.count)
+ )
+
+ if result != 0 {
+ throw SPVError.transactionBroadcastFailed(SPVClient.getLastDashFFIError())
+ }
+ }
+ }
+
// MARK: - Synchronization
func startSync() async throws {
@@ -235,6 +260,7 @@ public enum SPVError: LocalizedError {
case alreadySyncing
case syncFailed(String)
case storageOperationFailed(String)
+ case transactionBroadcastFailed(String)
public var errorDescription: String? {
switch self {
@@ -254,6 +280,8 @@ public enum SPVError: LocalizedError {
return "Sync failed: \(reason)"
case let .storageOperationFailed(reason):
return reason
+ case let .transactionBroadcastFailed(reason):
+ return "Transaction broadcast failed: \(reason)"
}
}
}
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Services/WalletService.swift b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Services/WalletService.swift
index ba19cdb52bb..aea6a23924d 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Services/WalletService.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Services/WalletService.swift
@@ -104,6 +104,9 @@ public class WalletService: ObservableObject {
private var spvClient: SPVClient
public private(set) var walletManager: CoreWalletManager
+ // InstantSend lock storage for asset lock flow
+ private let instantLockStore = InstantLockStore()
+
public init(modelContainer: ModelContainer, network: AppNetwork) {
self.modelContainer = modelContainer
self.network = network
@@ -236,6 +239,19 @@ public class WalletService: ObservableObject {
self.initializeNewSPVClient()
}
+ // MARK: - Transaction Broadcasting
+
+ /// Broadcast a raw transaction on the Core P2P network.
+ public func broadcastTransaction(_ transactionData: Data) throws {
+ try spvClient.broadcastTransaction(transactionData)
+ }
+
+ /// Wait for an InstantSend lock for a specific transaction.
+ /// Returns the serialized IS lock bytes when received, or throws on timeout.
+ public func waitForInstantLock(txid: Data, timeout: TimeInterval = 30) async throws -> Data {
+ try await instantLockStore.waitForLock(txid: txid, timeout: timeout)
+ }
+
public func clearSpvStorage() {
if syncProgress.state.isRunning() {
print("[SPV][Clear] Sync task is running, cannot clear storage")
@@ -314,7 +330,9 @@ public class WalletService: ObservableObject {
func onBlocksProcessed(_ height: UInt32, _ hash: Data, _ newAddressCount: UInt32) {}
func onMasternodeStateUpdated(_ height: UInt32) {}
func onChainLockReceived(_ height: UInt32, _ hash: Data, _ signature: Data, _ validated: Bool) {}
- func onInstantLockReceived(_ txid: Data, _ instantLockData: Data, _ validated: Bool) {}
+ func onInstantLockReceived(_ txid: Data, _ instantLockData: Data, _ validated: Bool) {
+ walletService.instantLockStore.store(txid: txid, lockData: instantLockData)
+ }
func onSyncManagerError(_ manager: SPVSyncManager, _ errorMsg: String) {
SDKLogger.error("Sync manager \(manager) error: \(errorMsg)")
@@ -393,3 +411,45 @@ extension Data {
return map { String(format: "%02hhx", $0) }.joined()
}
}
+
+// MARK: - InstantSend Lock Store
+
+/// Thread-safe store for InstantSend lock data, keyed by txid.
+/// Supports async waiting for a specific txid's IS lock to arrive.
+internal final class InstantLockStore: @unchecked Sendable {
+ private var locks: [Data: Data] = [:]
+ private var continuations: [Data: CheckedContinuation] = [:]
+ private let queue = DispatchQueue(label: "com.dash.instantlock-store")
+
+ /// Store an IS lock. Resumes waiter if one exists for this txid.
+ func store(txid: Data, lockData: Data) {
+ var cont: CheckedContinuation?
+ queue.sync {
+ locks[txid] = lockData
+ cont = continuations.removeValue(forKey: txid)
+ }
+ cont?.resume(returning: lockData)
+ }
+
+ /// Wait for an IS lock for a specific txid.
+ /// Returns immediately if already cached, otherwise polls until received or timeout.
+ func waitForLock(txid: Data, timeout: TimeInterval = 30) async throws -> Data {
+ // Check if already available
+ if let existing = queue.sync(execute: { locks[txid] }) {
+ return existing
+ }
+
+ // Poll-based approach — simpler and avoids continuation resume races
+ let deadline = Date().addingTimeInterval(timeout)
+ while Date() < deadline {
+ try await Task.sleep(nanoseconds: 250_000_000) // 250ms
+ if let existing = queue.sync(execute: { locks[txid] }) {
+ return existing
+ }
+ }
+
+ throw SPVError.transactionBroadcastFailed(
+ "InstantSend lock timeout after \(Int(timeout))s for txid \(txid.hexString)"
+ )
+ }
+}
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift
index c9535a7e207..7638b7f439c 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift
@@ -105,6 +105,159 @@ public class CoreWalletManager: ObservableObject {
return wallet
}
+ /// Add a new account to a wallet.
+ public func addAccount(to wallet: HDWallet, type: AccountType, index: UInt32, keyClass: UInt32 = 0) throws {
+ guard let sdkWallet = try sdkWalletManager.getWallet(id: wallet.walletId) else {
+ throw WalletError.walletError("Wallet not found")
+ }
+ if type == .platformPayment {
+ try sdkWallet.addPlatformPaymentAccount(accountIndex: index, keyClass: keyClass)
+ } else {
+ _ = try sdkWallet.addAccount(type: type, index: index)
+ }
+ }
+
+ // MARK: - Asset Lock Transaction
+
+ /// Result of building an asset lock transaction.
+ public struct AssetLockTransactionResult {
+ /// Serialized transaction bytes.
+ public let transactionBytes: Data
+ /// Index of the asset lock output in the transaction.
+ public let outputIndex: UInt32
+ /// One-time private key for the asset lock proof (32 bytes).
+ public let privateKey: Data
+ /// Actual fee paid in duffs.
+ public let fee: UInt64
+ }
+
+ /// Asset lock funding type.
+ public enum AssetLockFundingType: UInt32 {
+ case identityRegistration = 0
+ case identityTopUp = 1
+ case identityTopUpNotBound = 2
+ case identityInvitation = 3
+ case assetLockAddressTopUp = 4
+ case assetLockShieldedAddressTopUp = 5
+ }
+
+ /// Build and sign an asset lock transaction for Core → Platform transfers.
+ ///
+ /// Creates a Core special transaction (type 8) with AssetLockPayload that locks
+ /// Dash for Platform credits.
+ ///
+ /// - Parameters:
+ /// - wallet: The wallet to fund from.
+ /// - accountIndex: BIP44 account index (typically 0).
+ /// - fundingType: The type of asset lock funding account for key derivation.
+ /// - identityIndex: Identity index for key derivation (0 for new).
+ /// - creditOutputs: Array of (scriptPubKey, amount) pairs for platform credit outputs.
+ /// - feePerKb: Fee rate in duffs per kilobyte (0 for default).
+ /// - Returns: `AssetLockTransactionResult` with tx bytes, output index, private key, and fee.
+ public func buildAssetLockTransaction(
+ for wallet: HDWallet,
+ accountIndex: UInt32 = 0,
+ fundingType: AssetLockFundingType = .assetLockAddressTopUp,
+ identityIndex: UInt32 = 0,
+ creditOutputs: [(scriptPubKey: Data, amount: UInt64)],
+ feePerKb: UInt64 = 1000
+ ) throws -> AssetLockTransactionResult {
+ guard let sdkWallet = try sdkWalletManager.getWallet(id: wallet.walletId) else {
+ throw WalletError.walletError("Wallet not found")
+ }
+
+ let count = creditOutputs.count
+ guard count > 0 else {
+ throw WalletError.walletError("At least one credit output required")
+ }
+
+ // Concatenate all scripts into a single contiguous buffer
+ // and build an array of pointers into it
+ var scriptLens: [Int] = creditOutputs.map { $0.scriptPubKey.count }
+ var amounts: [UInt64] = creditOutputs.map { $0.amount }
+ var concatenatedScripts = Data()
+ for output in creditOutputs {
+ concatenatedScripts.append(output.scriptPubKey)
+ }
+
+ var feeOut: UInt64 = 0
+ var txBytesOut: UnsafeMutablePointer? = nil
+ var txLenOut: Int = 0
+ var outputIndexOut: UInt32 = 0
+ var privateKeyOut: (UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8,
+ UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8,
+ UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8,
+ UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8) =
+ (0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0)
+ var ffiError = FFIError()
+
+ // Build pointers inside withUnsafeBytes so they remain valid
+ let success = concatenatedScripts.withUnsafeBytes { allScriptsBuffer -> Bool in
+ guard let allScriptsBase = allScriptsBuffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
+ return false
+ }
+ // Build array of pointers into the concatenated buffer
+ var scriptPtrs: [UnsafePointer?] = []
+ var offset = 0
+ for len in scriptLens {
+ scriptPtrs.append(allScriptsBase.advanced(by: offset))
+ offset += len
+ }
+
+ return scriptPtrs.withUnsafeMutableBufferPointer { scriptPtrsBuffer in
+ scriptLens.withUnsafeMutableBufferPointer { scriptLensBuffer in
+ amounts.withUnsafeMutableBufferPointer { amountsBuffer in
+ wallet_build_and_sign_asset_lock_transaction(
+ sdkWalletManager.handle,
+ sdkWallet.handle,
+ accountIndex,
+ fundingType.rawValue,
+ identityIndex,
+ scriptPtrsBuffer.baseAddress,
+ scriptLensBuffer.baseAddress,
+ amountsBuffer.baseAddress,
+ count,
+ feePerKb,
+ &feeOut,
+ &txBytesOut,
+ &txLenOut,
+ &outputIndexOut,
+ &privateKeyOut,
+ &ffiError
+ )
+ }
+ }
+ }
+ }
+
+ guard success else {
+ let msg = ffiError.message != nil ? String(cString: ffiError.message!) : "Unknown error"
+ if ffiError.message != nil {
+ error_message_free(ffiError.message)
+ }
+ throw WalletError.walletError("Asset lock transaction failed: \(msg)")
+ }
+
+ // Copy transaction bytes
+ let txData: Data
+ if let ptr = txBytesOut, txLenOut > 0 {
+ txData = Data(bytes: ptr, count: txLenOut)
+ transaction_bytes_free(ptr)
+ } else {
+ throw WalletError.walletError("No transaction bytes returned")
+ }
+
+ // Copy private key from tuple to Data
+ let privateKeyData = withUnsafeBytes(of: privateKeyOut) { Data($0) }
+
+ return AssetLockTransactionResult(
+ transactionBytes: txData,
+ outputIndex: outputIndexOut,
+ privateKey: privateKeyData,
+ fee: feeOut
+ )
+ }
+
public func deleteWallet(_ wallet: HDWallet) async throws {
let walletId = wallet.id
@@ -244,6 +397,11 @@ public class CoreWalletManager: ObservableObject {
managed = collection.getProviderOperatorKeysAccount()
case .providerPlatformKeys:
managed = collection.getProviderPlatformKeysAccount()
+ case .dashPayReceivingFunds, .dashPayExternalAccount:
+ managed = nil
+ case .platformPayment:
+ // Platform Payment uses ManagedPlatformAccount, handled separately below
+ managed = nil
}
let appNetwork = AppNetwork(network: sdkWalletManager.network)
@@ -252,7 +410,27 @@ public class CoreWalletManager: ObservableObject {
var externalDetails: [AddressDetail] = []
var internalDetails: [AddressDetail] = []
var ffiType = FFIAccountType(rawValue: 0)
- if let m = managed {
+
+ // Special handling for Platform Payment accounts — encode as bech32m
+ if accountInfo.category == .platformPayment {
+ ffiType = FFIAccountType(rawValue: AccountType.platformPayment.rawValue)
+ let networkValue: UInt32 = {
+ switch appNetwork {
+ case .mainnet: return 0
+ case .testnet: return 1
+ case .regtest: return 2
+ case .devnet: return 3
+ }
+ }()
+ if let platformAccount = collection.getPlatformPaymentAccount(accountIndex: accountInfo.index ?? 0, keyClass: 0),
+ let pool = platformAccount.getAddressPool(),
+ let infos = try? pool.getAddresses(from: 0, to: 0) {
+ externalDetails = infos.compactMap { info in
+ let bech32Address = Self.encodePlatformAddress(scriptPubKey: info.scriptPubKey, networkValue: networkValue) ?? info.address
+ return AddressDetail(address: bech32Address, index: info.index, path: info.path, isUsed: info.used, publicKey: info.publicKey?.map { String(format: "%02x", $0) }.joined() ?? "")
+ }
+ }
+ } else if let m = managed {
ffiType = FFIAccountType(rawValue: m.accountType?.rawValue ?? 0)
// Query all generated addresses (0 to 0 means "all addresses" in FFI)
if let pool = m.getExternalAddressPool(), let infos = try? pool.getAddresses(from: 0, to: 0) {
@@ -316,7 +494,8 @@ public class CoreWalletManager: ObservableObject {
case .coinjoin:
let idx = (accountInfo.index ?? 1000) - 1000
return (.coinJoin, UInt32(idx), "m/9'/\(coinType)/4'/\(idx)'")
- case .identityRegistration, .identityInvitation, .identityTopupNotBound, .identityTopup:
+ case .identityRegistration, .identityInvitation, .identityTopupNotBound, .identityTopup,
+ .dashPayReceivingFunds, .dashPayExternalAccount, .platformPayment:
return nil
}
}()
@@ -360,10 +539,33 @@ public class CoreWalletManager: ObservableObject {
return "m/9'/\(coinType)/3'/3'/x"
case .providerPlatformKeys:
return "m/9'/\(coinType)/3'/4'/x"
+ case .dashPayReceivingFunds:
+ return "m/9'/\(coinType)/5'/0'/x"
+ case .dashPayExternalAccount:
+ return "m/9'/\(coinType)/5'/0'/x"
+ case .platformPayment:
+ return "m/9'/\(coinType)/15'/\(index ?? 0)'/x"
}
}
+ /// Encode a P2PKH scriptPubKey as a bech32m platform address (DIP-17/18).
+ private static func encodePlatformAddress(scriptPubKey: Data, networkValue: UInt32) -> String? {
+ let result = scriptPubKey.withUnsafeBytes { buffer -> DashSDKResult in
+ guard let base = buffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
+ return DashSDKResult()
+ }
+ return dash_sdk_encode_platform_address(base, UInt32(scriptPubKey.count), networkValue)
+ }
+ guard result.error == nil, let dataPtr = result.data else {
+ if let error = result.error { dash_sdk_error_free(error) }
+ return nil
+ }
+ let str = String(cString: dataPtr.assumingMemoryBound(to: CChar.self))
+ dash_sdk_string_free(dataPtr)
+ return str
+ }
+
// Removed old FFI-based helper; using SwiftDashSDK wrappers instead
/// Get all accounts for a wallet from the FFI wallet manager
@@ -446,6 +648,19 @@ public class CoreWalletManager: ObservableObject {
list.append(AccountInfo(category: .providerPlatformKeys, label: "Provider Platform Keys (EdDSA)", balance: b, addressCount: (0, 0)))
}
+ // Platform Payment (DIP-17)
+ if collection.hasPlatformPaymentAccounts {
+ for accountIdx in 0..
+ internal let handle: UnsafeMutablePointer
private let ownsHandle: Bool
// MARK: - Static Methods
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift
index 93137dd4ab4..ab34303e23b 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift
@@ -3,7 +3,7 @@ import DashSDKFFI
/// Swift wrapper for wallet manager that manages multiple wallets
public class WalletManager {
- private let handle: UnsafeMutablePointer
+ internal let handle: UnsafeMutablePointer
internal let network: KeyWalletNetwork
private let ownsHandle: Bool
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/SDK.swift b/packages/swift-sdk/Sources/SwiftDashSDK/SDK.swift
index e2459e90175..5f0c88ace12 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/SDK.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/SDK.swift
@@ -145,7 +145,7 @@ public final class SDK: @unchecked Sendable {
if let override = UserDefaults.standard.string(forKey: "platformDAPIAddresses"), !override.isEmpty {
return override
}
- return "http://127.0.0.1:1443"
+ return "http://127.0.0.1:2443"
}
/// Create a new SDK instance with trusted setup
@@ -154,39 +154,27 @@ public final class SDK: @unchecked Sendable {
/// data contracts from trusted HTTP endpoints instead of requiring proof verification.
/// This is suitable for mobile applications where proof verification would be resource-intensive.
public init(network: Network) throws {
- print("🔵 SDK.init: Creating SDK with network: \(network)")
var config = DashSDKConfig()
-
- // Map network - in C enums, Swift imports them as raw values
config.network = network
- print("🔵 SDK.init: Network config set to: \(config.network)")
-
- // Default to SDK-provided addresses; may override below
config.dapi_addresses = nil
-
config.skip_asset_lock_proof_verification = false
config.request_retry_count = 1
config.request_timeout_ms = 8000 // 8 seconds
- // Create SDK with trusted setup
- print("🔵 SDK.init: Creating SDK with trusted setup...")
+ // Create SDK with trusted setup — Rust side auto-detects local/regtest
+ // and uses the quorum sidecar at localhost:22444 instead of remote endpoints
let result: DashSDKResult
- // Force local DAPI regardless of selected network when enabled
- let forceLocal = UserDefaults.standard.bool(forKey: "useLocalhostPlatform")
+ let forceLocal = UserDefaults.standard.bool(forKey: "useDockerSetup")
if forceLocal {
let localAddresses = Self.platformDAPIAddresses
- print("🔵 SDK.init: Using local DAPI addresses: \(localAddresses)")
result = localAddresses.withCString { addressesCStr -> DashSDKResult in
var mutableConfig = config
mutableConfig.dapi_addresses = addressesCStr
- print("🔵 SDK.init: Calling dash_sdk_create_trusted...")
return dash_sdk_create_trusted(&mutableConfig)
}
} else {
- print("🔵 SDK.init: Using default network addresses")
result = dash_sdk_create_trusted(&config)
}
- print("🔵 SDK.init: dash_sdk_create_trusted returned")
// Check for errors
if result.error != nil {
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/AppState.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/AppState.swift
index a7816f3bb58..df6365d1078 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/AppState.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/AppState.swift
@@ -26,21 +26,20 @@ class AppState: ObservableObject {
@Published var dataStatistics: (identities: Int, documents: Int, contracts: Int, tokenBalances: Int)?
- @Published var useLocalPlatform: Bool {
+ @Published var useDockerSetup: Bool {
didSet {
- UserDefaults.standard.set(useLocalPlatform, forKey: "useLocalhostPlatform")
- // Maintain backward-compat key for older SDK builds
- UserDefaults.standard.set(useLocalPlatform, forKey: "useLocalhost")
+ UserDefaults.standard.set(useDockerSetup, forKey: "useDockerSetup")
+ // Write to legacy keys so SDK.swift and SPVClient.swift pick them up
+ UserDefaults.standard.set(useDockerSetup, forKey: "useLocalhostPlatform")
+ UserDefaults.standard.set(useDockerSetup, forKey: "useLocalhostCore")
+ UserDefaults.standard.set(useDockerSetup, forKey: "useLocalhost")
Task { await switchNetwork(to: currentNetwork) }
}
}
- @Published var useLocalCore: Bool {
- didSet {
- UserDefaults.standard.set(useLocalCore, forKey: "useLocalhostCore")
- // TODO: Reconfigure SPV client peers when supported
- }
- }
+ /// Backward-compat computed properties (read-only)
+ var useLocalPlatform: Bool { useDockerSetup }
+ var useLocalCore: Bool { useDockerSetup }
private let testSigner = TestSigner()
private var dataManager: DataManager?
@@ -54,12 +53,17 @@ class AppState: ObservableObject {
} else {
self.currentNetwork = .testnet
}
- // Migration: if legacy key set and new keys absent, propagate
- let legacyLocal = UserDefaults.standard.bool(forKey: "useLocalhost")
- let hasPlatformKey = UserDefaults.standard.object(forKey: "useLocalhostPlatform") != nil
- let hasCoreKey = UserDefaults.standard.object(forKey: "useLocalhostCore") != nil
- self.useLocalPlatform = hasPlatformKey ? UserDefaults.standard.bool(forKey: "useLocalhostPlatform") : legacyLocal
- self.useLocalCore = hasCoreKey ? UserDefaults.standard.bool(forKey: "useLocalhostCore") : legacyLocal
+ // Migration: if legacy keys set, propagate to new unified key
+ if let _ = UserDefaults.standard.object(forKey: "useDockerSetup") {
+ self.useDockerSetup = UserDefaults.standard.bool(forKey: "useDockerSetup")
+ } else {
+ // Fall back to legacy keys
+ let legacyLocal = UserDefaults.standard.bool(forKey: "useLocalhostPlatform")
+ || UserDefaults.standard.bool(forKey: "useLocalhost")
+ self.useDockerSetup = legacyLocal
+ // Persist so SDK.swift can read it (didSet doesn't fire in init)
+ UserDefaults.standard.set(legacyLocal, forKey: "useDockerSetup")
+ }
}
func initializeSDK(modelContext: ModelContext) {
@@ -74,21 +78,14 @@ class AppState: ObservableObject {
isLoading = true
NSLog("🔵 AppState: Initializing SDK library...")
- // Initialize the SDK library
SDK.initialize()
-
- // Enable debug logging to see gRPC endpoints
SDK.enableLogging(level: .debug)
- NSLog("🔵 AppState: Enabled debug logging for gRPC requests")
- NSLog("🔵 AppState: Creating SDK instance for network: \(currentNetwork)")
- // Create SDK instance for current network
let sdkNetwork: DashSDKNetwork = currentNetwork.sdkNetwork
- NSLog("🔵 AppState: SDK network value: \(sdkNetwork)")
-
+ NSLog("🔵 AppState: Creating SDK for network=\(currentNetwork), docker=\(useDockerSetup)")
let newSDK = try SDK(network: sdkNetwork)
sdk = newSDK
- NSLog("✅ AppState: SDK created successfully with handle: \(newSDK.handle != nil ? "exists" : "nil")")
+ NSLog("✅ AppState: SDK created successfully")
// Load known contracts into the SDK's trusted provider
await loadKnownContractsIntoSDK(sdk: newSDK, modelContext: modelContext)
@@ -98,7 +95,9 @@ class AppState: ObservableObject {
isLoading = false
} catch {
+ sdk = nil
showError(message: "Failed to initialize SDK: \(error.localizedDescription)")
+ NSLog("❌ AppState.initializeSDK: \(error)")
isLoading = false
}
}
@@ -200,7 +199,9 @@ class AppState: ObservableObject {
isLoading = false
} catch {
+ sdk = nil
showError(message: "Failed to switch network: \(error.localizedDescription)")
+ NSLog("❌ AppState.switchNetwork: \(error)")
isLoading = false
}
}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/ContentView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/ContentView.swift
index 80ae6121f40..883383edcfc 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/ContentView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/ContentView.swift
@@ -2,7 +2,7 @@ import SwiftUI
import SwiftDashSDK
import SwiftData
-enum RootTab: Hashable {
+enum RootTab: String, Hashable {
case sync, wallets, friends, platform, settings
}
@@ -10,7 +10,13 @@ struct ContentView: View {
@EnvironmentObject var unifiedState: UnifiedAppState
@EnvironmentObject var walletService: WalletService
- @State private var selectedTab: RootTab = .sync
+ @State private var selectedTab: RootTab = {
+ if let saved = UserDefaults.standard.string(forKey: "selectedTab"),
+ let tab = RootTab(rawValue: saved) {
+ return tab
+ }
+ return .sync
+ }()
var body: some View {
if !unifiedState.isInitialized {
@@ -82,6 +88,9 @@ struct ContentView: View {
}
.tag(RootTab.settings)
}
+ .onChange(of: selectedTab) { _, newTab in
+ UserDefaults.standard.set(newTab.rawValue, forKey: "selectedTab")
+ }
.overlay(alignment: .top) {
if walletService.syncProgress.state.isSyncing() {
GlobalSyncIndicator(showDetails: selectedTab == .sync && unifiedState.showWalletsSyncDetails)
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Models/DashAddress.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Models/DashAddress.swift
index 82fca710ea3..26383579677 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Models/DashAddress.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Models/DashAddress.swift
@@ -20,18 +20,19 @@ struct DashAddress {
let data = decoded.data
// Check HRP validity
- let validPlatformHrp = (network == .mainnet) ? "dashevo" : "tdashevo"
- let validOrchardHrp = (network == .mainnet) ? "dash" : "tdash"
+ // Platform and Orchard share the same HRP: "dash" (mainnet) / "tdash" (testnet/regtest)
+ // Distinguished by type byte: 0x00/0xb0/0x80 = platform, 0x10 = orchard
+ let validHrp = (network == .mainnet) ? "dash" : "tdash"
- if hrp == validPlatformHrp && data.count == 21 {
- // Platform address: type byte 0xb0 or 0x80 + 20-byte hash
+ if hrp == validHrp && data.count == 21 {
+ // Platform address: type byte 0x00 (P2PKH) or 0xb0/0x80 + 20-byte hash
let typeByte = data[0]
- if typeByte == 0xb0 || typeByte == 0x80 {
+ if typeByte == 0x00 || typeByte == 0xb0 || typeByte == 0x80 {
return DashAddress(type: .platform(data), displayString: input)
}
}
- if hrp == validOrchardHrp && data.count >= 2 {
+ if hrp == validHrp && data.count >= 2 {
let typeByte = data[0]
if typeByte == 0x10 {
// Orchard address: 0x10 type byte + 43 bytes raw address
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Services/ZKSyncService.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Services/ZKSyncService.swift
new file mode 100644
index 00000000000..fe924f84094
--- /dev/null
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Services/ZKSyncService.swift
@@ -0,0 +1,164 @@
+// ZKSyncService.swift
+// SwiftExampleApp
+//
+// App-level service that performs periodic ZK shielded sync (notes + nullifiers)
+// with UI status display. Follows the same pattern as PlatformBalanceSyncService.
+
+import Foundation
+import SwiftUI
+import SwiftDashSDK
+
+/// Observable service managing periodic ZK shielded pool sync.
+///
+/// Syncs every 30 seconds while the app is active, or on manual pull-to-refresh.
+/// Persists `shieldedBalance` and `orchardAddress` in UserDefaults for display across launches.
+@MainActor
+class ZKSyncService: ObservableObject {
+ // MARK: - Published State
+
+ /// Whether a sync is currently in progress.
+ @Published var isSyncing: Bool = false
+
+ /// Last successful sync time (local clock).
+ @Published var lastSyncTime: Date?
+
+ /// Current shielded balance (in credits).
+ @Published var shieldedBalance: UInt64 = 0
+
+ /// Orchard display address (Bech32m-encoded).
+ @Published var orchardAddress: String?
+
+ /// Number of new notes found in the most recent sync.
+ @Published var notesSynced: Int = 0
+
+ /// Number of nullifiers spent in the most recent sync.
+ @Published var nullifiersSpent: Int = 0
+
+ /// Cumulative notes synced since launch.
+ @Published var totalNotesSynced: Int = 0
+
+ /// Cumulative nullifiers spent since launch.
+ @Published var totalNullifiersSpent: Int = 0
+
+ /// Total number of successful syncs since launch.
+ @Published var syncCountSinceLaunch: Int = 0
+
+ /// Last error message, cleared on successful sync.
+ @Published var lastError: String?
+
+ // MARK: - Persisted State
+
+ /// Persisted shielded balance (credits).
+ private var persistedBalance: UInt64 {
+ get { UInt64(UserDefaults.standard.integer(forKey: "\(keyPrefix)_balance")) }
+ set { UserDefaults.standard.set(Int(newValue), forKey: "\(keyPrefix)_balance") }
+ }
+
+ /// Persisted orchard address string.
+ private var persistedOrchardAddress: String? {
+ get { UserDefaults.standard.string(forKey: "\(keyPrefix)_orchardAddress") }
+ set { UserDefaults.standard.set(newValue, forKey: "\(keyPrefix)_orchardAddress") }
+ }
+
+ /// UserDefaults key prefix scoped to network.
+ private var keyPrefix: String {
+ "zkSync_\(networkName)"
+ }
+
+ private var networkName: String = "testnet"
+
+ // MARK: - Lifecycle
+
+ /// Initialize for a network. Restores persisted balance and address.
+ /// The actual periodic loop is managed by UnifiedAppState.
+ func startPeriodicSync(network: AppNetwork) {
+ networkName = network.rawValue
+
+ // Restore persisted state from previous session
+ let savedBalance = persistedBalance
+ if savedBalance > 0 {
+ shieldedBalance = savedBalance
+ }
+
+ let savedAddress = persistedOrchardAddress
+ if let addr = savedAddress, !addr.isEmpty {
+ orchardAddress = addr
+ }
+ }
+
+ /// Perform a single ZK shielded sync (notes then nullifiers).
+ ///
+ /// - Parameters:
+ /// - sdk: The initialized SDK instance.
+ /// - shieldedService: The shielded service with an initialized pool client.
+ func performSync(sdk: SDK, shieldedService: ShieldedService) async {
+ guard !isSyncing else { return }
+ guard let poolClient = shieldedService.poolClient else { return }
+
+ isSyncing = true
+ lastError = nil
+
+ do {
+ // Step 1: Sync notes
+ let notesResult = try await poolClient.syncNotes(sdk: sdk)
+ let newNotes = notesResult.newNotes
+
+ // Step 2: Sync nullifiers
+ let nullifiersResult = try await poolClient.syncNullifiers(sdk: sdk)
+ let spentCount = nullifiersResult.spentCount
+ let finalBalance = nullifiersResult.balance
+
+ // Update per-sync stats
+ notesSynced = newNotes
+ nullifiersSpent = spentCount
+
+ // Update cumulative stats
+ totalNotesSynced += newNotes
+ totalNullifiersSpent += spentCount
+
+ // Update balance and address
+ shieldedBalance = finalBalance
+ orchardAddress = shieldedService.orchardDisplayAddress
+
+ // Persist balance and address
+ persistedBalance = finalBalance
+ persistedOrchardAddress = shieldedService.orchardDisplayAddress
+
+ // Update sync metadata
+ lastSyncTime = Date()
+ syncCountSinceLaunch += 1
+
+ SDKLogger.log(
+ "ZK sync complete: \(newNotes) notes, \(spentCount) spent, balance: \(finalBalance)",
+ minimumLevel: .medium
+ )
+
+ } catch {
+ lastError = error.localizedDescription
+ SDKLogger.log(
+ "ZK sync error: \(error.localizedDescription)",
+ minimumLevel: .medium
+ )
+ }
+
+ isSyncing = false
+ }
+
+ /// Reset all state (e.g. on wallet deletion or network switch).
+ func reset() {
+ isSyncing = false
+ lastSyncTime = nil
+ shieldedBalance = 0
+ orchardAddress = nil
+ notesSynced = 0
+ nullifiersSpent = 0
+ totalNotesSynced = 0
+ totalNullifiersSpent = 0
+ syncCountSinceLaunch = 0
+ lastError = nil
+
+ // Clear persisted state
+ persistedBalance = 0
+ persistedOrchardAddress = nil
+ }
+}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift
index 1d52e08105b..c2f0f583f7f 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift
@@ -1,8 +1,11 @@
import Foundation
+import CommonCrypto
import SwiftDashSDK
/// Available send flow types based on source and destination.
enum SendFlow: Equatable {
+ case coreToPlatform // Asset lock / transfer to platform address
+ case coreToCore // Standard Core transaction
case platformToShielded // Shield credits
case shieldedToShielded // Private transfer
case shieldedToPlatform // Unshield
@@ -10,6 +13,8 @@ enum SendFlow: Equatable {
var displayName: String {
switch self {
+ case .coreToPlatform: return "Transfer to Platform"
+ case .coreToCore: return "Core Transfer"
case .platformToShielded: return "Shield Credits"
case .shieldedToShielded: return "Shielded Transfer"
case .shieldedToPlatform: return "Unshield"
@@ -19,6 +24,8 @@ enum SendFlow: Equatable {
var iconName: String {
switch self {
+ case .coreToPlatform: return "arrow.up.to.line"
+ case .coreToCore: return "arrow.right"
case .platformToShielded: return "lock.shield"
case .shieldedToShielded: return "arrow.left.arrow.right"
case .shieldedToPlatform: return "lock.open"
@@ -26,9 +33,11 @@ enum SendFlow: Equatable {
}
}
- /// Approximate fee in credits for this flow type.
+ /// Approximate fee in duffs for this flow type.
var estimatedFee: UInt64 {
switch self {
+ case .coreToPlatform: return 100_000 // ~0.001 DASH
+ case .coreToCore: return 100_000 // ~0.001 DASH
case .platformToShielded: return 200_000
case .shieldedToShielded: return 300_000
case .shieldedToPlatform: return 300_000
@@ -58,8 +67,8 @@ class SendViewModel: ObservableObject {
@Published var error: String?
@Published var successMessage: String?
- // Source preference (for demo UI)
- @Published var preferShieldedSource = true
+ // Source preference (for demo UI — defaults to Core since shielded requires setup)
+ @Published var preferShieldedSource = false
private let network: AppNetwork
@@ -95,9 +104,10 @@ class SendViewModel: ObservableObject {
case .orchard:
detectedFlow = preferShieldedSource ? .shieldedToShielded : .platformToShielded
case .platform:
- detectedFlow = .shieldedToPlatform
+ // If we have shielded balance, unshield; otherwise transfer from Core
+ detectedFlow = preferShieldedSource ? .shieldedToPlatform : .coreToPlatform
case .core:
- detectedFlow = .shieldedToCore
+ detectedFlow = preferShieldedSource ? .shieldedToCore : .coreToCore
case .unknown:
detectedFlow = nil
}
@@ -110,13 +120,19 @@ class SendViewModel: ObservableObject {
func executeSend(
sdk: SDK,
shieldedService: ShieldedService,
+ walletService: WalletService,
platformState: AppState,
wallet: HDWallet
) async {
guard let flow = detectedFlow, let amount = amount else { return }
- guard let poolClient = shieldedService.poolClient else {
- error = "Shielded pool not initialized"
- return
+
+ // Shielded flows need pool client; Core flows don't
+ let needsPoolClient = flow != .coreToPlatform && flow != .coreToCore
+ if needsPoolClient {
+ guard shieldedService.poolClient != nil else {
+ error = "Shielded pool not initialized"
+ return
+ }
}
isSending = true
@@ -127,7 +143,7 @@ class SendViewModel: ObservableObject {
do {
switch flow {
case .platformToShielded:
- let bundle = try await poolClient.buildShieldBundle(amount: amount)
+ let bundle = try await shieldedService.poolClient!.buildShieldBundle(amount: amount)
// Find an identity with sufficient balance to fund the shield
guard let identity = platformState.identities.first(where: {
$0.walletId == wallet.walletId &&
@@ -159,7 +175,7 @@ class SendViewModel: ObservableObject {
case .shieldedToShielded:
let parsed = DashAddress.parse(recipientAddress, network: network)
guard case .orchard(let rawAddress) = parsed.type else { return }
- let bundle = try await poolClient.buildTransferBundle(
+ let bundle = try await shieldedService.poolClient!.buildTransferBundle(
recipientAddress: rawAddress,
amount: amount
)
@@ -172,7 +188,7 @@ class SendViewModel: ObservableObject {
case .shieldedToPlatform:
let parsed = DashAddress.parse(recipientAddress, network: network)
guard case .platform(let addressBytes) = parsed.type else { return }
- let bundle = try await poolClient.buildUnshieldBundle(
+ let bundle = try await shieldedService.poolClient!.buildUnshieldBundle(
outputAddress: addressBytes,
amount: amount
)
@@ -186,7 +202,7 @@ class SendViewModel: ObservableObject {
case .shieldedToCore:
let parsed = DashAddress.parse(recipientAddress, network: network)
guard case .core(let outputScript) = parsed.type else { return }
- let bundle = try await poolClient.buildWithdrawalBundle(
+ let bundle = try await shieldedService.poolClient!.buildWithdrawalBundle(
outputScript: outputScript,
amount: amount,
coreFeePerByte: 1,
@@ -200,6 +216,62 @@ class SendViewModel: ObservableObject {
outputScript: outputScript
)
successMessage = "Withdrawal submitted"
+
+ case .coreToPlatform:
+ // Core → Platform via asset lock
+ let parsed = DashAddress.parse(recipientAddress, network: network)
+ guard case .platform(let addressBytes) = parsed.type else {
+ error = "Invalid platform address"
+ return
+ }
+
+ // Convert platform address (21 bytes: type + hash) to P2PKH scriptPubKey (25 bytes)
+ // Platform type 0x00 = P2PKH: OP_DUP OP_HASH160 <20-byte-hash> OP_EQUALVERIFY OP_CHECKSIG
+ let creditScript: Data
+ if addressBytes.count == 21 && addressBytes[0] == 0x00 {
+ let pubkeyHash = addressBytes.dropFirst() // 20-byte hash
+ var script = Data([0x76, 0xa9, 0x14]) // OP_DUP OP_HASH160 PUSH20
+ script.append(contentsOf: pubkeyHash)
+ script.append(contentsOf: [0x88, 0xac]) // OP_EQUALVERIFY OP_CHECKSIG
+ creditScript = script
+ } else {
+ // Pass through as-is for other address types
+ creditScript = addressBytes
+ }
+
+ // 1. Build the asset lock transaction
+ let assetLockResult = try walletService.walletManager.buildAssetLockTransaction(
+ for: wallet,
+ creditOutputs: [(scriptPubKey: creditScript, amount: amount)]
+ )
+
+ // 2. Broadcast on Core network
+ try walletService.broadcastTransaction(assetLockResult.transactionBytes)
+
+ // Compute txid from transaction bytes (double SHA256, reversed)
+ let txid = computeTxid(from: assetLockResult.transactionBytes)
+
+ // 3. Wait for InstantSend lock
+ let isLockData = try await walletService.waitForInstantLock(txid: txid, timeout: 30)
+
+ // 4. Submit to Platform
+ let outPoint = buildOutPoint(txid: txid, outputIndex: assetLockResult.outputIndex)
+ _ = try sdk.addresses.topUpAddressFromAssetLock(
+ proofType: .instant,
+ instantLockData: isLockData,
+ transactionData: assetLockResult.transactionBytes,
+ outputIndex: assetLockResult.outputIndex,
+ coreChainLockedHeight: 0,
+ outPoint: outPoint,
+ assetLockPrivateKey: assetLockResult.privateKey,
+ outputs: [Addresses.AddressTransferOutput(addressBytes: addressBytes, amount: amount)]
+ )
+
+ successMessage = "Transfer to Platform complete"
+
+ case .coreToCore:
+ // TODO: Implement standard Core → Core transaction
+ error = "Core to Core transfer not yet implemented"
}
// Refresh shielded balance
@@ -209,4 +281,33 @@ class SendViewModel: ObservableObject {
self.error = error.localizedDescription
}
}
+
+ // MARK: - Helpers
+
+ /// Compute txid from raw transaction bytes (double SHA256, reversed).
+ private func computeTxid(from txBytes: Data) -> Data {
+ var hash1 = Data(count: Int(CC_SHA256_DIGEST_LENGTH))
+ var hash2 = Data(count: Int(CC_SHA256_DIGEST_LENGTH))
+ txBytes.withUnsafeBytes { ptr in
+ hash1.withUnsafeMutableBytes { out in
+ _ = CC_SHA256(ptr.baseAddress, CC_LONG(txBytes.count), out.bindMemory(to: UInt8.self).baseAddress)
+ }
+ }
+ hash1.withUnsafeBytes { ptr in
+ hash2.withUnsafeMutableBytes { out in
+ _ = CC_SHA256(ptr.baseAddress, CC_LONG(hash1.count), out.bindMemory(to: UInt8.self).baseAddress)
+ }
+ }
+ // Txid is the reversed double-SHA256
+ return Data(hash2.reversed())
+ }
+
+ /// Build a 36-byte OutPoint (txid + output index as little-endian u32).
+ private func buildOutPoint(txid: Data, outputIndex: UInt32) -> Data {
+ // OutPoint = txid (32 bytes, internal byte order) + index (4 bytes LE)
+ var outPoint = Data(txid.reversed()) // reversed back to internal order
+ var idx = outputIndex.littleEndian
+ outPoint.append(Data(bytes: &idx, count: 4))
+ return outPoint
+ }
}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AccountListView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AccountListView.swift
index ab14a281b0e..0e8abb6e0f9 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AccountListView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AccountListView.swift
@@ -9,6 +9,7 @@ struct AccountListView: View {
@EnvironmentObject var walletService: WalletService
let wallet: HDWallet
@State private var accounts: [AccountInfo] = []
+ @State private var showAddAccount = false
var body: some View {
ZStack {
@@ -29,7 +30,22 @@ struct AccountListView: View {
loadAccounts()
}
}
- }.task {
+ }
+ .toolbar {
+ ToolbarItem(placement: .navigationBarTrailing) {
+ Button {
+ showAddAccount = true
+ } label: {
+ Image(systemName: "plus")
+ }
+ }
+ }
+ .sheet(isPresented: $showAddAccount) {
+ AddAccountView(wallet: wallet)
+ .environmentObject(walletService)
+ .onDisappear { loadAccounts() }
+ }
+ .task {
loadAccounts()
}
}
@@ -46,7 +62,7 @@ struct AccountRowView: View {
/// Determines if this account type should show balance in UI
var shouldShowBalance: Bool {
switch account.category {
- case .bip44, .bip32, .coinjoin:
+ case .bip44, .bip32, .coinjoin, .platformPayment:
return true
default:
return false
@@ -65,7 +81,10 @@ struct AccountRowView: View {
case .providerVotingKeys: return "Voting"
case .providerOwnerKeys: return "Owner"
case .providerOperatorKeys: return "Operator"
- case .providerPlatformKeys: return "Platform"
+ case .providerPlatformKeys: return "Platform Keys"
+ case .dashPayReceivingFunds: return "DashPay"
+ case .dashPayExternalAccount: return "DashPay Ext"
+ case .platformPayment: return account.index.map { "Payment #\($0)" } ?? "Payment"
}
}
@@ -81,6 +100,9 @@ struct AccountRowView: View {
case .providerOwnerKeys: return "key.horizontal"
case .providerOperatorKeys: return "wrench.and.screwdriver"
case .providerPlatformKeys: return "network"
+ case .dashPayReceivingFunds: return "person.2.circle"
+ case .dashPayExternalAccount: return "person.crop.circle.badge.questionmark"
+ case .platformPayment: return "creditcard.fill"
}
}
@@ -94,6 +116,8 @@ struct AccountRowView: View {
case .providerOwnerKeys: return .pink
case .providerOperatorKeys: return .indigo
case .providerPlatformKeys: return .teal
+ case .dashPayReceivingFunds, .dashPayExternalAccount: return .cyan
+ case .platformPayment: return .green
}
}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AddAccountView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AddAccountView.swift
new file mode 100644
index 00000000000..1f835a4502d
--- /dev/null
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/AddAccountView.swift
@@ -0,0 +1,319 @@
+import SwiftUI
+import SwiftDashSDK
+import SwiftData
+
+/// View for adding a new account to a wallet
+struct AddAccountView: View {
+ @EnvironmentObject var walletService: WalletService
+ @Environment(\.dismiss) private var dismiss
+
+ let wallet: HDWallet
+
+ @State private var selectedAccountType: AddableAccountType = .bip44
+ @State private var accountIndex: String = ""
+ @State private var keyClass: String = "0"
+ @State private var isCreating = false
+ @State private var errorMessage: String?
+ @State private var showError = false
+
+ /// Account types that can be added by the user
+ enum AddableAccountType: String, CaseIterable, Identifiable {
+ case bip44 = "BIP44 (Standard)"
+ case bip32 = "BIP32 (Legacy)"
+ case coinjoin = "CoinJoin (Privacy)"
+ case platformPayment = "Platform Payment"
+ case identityTopup = "Identity Top-up"
+
+ var id: String { rawValue }
+
+ var accountType: AccountType {
+ switch self {
+ case .bip44: return .standardBIP44
+ case .bip32: return .standardBIP32
+ case .coinjoin: return .coinJoin
+ case .platformPayment: return .platformPayment
+ case .identityTopup: return .identityTopUp
+ }
+ }
+
+ var description: String {
+ switch self {
+ case .bip44:
+ return "Standard account for receiving and sending DASH. Recommended for most users."
+ case .bip32:
+ return "Legacy account type for compatibility with older systems."
+ case .coinjoin:
+ return "Privacy-enhanced account for mixing transactions."
+ case .platformPayment:
+ return "Platform payment account (DIP-17) for receiving credits to platform payment addresses."
+ case .identityTopup:
+ return "Account for topping up platform identity credits."
+ }
+ }
+
+ var icon: String {
+ switch self {
+ case .bip44: return "folder.fill"
+ case .bip32: return "tray.full.fill"
+ case .coinjoin: return "shuffle.circle.fill"
+ case .platformPayment: return "creditcard.fill"
+ case .identityTopup: return "arrow.up.circle.fill"
+ }
+ }
+
+ var color: Color {
+ switch self {
+ case .bip44: return .blue
+ case .bip32: return .teal
+ case .coinjoin: return .orange
+ case .platformPayment: return .green
+ case .identityTopup: return .purple
+ }
+ }
+
+ var requiresIndex: Bool {
+ // All these account types require an index
+ return true
+ }
+
+ var indexPlaceholder: String {
+ switch self {
+ case .bip44: return "e.g., 1, 2, 3..."
+ case .bip32: return "e.g., 0, 1, 2..."
+ case .coinjoin: return "e.g., 0, 1, 2..."
+ case .platformPayment: return "e.g., 0, 1, 2..."
+ case .identityTopup: return "Identity index (e.g., 0)"
+ }
+ }
+
+ /// Returns true if this account type needs a key class parameter
+ var requiresKeyClass: Bool {
+ self == .platformPayment
+ }
+
+ /// Returns the derivation path template for this account type
+ func derivationPath(index: UInt32, keyClass: UInt32, isTestnet: Bool) -> String {
+ let coinType = isTestnet ? "1'" : "5'"
+ switch self {
+ case .bip44:
+ return "m/44'/\(coinType)/\(index)'"
+ case .bip32:
+ return "m/\(index)'"
+ case .coinjoin:
+ return "m/9'/\(coinType)/4'/\(index)'"
+ case .platformPayment:
+ return "m/9'/\(coinType)/17'/\(index)'/\(keyClass)'/..."
+ case .identityTopup:
+ return "m/9'/\(coinType)/5'/2'/\(index)'/..."
+ }
+ }
+ }
+
+ var body: some View {
+ NavigationView {
+ Form {
+ // Account Type Selection
+ Section {
+ Picker("Account Type", selection: $selectedAccountType) {
+ ForEach(AddableAccountType.allCases) { type in
+ HStack {
+ Image(systemName: type.icon)
+ .foregroundColor(type.color)
+ Text(type.rawValue)
+ }
+ .tag(type)
+ }
+ }
+ .pickerStyle(.navigationLink)
+ } header: {
+ Text("Account Type")
+ } footer: {
+ Text(selectedAccountType.description)
+ .foregroundColor(.secondary)
+ }
+
+ // Account Index
+ Section {
+ TextField(selectedAccountType.indexPlaceholder, text: $accountIndex)
+ .keyboardType(.numberPad)
+ } header: {
+ Text("Account Index")
+ } footer: {
+ Text("Enter the account index number. Each account type can have multiple accounts with different indices.")
+ .foregroundColor(.secondary)
+ }
+
+ // Key Class (for Platform Payment accounts)
+ if selectedAccountType.requiresKeyClass {
+ Section {
+ TextField("e.g., 0", text: $keyClass)
+ .keyboardType(.numberPad)
+ } header: {
+ Text("Key Class")
+ } footer: {
+ Text("The key class level in the DIP-17 derivation path. Typically 0 for main addresses.")
+ .foregroundColor(.secondary)
+ }
+ }
+
+ // Preview
+ Section("Preview") {
+ VStack(alignment: .leading, spacing: 12) {
+ HStack {
+ Image(systemName: selectedAccountType.icon)
+ .foregroundColor(selectedAccountType.color)
+ .font(.title2)
+
+ VStack(alignment: .leading, spacing: 4) {
+ Text(accountLabel)
+ .font(.headline)
+
+ Text(selectedAccountType.rawValue)
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+
+ Spacer()
+
+ if let index = parsedIndex {
+ Text("#\(index)")
+ .font(.system(.body, design: .monospaced))
+ .foregroundColor(.secondary)
+ }
+ }
+
+ // Derivation Path
+ if parsedIndex != nil {
+ HStack {
+ Text("Path:")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ Text(derivationPath)
+ .font(.system(.caption, design: .monospaced))
+ .foregroundColor(.secondary)
+ }
+ }
+ }
+ .padding(.vertical, 4)
+ }
+
+ // Create Button
+ Section {
+ Button(action: createAccount) {
+ HStack {
+ Spacer()
+ if isCreating {
+ ProgressView()
+ .progressViewStyle(CircularProgressViewStyle())
+ .scaleEffect(0.8)
+ Text("Creating...")
+ } else {
+ Image(systemName: "plus.circle.fill")
+ Text("Create Account")
+ }
+ Spacer()
+ }
+ }
+ .disabled(!canCreate || isCreating)
+ }
+ }
+ .navigationTitle("Add Account")
+ .navigationBarTitleDisplayMode(.inline)
+ .toolbar {
+ ToolbarItem(placement: .navigationBarLeading) {
+ Button("Cancel") {
+ dismiss()
+ }
+ }
+ }
+ .alert("Error", isPresented: $showError) {
+ Button("OK") { }
+ } message: {
+ Text(errorMessage ?? "An unknown error occurred")
+ }
+ }
+ }
+
+ // MARK: - Computed Properties
+
+ private var parsedIndex: UInt32? {
+ guard !accountIndex.isEmpty else { return nil }
+ return UInt32(accountIndex)
+ }
+
+ private var parsedKeyClass: UInt32 {
+ UInt32(keyClass) ?? 0
+ }
+
+ private var canCreate: Bool {
+ parsedIndex != nil
+ }
+
+ private var isTestnet: Bool {
+ wallet.network == .testnet || wallet.network == .regtest || wallet.network == .devnet
+ }
+
+ private var derivationPath: String {
+ guard let index = parsedIndex else { return "" }
+ return selectedAccountType.derivationPath(index: index, keyClass: parsedKeyClass, isTestnet: isTestnet)
+ }
+
+ private var accountLabel: String {
+ guard let index = parsedIndex else {
+ return "Account"
+ }
+
+ switch selectedAccountType {
+ case .bip44:
+ return index == 0 ? "Main Account" : "Account \(index)"
+ case .bip32:
+ return "BIP32 Account \(index)"
+ case .coinjoin:
+ return "CoinJoin Account \(index)"
+ case .platformPayment:
+ return "Platform Payment \(index)"
+ case .identityTopup:
+ return "Top-up Account \(index)"
+ }
+ }
+
+ // MARK: - Actions
+
+ private func createAccount() {
+ guard let index = parsedIndex else { return }
+
+ isCreating = true
+ errorMessage = nil
+
+ Task {
+ do {
+ // Add the account via the wallet manager
+ try walletService.walletManager.addAccount(
+ to: wallet,
+ type: selectedAccountType.accountType,
+ index: index,
+ keyClass: parsedKeyClass
+ )
+
+ await MainActor.run {
+ isCreating = false
+ dismiss()
+ }
+ } catch {
+ await MainActor.run {
+ isCreating = false
+ errorMessage = error.localizedDescription
+ showError = true
+ }
+ }
+ }
+ }
+}
+
+// MARK: - Preview
+
+struct AddAccountView_Previews: PreviewProvider {
+ static var previews: some View {
+ Text("Preview requires wallet context")
+ }
+}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CoreContentView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CoreContentView.swift
index 77dab8ee47b..aa128c9e70b 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CoreContentView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CoreContentView.swift
@@ -6,7 +6,10 @@ struct CoreContentView: View {
@EnvironmentObject var walletService: WalletService
@EnvironmentObject var unifiedAppState: UnifiedAppState
@EnvironmentObject var platformBalanceSyncService: PlatformBalanceSyncService
+ @EnvironmentObject var zkSyncService: ZKSyncService
@State private var showProofDetail = false
+ @State private var showPlatformDetails = false
+ @State private var showZKDetails = false
// Progress values come from WalletService (kept in sync with SPV callbacks)
// Display helpers
@@ -26,7 +29,7 @@ struct CoreContentView: View {
}
private var filterHeightsDisplay: String? {
- let cur = walletService.syncProgress.filters?.currentHeight ?? 0
+ let cur = walletService.syncProgress.filters?.storedHeight ?? 0
let tot = walletService.syncProgress.filters?.targetHeight ?? 0
return heightDisplay(numerator: cur, denominator: tot)
@@ -78,7 +81,11 @@ var body: some View {
CompactSyncRow(
title: "Filters",
- progress: walletService.syncProgress.filters?.percentage ?? 0.0,
+ progress: {
+ let stored = Double(walletService.syncProgress.filters?.storedHeight ?? 0)
+ let target = Double(walletService.syncProgress.filters?.targetHeight ?? 0)
+ return target > 0 ? stored / target : 0.0
+ }(),
value: filterHeightsDisplay
)
@@ -139,6 +146,15 @@ var body: some View {
.foregroundColor(.secondary)
}
Spacer()
+ // Expand/collapse chevron
+ Button {
+ showPlatformDetails.toggle()
+ } label: {
+ Image(systemName: showPlatformDetails ? "chevron.up" : "chevron.down")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+ .buttonStyle(.plain)
}
// Balance summary
@@ -158,145 +174,299 @@ var body: some View {
}
}
- // Active addresses
- HStack {
- Text("Active Addresses")
- .font(.subheadline)
- .foregroundColor(.secondary)
- Spacer()
- Text("\(platformBalanceSyncService.activeAddressCount)")
- .font(.subheadline)
- .fontWeight(.medium)
- }
-
- // Chain tip height
- if platformBalanceSyncService.chainTipHeight > 0 {
+ // Expanded details
+ if showPlatformDetails {
+ // Active addresses
HStack {
- Text("Chain Tip Height")
+ Text("Active Addresses")
.font(.subheadline)
.foregroundColor(.secondary)
Spacer()
- Text(formattedHeight(UInt32(platformBalanceSyncService.chainTipHeight)))
+ Text("\(platformBalanceSyncService.activeAddressCount)")
.font(.subheadline)
.fontWeight(.medium)
}
- }
- // Sync checkpoint (from tree scan)
- if platformBalanceSyncService.checkpointHeight > 0 {
- HStack {
- Text("Sync Checkpoint")
- .font(.subheadline)
- .foregroundColor(.secondary)
- Spacer()
- Text(formattedHeight(UInt32(platformBalanceSyncService.checkpointHeight)))
- .font(.subheadline)
- .foregroundColor(.secondary)
+ // Chain tip height
+ if platformBalanceSyncService.chainTipHeight > 0 {
+ HStack {
+ Text("Chain Tip Height")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ Text(formattedHeight(UInt32(platformBalanceSyncService.chainTipHeight)))
+ .font(.subheadline)
+ .fontWeight(.medium)
+ }
}
- }
- // Last known recent block (for compaction detection)
- HStack {
- Text("Last Recent Block")
- .font(.subheadline)
- .foregroundColor(.secondary)
- Spacer()
- if platformBalanceSyncService.lastKnownRecentBlock > 0 {
- Text(formattedHeight(UInt32(platformBalanceSyncService.lastKnownRecentBlock)))
- .font(.subheadline)
- .foregroundColor(.secondary)
- } else {
- Text("None found")
- .font(.subheadline)
- .foregroundColor(.blue)
- .onTapGesture {
- showProofDetail = true
- }
+ // Sync checkpoint (from tree scan)
+ if platformBalanceSyncService.checkpointHeight > 0 {
+ HStack {
+ Text("Sync Checkpoint")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ Text(formattedHeight(UInt32(platformBalanceSyncService.checkpointHeight)))
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ }
}
- }
- // Block time
- if let blockTime = platformBalanceSyncService.lastSyncBlockTime {
+ // Last known recent block (for compaction detection)
HStack {
- Text("Block Time")
+ Text("Last Recent Block")
.font(.subheadline)
.foregroundColor(.secondary)
Spacer()
- Text(blockTime, style: .date)
- .font(.caption)
- .foregroundColor(.secondary)
- Text(blockTime, style: .time)
- .font(.caption)
- .foregroundColor(.secondary)
+ if platformBalanceSyncService.lastKnownRecentBlock > 0 {
+ Text(formattedHeight(UInt32(platformBalanceSyncService.lastKnownRecentBlock)))
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ } else {
+ Text("None found")
+ .font(.subheadline)
+ .foregroundColor(.blue)
+ .onTapGesture {
+ showProofDetail = true
+ }
+ }
}
- }
- // Query counts since launch
- if platformBalanceSyncService.syncCountSinceLaunch > 0 {
- let svc = platformBalanceSyncService
- VStack(spacing: 4) {
+ // Block time
+ if let blockTime = platformBalanceSyncService.lastSyncBlockTime {
HStack {
- Text("Queries Since Launch")
+ Text("Block Time")
.font(.subheadline)
.foregroundColor(.secondary)
Spacer()
- Text("\(svc.syncCountSinceLaunch) syncs")
+ Text(blockTime, style: .date)
+ .font(.caption)
+ .foregroundColor(.secondary)
+ Text(blockTime, style: .time)
.font(.caption)
.foregroundColor(.secondary)
}
- HStack(spacing: 12) {
- QueryCountBadge(label: "Trunk", count: svc.totalTrunkQueries, color: .blue)
- QueryCountBadge(label: "Branch", count: svc.totalBranchQueries, color: .indigo)
- QueryCountBadge(label: "Compacted", count: svc.totalCompactedQueries, detail: svc.totalCompactedEntries, color: .orange)
- QueryCountBadge(label: "Recent", count: svc.totalRecentQueries, detail: svc.totalRecentEntries, color: .green)
+ }
+
+ // Query counts since launch
+ if platformBalanceSyncService.syncCountSinceLaunch > 0 {
+ let svc = platformBalanceSyncService
+ VStack(spacing: 4) {
+ HStack {
+ Text("Queries Since Launch")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ Text("\(svc.syncCountSinceLaunch) syncs")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+ HStack(spacing: 12) {
+ QueryCountBadge(label: "Trunk", count: svc.totalTrunkQueries, color: .blue)
+ QueryCountBadge(label: "Branch", count: svc.totalBranchQueries, color: .indigo)
+ QueryCountBadge(label: "Compacted", count: svc.totalCompactedQueries, detail: svc.totalCompactedEntries, color: .orange)
+ QueryCountBadge(label: "Recent", count: svc.totalRecentQueries, detail: svc.totalRecentEntries, color: .green)
+ }
}
}
+
+ // Action buttons
+ HStack {
+ Spacer()
+
+ Button {
+ Task {
+ await unifiedAppState.performPlatformBalanceSync()
+ }
+ } label: {
+ HStack(spacing: 4) {
+ Image(systemName: "arrow.clockwise")
+ Text("Sync Now")
+ }
+ .font(.caption)
+ .fontWeight(.medium)
+ }
+ .buttonStyle(.borderedProminent)
+ .tint(.blue)
+ .controlSize(.mini)
+ .disabled(platformBalanceSyncService.isSyncing)
+
+ Button {
+ platformBalanceSyncService.reset()
+ } label: {
+ Text("Clear")
+ .font(.caption)
+ .fontWeight(.medium)
+ }
+ .buttonStyle(.borderedProminent)
+ .tint(.red)
+ .controlSize(.mini)
+ }
}
- // Error display
+ // Error display (always visible)
if let error = platformBalanceSyncService.lastError {
Text(error)
.font(.caption)
.foregroundColor(.red)
.lineLimit(2)
}
+ }
+ .padding(.vertical, 4)
+ } header: {
+ Text("Platform Sync Status")
+ }
- // Action buttons
+ // Section 3: ZK Shielded Sync Status
+ Section {
+ VStack(spacing: 8) {
+ // Sync state row
HStack {
+ if zkSyncService.isSyncing {
+ ProgressView()
+ .scaleEffect(0.7)
+ Text("Syncing...")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ } else if let lastSync = zkSyncService.lastSyncTime {
+ Image(systemName: "checkmark.circle.fill")
+ .foregroundColor(.green)
+ .font(.caption)
+ Text("Last sync: \(lastSync, style: .relative)")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ } else {
+ Image(systemName: "circle.dashed")
+ .foregroundColor(.secondary)
+ .font(.caption)
+ Text("Not synced yet")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ }
Spacer()
-
+ // Expand/collapse chevron
Button {
- Task {
- await unifiedAppState.performPlatformBalanceSync()
- }
+ showZKDetails.toggle()
} label: {
- HStack(spacing: 4) {
- Image(systemName: "arrow.clockwise")
- Text("Sync Now")
+ Image(systemName: showZKDetails ? "chevron.up" : "chevron.down")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+ .buttonStyle(.plain)
+ }
+
+ // Shielded balance
+ HStack {
+ Text("Shielded Balance")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ if zkSyncService.shieldedBalance > 0 {
+ Text(formatCredits(zkSyncService.shieldedBalance))
+ .font(.subheadline)
+ .fontWeight(.medium)
+ } else {
+ Text("0")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ }
+ }
+
+ // Expanded details
+ if showZKDetails {
+ // Orchard address (truncated)
+ if let address = zkSyncService.orchardAddress {
+ HStack {
+ Text("Orchard Address")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ Text(String(address.prefix(12)) + "..." + String(address.suffix(6)))
+ .foregroundColor(.secondary)
+ .font(.system(.caption, design: .monospaced))
}
- .font(.caption)
- .fontWeight(.medium)
}
- .buttonStyle(.borderedProminent)
- .tint(.blue)
- .controlSize(.mini)
- .disabled(platformBalanceSyncService.isSyncing)
- Button {
- platformBalanceSyncService.reset()
- } label: {
- Text("Clear")
+ // Last sync stats
+ HStack {
+ Text("Last Sync")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ Text("\(zkSyncService.notesSynced) notes, \(zkSyncService.nullifiersSpent) spent")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+
+ // Cumulative totals
+ HStack {
+ Text("Total Synced")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ Text("\(zkSyncService.totalNotesSynced) notes, \(zkSyncService.totalNullifiersSpent) spent")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+
+ // Sync count
+ HStack {
+ Text("Sync Count")
+ .font(.subheadline)
+ .foregroundColor(.secondary)
+ Spacer()
+ Text("\(zkSyncService.syncCountSinceLaunch)")
+ .font(.caption)
+ .foregroundColor(.secondary)
+ }
+
+ // Action buttons
+ HStack {
+ Spacer()
+
+ Button {
+ Task {
+ await unifiedAppState.performZKSync()
+ }
+ } label: {
+ HStack(spacing: 4) {
+ Image(systemName: "arrow.clockwise")
+ Text("Sync Now")
+ }
.font(.caption)
.fontWeight(.medium)
+ }
+ .buttonStyle(.borderedProminent)
+ .tint(.blue)
+ .controlSize(.mini)
+ .disabled(zkSyncService.isSyncing)
+
+ Button {
+ zkSyncService.reset()
+ } label: {
+ Text("Clear")
+ .font(.caption)
+ .fontWeight(.medium)
+ }
+ .buttonStyle(.borderedProminent)
+ .tint(.red)
+ .controlSize(.mini)
+ .disabled(zkSyncService.isSyncing)
}
- .buttonStyle(.borderedProminent)
- .tint(.red)
- .controlSize(.mini)
+ }
+
+ // Error display (always visible)
+ if let error = zkSyncService.lastError {
+ Text(error)
+ .font(.caption)
+ .foregroundColor(.red)
+ .lineLimit(2)
}
}
.padding(.vertical, 4)
} header: {
- Text("Platform Sync Status")
+ Text("ZK Shielded Sync")
}
}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift
index 68bc0220e35..e1e7479ed42 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift
@@ -38,11 +38,14 @@ struct CreateWalletView: View {
unifiedAppState.platformState.currentNetwork
}
- // Only show devnet option if currently on devnet
var shouldShowDevnet: Bool {
currentNetwork == .devnet
}
+ var shouldShowRegtest: Bool {
+ currentNetwork == .regtest
+ }
+
var body: some View {
Form {
Section {
@@ -96,6 +99,19 @@ struct CreateWalletView: View {
}
.toggleStyle(CheckboxToggleStyle())
}
+
+ // Only show Regtest/Local if currently on Local
+ if shouldShowRegtest {
+ Toggle(isOn: $createForRegtest) {
+ HStack {
+ Image(systemName: "network")
+ .foregroundColor(.purple)
+ Text("Local")
+ .font(.body)
+ }
+ }
+ .toggleStyle(CheckboxToggleStyle())
+ }
}
.padding(.vertical, 4)
} header: {
@@ -221,7 +237,7 @@ struct CreateWalletView: View {
}
private var hasNetworkSelected: Bool {
- createForMainnet || createForTestnet || createForDevnet
+ createForMainnet || createForTestnet || createForDevnet || createForRegtest
}
private func setupInitialNetworkSelection() {
@@ -279,6 +295,7 @@ struct CreateWalletView: View {
createForMainnet ? AppNetwork.mainnet : nil,
createForTestnet ? AppNetwork.testnet : nil,
(createForDevnet && shouldShowDevnet) ? AppNetwork.devnet : nil,
+ (createForRegtest && shouldShowRegtest) ? AppNetwork.regtest : nil,
].compactMap { $0 }
guard let primaryNetwork = selectedNetworks.first else {
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/ReceiveAddressView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/ReceiveAddressView.swift
index b4d677297ca..321b4561c3f 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/ReceiveAddressView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/ReceiveAddressView.swift
@@ -17,6 +17,8 @@ struct ReceiveAddressView: View {
@State private var selectedTab: ReceiveAddressTab = .core
@State private var copiedToClipboard = false
+ @State private var faucetStatus: String?
+ @State private var isFaucetLoading = false
private var currentAddress: String {
switch selectedTab {
@@ -143,6 +145,27 @@ struct ReceiveAddressView: View {
.buttonStyle(.borderedProminent)
.tint(tabColor)
.padding(.horizontal)
+
+ // Faucet button — only on local Docker, Core tab
+ if selectedTab == .core && unifiedAppState.platformState.useDockerSetup {
+ Button {
+ Task { await requestFromFaucet() }
+ } label: {
+ HStack {
+ if isFaucetLoading {
+ ProgressView().scaleEffect(0.8)
+ } else {
+ Image(systemName: "drop.fill")
+ }
+ Text(faucetStatus ?? "Get 10 DASH from Faucet")
+ }
+ .frame(maxWidth: .infinity)
+ }
+ .buttonStyle(.borderedProminent)
+ .tint(.green)
+ .padding(.horizontal)
+ .disabled(isFaucetLoading)
+ }
} else {
Spacer()
Text(unavailableMessage)
@@ -217,4 +240,79 @@ struct ReceiveAddressView: View {
copiedToClipboard = false
}
}
+
+ /// Request 10 DASH from the local Docker faucet (seed node Core RPC).
+ private func requestFromFaucet() async {
+ isFaucetLoading = true
+ faucetStatus = nil
+ defer { isFaucetLoading = false }
+
+ let address = currentAddress
+ guard !address.isEmpty else {
+ faucetStatus = "No address available"
+ return
+ }
+
+ // Read RPC port and password from UserDefaults, with dashmate defaults
+ let rpcPort = UserDefaults.standard.string(forKey: "faucetRPCPort") ?? "20302"
+ let rpcUser = UserDefaults.standard.string(forKey: "faucetRPCUser") ?? "dashmate"
+ let rpcPassword = UserDefaults.standard.string(forKey: "faucetRPCPassword") ?? "dashmate"
+
+ guard let url = URL(string: "http://127.0.0.1:\(rpcPort)/") else {
+ faucetStatus = "Invalid RPC URL"
+ return
+ }
+
+ let body: [String: Any] = [
+ "jsonrpc": "1.0",
+ "id": "faucet",
+ "method": "sendtoaddress",
+ "params": [address, 10]
+ ]
+
+ guard let jsonData = try? JSONSerialization.data(withJSONObject: body) else {
+ faucetStatus = "Failed to encode request"
+ return
+ }
+
+ var request = URLRequest(url: url)
+ request.httpMethod = "POST"
+ request.httpBody = jsonData
+ request.setValue("text/plain", forHTTPHeaderField: "Content-Type")
+
+ let credentials = "\(rpcUser):\(rpcPassword)"
+ if let credData = credentials.data(using: .utf8) {
+ request.setValue("Basic \(credData.base64EncodedString())", forHTTPHeaderField: "Authorization")
+ }
+
+ do {
+ let (data, response) = try await URLSession.shared.data(for: request)
+ guard let httpResponse = response as? HTTPURLResponse else {
+ faucetStatus = "Invalid response"
+ return
+ }
+
+ if httpResponse.statusCode == 200 {
+ if let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any],
+ let txid = json["result"] as? String {
+ faucetStatus = "Sent! tx: \(txid.prefix(12))..."
+ } else {
+ faucetStatus = "Sent!"
+ }
+ } else if httpResponse.statusCode == 401 || httpResponse.statusCode == 403 {
+ faucetStatus = "Auth failed — set faucetRPCPassword in UserDefaults"
+ } else {
+ let body = String(data: data, encoding: .utf8) ?? ""
+ faucetStatus = "RPC error \(httpResponse.statusCode): \(body.prefix(80))"
+ }
+ } catch {
+ faucetStatus = "Network error: \(error.localizedDescription)"
+ }
+
+ // Clear status after 5 seconds
+ Task {
+ try? await Task.sleep(nanoseconds: 5_000_000_000)
+ faucetStatus = nil
+ }
+ }
}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/SendTransactionView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/SendTransactionView.swift
index 9b512ef62fa..a8d3af87804 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/SendTransactionView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/SendTransactionView.swift
@@ -28,6 +28,29 @@ struct SendTransactionView: View {
if !viewModel.recipientAddress.isEmpty {
AddressTypeBadge(type: viewModel.detectedAddressType)
}
+
+ // Quick-fill address buttons
+ let quickAddresses = buildQuickAddresses()
+ if !quickAddresses.isEmpty {
+ ScrollView(.horizontal, showsIndicators: false) {
+ HStack(spacing: 8) {
+ ForEach(quickAddresses, id: \.label) { qa in
+ Button {
+ viewModel.recipientAddress = qa.address
+ } label: {
+ Text(qa.label)
+ .font(.caption2)
+ .padding(.horizontal, 10)
+ .padding(.vertical, 6)
+ .background(qa.color.opacity(0.15))
+ .foregroundColor(qa.color)
+ .cornerRadius(12)
+ }
+ .buttonStyle(.plain)
+ }
+ }
+ }
+ }
} header: {
Text("Recipient")
}
@@ -103,25 +126,6 @@ struct SendTransactionView: View {
}
}
- // Asset Lock (disabled)
- Section {
- HStack {
- Image(systemName: "lock.fill")
- .foregroundColor(.gray)
- Text("Asset Lock")
- .foregroundColor(.gray)
- Spacer()
- Text("Coming Soon")
- .font(.caption)
- .foregroundColor(.secondary)
- .padding(.horizontal, 8)
- .padding(.vertical, 4)
- .background(Color(UIColor.tertiarySystemBackground))
- .cornerRadius(6)
- }
- } header: {
- Text("Other Options")
- }
}
.navigationTitle("Send Dash")
.navigationBarTitleDisplayMode(.inline)
@@ -136,6 +140,7 @@ struct SendTransactionView: View {
await viewModel.executeSend(
sdk: sdk,
shieldedService: shieldedService,
+ walletService: walletService,
platformState: unifiedAppState.platformState,
wallet: wallet
)
@@ -189,6 +194,8 @@ struct SendTransactionView: View {
private func flowColor(for flow: SendFlow) -> Color {
switch flow {
+ case .coreToPlatform: return .indigo
+ case .coreToCore: return .blue
case .platformToShielded: return .purple
case .shieldedToShielded: return .purple
case .shieldedToPlatform: return .blue
@@ -209,6 +216,61 @@ struct SendTransactionView: View {
}
return String(format: "%.8f DASH", dash)
}
+
+ // MARK: - Quick Address Buttons
+
+ private struct QuickAddress {
+ let label: String
+ let address: String
+ let color: Color
+ }
+
+ private func buildQuickAddresses() -> [QuickAddress] {
+ var addresses: [QuickAddress] = []
+ let wallets = walletService.walletManager.wallets
+
+ // Our wallet's internal addresses
+ let ownCoreAddress = walletService.walletManager.getReceiveAddress(for: wallet)
+ if !ownCoreAddress.isEmpty {
+ addresses.append(QuickAddress(label: "My Core", address: ownCoreAddress, color: .blue))
+ }
+
+ // Our platform address
+ if let collection = walletService.walletManager.getManagedAccountCollection(for: wallet),
+ let platformAccount = collection.getPlatformPaymentAccount(accountIndex: 0, keyClass: 0),
+ let pool = platformAccount.getAddressPool(),
+ let infos = try? pool.getAddresses(from: 0, to: 1),
+ let addrInfo = infos.first {
+ let networkValue: UInt32 = wallet.network == .mainnet ? 0 : 1
+ let result = addrInfo.scriptPubKey.withUnsafeBytes { buffer -> DashSDKResult in
+ guard let base = buffer.baseAddress?.assumingMemoryBound(to: UInt8.self) else {
+ return DashSDKResult()
+ }
+ return dash_sdk_encode_platform_address(base, UInt32(addrInfo.scriptPubKey.count), networkValue)
+ }
+ if result.error == nil, let dataPtr = result.data {
+ let str = String(cString: dataPtr.assumingMemoryBound(to: CChar.self))
+ dash_sdk_string_free(dataPtr)
+ addresses.append(QuickAddress(label: "My Platform", address: str, color: .indigo))
+ }
+ }
+
+ // Our shielded address
+ if let orchardAddress = shieldedService.orchardDisplayAddress {
+ addresses.append(QuickAddress(label: "My Shielded", address: orchardAddress, color: .purple))
+ }
+
+ // Other wallet's addresses (first wallet that isn't ours)
+ if let otherWallet = wallets.first(where: { $0.id != wallet.id }) {
+ let otherCore = walletService.walletManager.getReceiveAddress(for: otherWallet)
+ if !otherCore.isEmpty {
+ let name = otherWallet.label.isEmpty ? "Other" : otherWallet.label
+ addresses.append(QuickAddress(label: "\(name) Core", address: otherCore, color: .green))
+ }
+ }
+
+ return addresses
+ }
}
// MARK: - Subviews
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/WalletsContentView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/WalletsContentView.swift
index 4a4199c0976..315283e80a6 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/WalletsContentView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/WalletsContentView.swift
@@ -126,6 +126,7 @@ struct WalletsContentView: View {
}
.refreshable {
await unifiedAppState.performPlatformBalanceSync()
+ await unifiedAppState.performZKSync()
}
}
}
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/SwiftExampleAppApp.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/SwiftExampleAppApp.swift
index f53c7b7401f..b35e0251681 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/SwiftExampleAppApp.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/SwiftExampleAppApp.swift
@@ -50,6 +50,7 @@ struct SwiftExampleAppApp: App {
.environmentObject(unifiedState.unifiedState)
.environmentObject(unifiedState.shieldedService)
.environmentObject(unifiedState.platformBalanceSyncService)
+ .environmentObject(unifiedState.zkSyncService)
.environment(\.modelContext, unifiedState.modelContainer.mainContext)
.task {
SDKLogger.log("🚀 SwiftExampleApp: Starting initialization...", minimumLevel: .medium)
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/UnifiedAppState.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/UnifiedAppState.swift
index f205bdef6c7..3b10c146cc2 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/UnifiedAppState.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/UnifiedAppState.swift
@@ -32,6 +32,9 @@ class UnifiedAppState: ObservableObject {
// Platform address balance sync service (BLAST sync)
let platformBalanceSyncService = PlatformBalanceSyncService()
+ // ZK shielded pool sync service
+ let zkSyncService = ZKSyncService()
+
// State from Platform
let platformState: AppState
@@ -47,6 +50,9 @@ class UnifiedAppState: ObservableObject {
// Task for the periodic sync loop
private var syncLoopTask: Task?
+ // Task for the periodic ZK sync loop
+ private var zkSyncLoopTask: Task?
+
// Computed property for easy SDK access
var sdk: SDK? {
platformState.sdk
@@ -93,6 +99,9 @@ class UnifiedAppState: ObservableObject {
// Start periodic BLAST address sync
startPlatformBalanceSync()
+ // Start periodic ZK shielded sync
+ startZKSync()
+
isInitialized = true
}
@@ -106,6 +115,9 @@ class UnifiedAppState: ObservableObject {
syncLoopTask?.cancel()
syncLoopTask = nil
platformBalanceSyncService.reset()
+ zkSyncLoopTask?.cancel()
+ zkSyncLoopTask = nil
+ zkSyncService.reset()
// Reset platform state
platformState.sdk = nil
@@ -129,6 +141,9 @@ class UnifiedAppState: ObservableObject {
// Restart BLAST sync for the new network
startPlatformBalanceSync()
+ // Restart ZK sync for the new network
+ startZKSync()
+
// The platform state handles its own network switching in AppState.switchNetwork
}
@@ -189,6 +204,42 @@ class UnifiedAppState: ObservableObject {
}
}
+ /// Start periodic ZK shielded sync (every 30 seconds).
+ func startZKSync() {
+ // Cancel any previous sync loop
+ zkSyncLoopTask?.cancel()
+
+ let network = platformState.currentNetwork
+ zkSyncService.startPeriodicSync(network: network)
+
+ // Run a repeating async loop
+ zkSyncLoopTask = Task { [weak self] in
+ // Initial delay to allow SDK and shielded service to initialize
+ try? await Task.sleep(for: .seconds(5))
+ await self?.performZKSync()
+
+ // Repeat every 30 seconds
+ while !Task.isCancelled {
+ do {
+ try await Task.sleep(for: .seconds(30))
+ } catch {
+ break // Task was cancelled
+ }
+ await self?.performZKSync()
+ }
+ }
+ }
+
+ /// Perform a single ZK shielded sync. Skips silently if no SDK or pool client.
+ func performZKSync() async {
+ guard let sdk = platformState.sdk else { return }
+
+ // Skip silently if shielded pool client is not initialized
+ guard shieldedService.poolClient != nil else { return }
+
+ await zkSyncService.performSync(sdk: sdk, shieldedService: shieldedService)
+ }
+
/// Initialize the shielded service using the first wallet's seed.
/// Call after wallet seed becomes available or on network switch.
func initializeShieldedService() {
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Views/OptionsView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Views/OptionsView.swift
index 89258fca735..a953371e9ad 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Views/OptionsView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Views/OptionsView.swift
@@ -19,6 +19,11 @@ struct OptionsView: View {
if newNetwork != appState.currentNetwork {
isSwitchingNetwork = true
Task {
+ // Auto-disable Docker when leaving Local
+ if newNetwork != .regtest && appState.useDockerSetup {
+ appState.useDockerSetup = false
+ }
+
// Update platform state (which will trigger SDK switch)
appState.currentNetwork = newNetwork
@@ -39,21 +44,27 @@ struct OptionsView: View {
.pickerStyle(SegmentedPickerStyle())
.disabled(isSwitchingNetwork)
- Toggle("Use Local DAPI (Platform)", isOn: $appState.useLocalPlatform)
- .onChange(of: appState.useLocalPlatform) { _, _ in
- isSwitchingNetwork = true
- Task {
- await appState.switchNetwork(to: appState.currentNetwork)
- await MainActor.run { isSwitchingNetwork = false }
+ if appState.currentNetwork == .regtest {
+ Toggle("Use Docker Setup", isOn: $appState.useDockerSetup)
+ .onChange(of: appState.useDockerSetup) { _, _ in
+ isSwitchingNetwork = true
+ Task {
+ await appState.switchNetwork(to: appState.currentNetwork)
+ await MainActor.run { isSwitchingNetwork = false }
+ }
}
+ .help("Connect to local dashmate Docker network.")
+
+ if appState.useDockerSetup {
+ TextField("Faucet RPC Password", text: Binding(
+ get: { UserDefaults.standard.string(forKey: "faucetRPCPassword") ?? "" },
+ set: { UserDefaults.standard.set($0, forKey: "faucetRPCPassword") }
+ ))
+ .font(.system(.body, design: .monospaced))
+ .textInputAutocapitalization(.never)
+ .autocorrectionDisabled()
}
- .help("When enabled, Platform requests use local DAPI at 127.0.0.1:1443 (override via 'platformDAPIAddresses').")
-
- Toggle("Use Local Core (SPV)", isOn: $appState.useLocalCore)
- .onChange(of: appState.useLocalCore) { _, _ in
- // Core override will be applied when SPV peer overrides are supported
- }
- .help("When enabled, Core (SPV) connects only to configured peers (default 127.0.0.1 with network port). Override via 'corePeerAddresses'.")
+ }
HStack {
Text("Network Status")
@@ -76,6 +87,7 @@ struct OptionsView: View {
.foregroundColor(.red)
}
}
+
}
Section("Data") {
From 93395a9d37179c77d60ae2853c4a169c18147a6d Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 19:42:23 +0300
Subject: [PATCH 03/40] feat: add rs-scripts crate with decode-document CLI
tool (#3391)
Co-authored-by: Claude Opus 4.6 (1M context)
---
Cargo.lock | 14 ++
Cargo.toml | 4 +-
packages/rs-scripts/Cargo.toml | 18 ++
packages/rs-scripts/README.md | 60 ++++++
.../rs-scripts/src/bin/decode_document.rs | 173 ++++++++++++++++++
5 files changed, 268 insertions(+), 1 deletion(-)
create mode 100644 packages/rs-scripts/Cargo.toml
create mode 100644 packages/rs-scripts/README.md
create mode 100644 packages/rs-scripts/src/bin/decode_document.rs
diff --git a/Cargo.lock b/Cargo.lock
index 92fea276fcf..8b80e6dcc03 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5842,6 +5842,20 @@ dependencies = [
"tracing",
]
+[[package]]
+name = "rs-scripts"
+version = "0.1.0"
+dependencies = [
+ "base64 0.22.1",
+ "chrono",
+ "clap",
+ "data-contracts",
+ "dpp",
+ "hex",
+ "platform-version",
+ "serde_json",
+]
+
[[package]]
name = "rs-sdk-ffi"
version = "3.1.0-dev.1"
diff --git a/Cargo.toml b/Cargo.toml
index 80a21c2046e..fe1346cea97 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -43,7 +43,9 @@ members = [
"packages/rs-platform-wallet",
"packages/rs-platform-wallet-ffi",
"packages/rs-platform-encryption",
- "packages/wasm-sdk", "packages/rs-unified-sdk-ffi",
+ "packages/wasm-sdk",
+ "packages/rs-unified-sdk-ffi",
+ "packages/rs-scripts",
]
[workspace.dependencies]
diff --git a/packages/rs-scripts/Cargo.toml b/packages/rs-scripts/Cargo.toml
new file mode 100644
index 00000000000..dc639760994
--- /dev/null
+++ b/packages/rs-scripts/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "rs-scripts"
+version = "0.1.0"
+edition = "2021"
+
+[[bin]]
+name = "decode-document"
+path = "src/bin/decode_document.rs"
+
+[dependencies]
+dpp = { path = "../rs-dpp", features = ["system_contracts"] }
+data-contracts = { path = "../data-contracts" }
+platform-version = { path = "../rs-platform-version" }
+base64 = "0.22"
+chrono = "0.4"
+hex = "0.4"
+clap = { version = "4", features = ["derive"] }
+serde_json = "1"
diff --git a/packages/rs-scripts/README.md b/packages/rs-scripts/README.md
new file mode 100644
index 00000000000..32dee2822e2
--- /dev/null
+++ b/packages/rs-scripts/README.md
@@ -0,0 +1,60 @@
+# rs-scripts
+
+Utility scripts for debugging and inspecting Dash Platform data.
+
+## decode-document
+
+Decodes a hex or base64-encoded platform document into human-readable output. Uses the actual platform deserialization code, so it handles all document format versions correctly.
+
+### Usage
+
+```bash
+cargo run -p rs-scripts --bin decode-document -- [OPTIONS]
+```
+
+### Options
+
+| Option | Required | Description |
+|--------|----------|-------------|
+| `-c, --contract` | yes | System data contract name or ID (base58/base64/hex) |
+| `-d, --doc-type` | yes | Document type name within the contract |
+| `-f, --format` | no | Input encoding: `base64`, `hex`, or `auto` (default: `auto`) |
+
+### Supported contracts
+
+`withdrawals`, `dpns`, `dashpay`, `masternode-reward-shares`, `feature-flags`, `wallet-utils`, `token-history`, `keyword-search`
+
+You can also pass the contract ID directly instead of a name (you'll need `-d` to specify the document type):
+```bash
+# base58
+cargo run -p rs-scripts --bin decode-document -- -c 4fJLR2GYTPFdomuTVvNy3VRrvWgvkKPzqehEBpNf2nk6 -d withdrawal "base64data..."
+# base64
+cargo run -p rs-scripts --bin decode-document -- -c "NmK7YeF/rj6ilM9gMZf7CqttURgL2LYQTElEpi/i2X8=" -d withdrawal "base64data..."
+# hex
+cargo run -p rs-scripts --bin decode-document -- -c 3662bb61e17fae3ea294cf603197fb0aab6d51180bd8b6104c4944a62fe2d97f -d withdrawal "base64data..."
+```
+
+### Examples
+
+Decode a withdrawal document:
+```bash
+cargo run -p rs-scripts --bin decode-document -- -c withdrawals -d withdrawal "AgIintqUs1vl..."
+```
+
+Decode a DPNS domain document:
+```bash
+cargo run -p rs-scripts --bin decode-document -- -c dpns -d domain "base64data..."
+```
+
+Pipe from a gRPC query (decode each document from the response):
+```bash
+echo '{"v0":{"prove":false,"data_contract_id":"NmK7YeF/rj6ilM9gMZf7CqttURgL2LYQTElEpi/i2X8=","document_type":"withdrawal","where":"gYNmc3RhdHVzYT0C","limit":10}}' \
+ | grpcurl -insecure -import-path packages/dapi-grpc/protos -d @ \
+ -proto platform/v0/platform.proto \
+ :443 org.dash.platform.dapi.v0.Platform/getDocuments \
+ | jq -r '.v0.documents.documents[]' \
+ | while read doc; do
+ cargo run -p rs-scripts --bin decode-document -- -c withdrawals -d withdrawal "$doc"
+ echo "---"
+ done
+```
diff --git a/packages/rs-scripts/src/bin/decode_document.rs b/packages/rs-scripts/src/bin/decode_document.rs
new file mode 100644
index 00000000000..79442016f39
--- /dev/null
+++ b/packages/rs-scripts/src/bin/decode_document.rs
@@ -0,0 +1,173 @@
+use base64::Engine;
+use clap::Parser;
+use data_contracts::SystemDataContract;
+use dpp::data_contract::accessors::v0::DataContractV0Getters;
+use dpp::document::serialization_traits::DocumentPlatformConversionMethodsV0;
+use dpp::document::Document;
+use dpp::document::DocumentV0Getters;
+use dpp::platform_value::Identifier;
+use dpp::system_data_contracts::load_system_data_contract;
+use platform_version::version::PlatformVersion;
+
+// Keep in sync with SystemDataContract enum in packages/data-contracts/src/lib.rs
+const SYSTEM_CONTRACTS: &[(&str, SystemDataContract)] = &[
+ ("withdrawals", SystemDataContract::Withdrawals),
+ ("dpns", SystemDataContract::DPNS),
+ ("dashpay", SystemDataContract::Dashpay),
+ (
+ "masternode-reward-shares",
+ SystemDataContract::MasternodeRewards,
+ ),
+ ("feature-flags", SystemDataContract::FeatureFlags),
+ ("wallet-utils", SystemDataContract::WalletUtils),
+ ("token-history", SystemDataContract::TokenHistory),
+ ("keyword-search", SystemDataContract::KeywordSearch),
+];
+
+#[derive(Parser)]
+#[command(
+ name = "decode-document",
+ about = "Decode a platform document from hex or base64 bytes"
+)]
+struct Args {
+ /// Document bytes (base64 or hex encoded)
+ doc_bytes: String,
+
+ /// System data contract: name (e.g. "withdrawals") or ID in base58/base64/hex
+ #[arg(short, long)]
+ contract: String,
+
+ /// Document type name within the contract (e.g. "withdrawal", "domain")
+ #[arg(short, long)]
+ doc_type: String,
+
+ /// Input encoding: "base64", "hex", or "auto" (default: auto, tries base64 then hex)
+ #[arg(short, long, default_value = "auto")]
+ format: String,
+}
+
+fn resolve_system_contract(input: &str) -> SystemDataContract {
+ // Try by name first
+ for (name, sc) in SYSTEM_CONTRACTS {
+ if input.eq_ignore_ascii_case(name) {
+ return *sc;
+ }
+ }
+
+ // Try parsing as an identifier (base58, base64, or hex)
+ let id = Identifier::from_string_unknown_encoding(input).unwrap_or_else(|_| {
+ eprintln!("Unknown contract: '{input}'");
+ eprintln!(
+ "Must be a name ({}) or an ID in base58/base64/hex",
+ SYSTEM_CONTRACTS
+ .iter()
+ .map(|(n, _)| *n)
+ .collect::>()
+ .join(", ")
+ );
+ std::process::exit(1);
+ });
+
+ for (_, sc) in SYSTEM_CONTRACTS {
+ if sc.id() == id {
+ return *sc;
+ }
+ }
+
+ eprintln!("No system contract found with ID {id}");
+ std::process::exit(1);
+}
+
+fn main() {
+ let args = Args::parse();
+
+ let platform_version = PlatformVersion::latest();
+
+ let system_contract = resolve_system_contract(&args.contract);
+
+ let data_contract = match load_system_data_contract(system_contract, platform_version) {
+ Ok(c) => c,
+ Err(e) => {
+ eprintln!("Failed to load system data contract: {e}");
+ std::process::exit(1);
+ }
+ };
+
+ let document_type = match data_contract.document_type_for_name(&args.doc_type) {
+ Ok(dt) => dt,
+ Err(e) => {
+ eprintln!("Unknown document type '{}': {e}", args.doc_type);
+ eprintln!(
+ "Available types: {}",
+ data_contract
+ .document_types()
+ .keys()
+ .cloned()
+ .collect::>()
+ .join(", ")
+ );
+ std::process::exit(1);
+ }
+ };
+
+ let bytes = match args.format.as_str() {
+ "base64" => base64::engine::general_purpose::STANDARD
+ .decode(&args.doc_bytes)
+ .unwrap_or_else(|e| {
+ eprintln!("Invalid base64: {e}");
+ std::process::exit(1);
+ }),
+ "hex" => hex::decode(&args.doc_bytes).unwrap_or_else(|e| {
+ eprintln!("Invalid hex: {e}");
+ std::process::exit(1);
+ }),
+ "auto" | _ => {
+ // Try base64 first (most common — gRPC responses are base64),
+ // then hex. This avoids misinterpreting hex-only base64 strings.
+ if let Ok(b) = base64::engine::general_purpose::STANDARD.decode(&args.doc_bytes) {
+ b
+ } else if let Ok(b) = hex::decode(&args.doc_bytes) {
+ b
+ } else {
+ eprintln!("Failed to decode document bytes as base64 or hex");
+ eprintln!("Hint: use --format base64 or --format hex to force a specific encoding");
+ std::process::exit(1);
+ }
+ }
+ };
+
+ let document = match Document::from_bytes(&bytes, document_type, platform_version) {
+ Ok(doc) => doc,
+ Err(e) => {
+ eprintln!("Failed to deserialize document: {e}");
+ std::process::exit(1);
+ }
+ };
+
+ println!("id: {}", document.id());
+ println!("owner_id: {}", document.owner_id());
+ if let Some(created_at) = document.created_at() {
+ println!("created_at: {} ({}ms)", format_ts(created_at), created_at);
+ }
+ if let Some(updated_at) = document.updated_at() {
+ println!("updated_at: {} ({}ms)", format_ts(updated_at), updated_at);
+ }
+ if let Some(revision) = document.revision() {
+ println!("revision: {}", revision);
+ }
+ println!();
+ println!("properties:");
+ for (key, value) in document.properties() {
+ println!(" {key}: {value}");
+ }
+}
+
+fn format_ts(ms: u64) -> String {
+ let secs = (ms / 1000) as i64;
+ let nanos = ((ms % 1000) * 1_000_000) as u32;
+ let dt = chrono::DateTime::from_timestamp(secs, nanos);
+ match dt {
+ Some(dt) => dt.format("%Y-%m-%d %H:%M:%S UTC").to_string(),
+ None => format!("invalid timestamp: {ms}"),
+ }
+}
From b7075ae0b16cdc46641e1c3130160e2895edd347 Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 19:47:34 +0300
Subject: [PATCH 04/40] fix(dpp): validate encrypted_note length in structure
validation (#3368)
Co-authored-by: Claude Opus 4.6 (1M context)
---
.../src/errors/consensus/basic/basic_error.rs | 6 +-
.../consensus/basic/state_transition/mod.rs | 2 +
...lded_encrypted_note_size_mismatch_error.rs | 46 ++++++
packages/rs-dpp/src/errors/consensus/codes.rs | 1 +
.../shielded/common_validation.rs | 131 +++++++++++++++++-
.../state_transitions/shielded/mod.rs | 2 +-
.../v0/state_transition_validation.rs | 24 +++-
.../v0/state_transition_validation.rs | 24 +++-
.../v0/state_transition_validation.rs | 24 +++-
.../v0/state_transition_validation.rs | 24 +++-
.../v0/state_transition_validation.rs | 24 +++-
.../state_transitions/shield/tests.rs | 8 +-
.../state_transitions/shielded_common/mod.rs | 10 +-
.../shielded_transfer/tests.rs | 5 +-
.../shielded_withdrawal/tests.rs | 5 +-
.../state_transitions/unshield/tests.rs | 5 +-
.../src/errors/consensus/consensus_error.rs | 5 +-
17 files changed, 329 insertions(+), 17 deletions(-)
create mode 100644 packages/rs-dpp/src/errors/consensus/basic/state_transition/shielded_encrypted_note_size_mismatch_error.rs
diff --git a/packages/rs-dpp/src/errors/consensus/basic/basic_error.rs b/packages/rs-dpp/src/errors/consensus/basic/basic_error.rs
index 05dee2b8ab2..d22a61f4a01 100644
--- a/packages/rs-dpp/src/errors/consensus/basic/basic_error.rs
+++ b/packages/rs-dpp/src/errors/consensus/basic/basic_error.rs
@@ -77,7 +77,8 @@ use crate::consensus::basic::state_transition::{
InputWitnessCountMismatchError, InputsNotLessThanOutputsError, InsufficientFundingAmountError,
InvalidRemainderOutputCountError, InvalidStateTransitionTypeError,
MissingStateTransitionTypeError, OutputAddressAlsoInputError, OutputBelowMinimumError,
- OutputsNotGreaterThanInputsError, ShieldedEmptyProofError, ShieldedInvalidValueBalanceError,
+ OutputsNotGreaterThanInputsError, ShieldedEmptyProofError,
+ ShieldedEncryptedNoteSizeMismatchError, ShieldedInvalidValueBalanceError,
ShieldedNoActionsError, ShieldedTooManyActionsError, ShieldedZeroAnchorError,
StateTransitionMaxSizeExceededError, StateTransitionNotActiveError, TransitionNoInputsError,
TransitionNoOutputsError, TransitionOverMaxInputsError, TransitionOverMaxOutputsError,
@@ -673,6 +674,9 @@ pub enum BasicError {
#[error(transparent)]
ShieldedInvalidValueBalanceError(ShieldedInvalidValueBalanceError),
+
+ #[error(transparent)]
+ ShieldedEncryptedNoteSizeMismatchError(ShieldedEncryptedNoteSizeMismatchError),
}
impl From for ConsensusError {
diff --git a/packages/rs-dpp/src/errors/consensus/basic/state_transition/mod.rs b/packages/rs-dpp/src/errors/consensus/basic/state_transition/mod.rs
index b9acc33f2a7..4b549af9155 100644
--- a/packages/rs-dpp/src/errors/consensus/basic/state_transition/mod.rs
+++ b/packages/rs-dpp/src/errors/consensus/basic/state_transition/mod.rs
@@ -14,6 +14,7 @@ mod output_address_also_input_error;
mod output_below_minimum_error;
mod outputs_not_greater_than_inputs_error;
mod shielded_empty_proof_error;
+mod shielded_encrypted_note_size_mismatch_error;
mod shielded_invalid_value_balance_error;
mod shielded_no_actions_error;
mod shielded_too_many_actions_error;
@@ -43,6 +44,7 @@ pub use output_address_also_input_error::*;
pub use output_below_minimum_error::*;
pub use outputs_not_greater_than_inputs_error::*;
pub use shielded_empty_proof_error::*;
+pub use shielded_encrypted_note_size_mismatch_error::*;
pub use shielded_invalid_value_balance_error::*;
pub use shielded_no_actions_error::*;
pub use shielded_too_many_actions_error::*;
diff --git a/packages/rs-dpp/src/errors/consensus/basic/state_transition/shielded_encrypted_note_size_mismatch_error.rs b/packages/rs-dpp/src/errors/consensus/basic/state_transition/shielded_encrypted_note_size_mismatch_error.rs
new file mode 100644
index 00000000000..e091503a933
--- /dev/null
+++ b/packages/rs-dpp/src/errors/consensus/basic/state_transition/shielded_encrypted_note_size_mismatch_error.rs
@@ -0,0 +1,46 @@
+use crate::consensus::basic::BasicError;
+use crate::consensus::ConsensusError;
+use crate::errors::ProtocolError;
+use bincode::{Decode, Encode};
+use platform_serialization_derive::{PlatformDeserialize, PlatformSerialize};
+use thiserror::Error;
+
+#[derive(
+ Error, Debug, Clone, PartialEq, Eq, Encode, Decode, PlatformSerialize, PlatformDeserialize,
+)]
+#[error(
+ "Shielded action encrypted_note has invalid size: expected {expected_size} bytes, got {actual_size} bytes"
+)]
+#[platform_serialize(unversioned)]
+pub struct ShieldedEncryptedNoteSizeMismatchError {
+ /*
+
+ DO NOT CHANGE ORDER OF FIELDS WITHOUT INTRODUCING OF NEW VERSION
+
+ */
+ expected_size: u32,
+ actual_size: u32,
+}
+
+impl ShieldedEncryptedNoteSizeMismatchError {
+ pub fn new(expected_size: u32, actual_size: u32) -> Self {
+ Self {
+ expected_size,
+ actual_size,
+ }
+ }
+
+ pub fn expected_size(&self) -> u32 {
+ self.expected_size
+ }
+
+ pub fn actual_size(&self) -> u32 {
+ self.actual_size
+ }
+}
+
+impl From for ConsensusError {
+ fn from(err: ShieldedEncryptedNoteSizeMismatchError) -> Self {
+ Self::BasicError(BasicError::ShieldedEncryptedNoteSizeMismatchError(err))
+ }
+}
diff --git a/packages/rs-dpp/src/errors/consensus/codes.rs b/packages/rs-dpp/src/errors/consensus/codes.rs
index ed24cbc15aa..d850c4e046a 100644
--- a/packages/rs-dpp/src/errors/consensus/codes.rs
+++ b/packages/rs-dpp/src/errors/consensus/codes.rs
@@ -237,6 +237,7 @@ impl ErrorWithCode for BasicError {
Self::ShieldedEmptyProofError(_) => 10820,
Self::ShieldedZeroAnchorError(_) => 10821,
Self::ShieldedInvalidValueBalanceError(_) => 10822,
+ Self::ShieldedEncryptedNoteSizeMismatchError(_) => 10823,
Self::ShieldedTooManyActionsError(_) => 10825,
}
}
diff --git a/packages/rs-dpp/src/state_transition/state_transitions/shielded/common_validation.rs b/packages/rs-dpp/src/state_transition/state_transitions/shielded/common_validation.rs
index 3cf21276f38..be56f5516fb 100644
--- a/packages/rs-dpp/src/state_transition/state_transitions/shielded/common_validation.rs
+++ b/packages/rs-dpp/src/state_transition/state_transitions/shielded/common_validation.rs
@@ -1,11 +1,16 @@
use crate::consensus::basic::state_transition::{
- ShieldedEmptyProofError, ShieldedNoActionsError, ShieldedTooManyActionsError,
- ShieldedZeroAnchorError,
+ ShieldedEmptyProofError, ShieldedEncryptedNoteSizeMismatchError, ShieldedNoActionsError,
+ ShieldedTooManyActionsError, ShieldedZeroAnchorError,
};
use crate::consensus::basic::BasicError;
use crate::shielded::SerializedAction;
use crate::validation::SimpleConsensusValidationResult;
+/// Expected size of the encrypted_note field in each SerializedAction.
+/// This is epk (32) + enc_ciphertext (104) + out_ciphertext (80) = 216 bytes.
+/// Canonical source of truth — drive-abci imports this constant.
+pub const ENCRYPTED_NOTE_SIZE: usize = 216;
+
/// Validate that the actions list is not empty and does not exceed the maximum.
pub fn validate_actions_count(
actions: &[SerializedAction],
@@ -50,6 +55,28 @@ pub fn validate_anchor_not_zero(anchor: &[u8; 32]) -> SimpleConsensusValidationR
}
}
+/// Defense-in-depth: validate that every action's `encrypted_note` field is exactly
+/// `ENCRYPTED_NOTE_SIZE` (216) bytes. This rejects malformed data early at the DPP
+/// layer before it reaches the ABCI bundle reconstruction, saving network bandwidth.
+pub fn validate_encrypted_note_sizes(
+ actions: &[SerializedAction],
+) -> SimpleConsensusValidationResult {
+ for action in actions {
+ if action.encrypted_note.len() != ENCRYPTED_NOTE_SIZE {
+ return SimpleConsensusValidationResult::new_with_error(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(
+ ShieldedEncryptedNoteSizeMismatchError::new(
+ ENCRYPTED_NOTE_SIZE as u32,
+ action.encrypted_note.len() as u32,
+ ),
+ )
+ .into(),
+ );
+ }
+ }
+ SimpleConsensusValidationResult::new()
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -171,4 +198,104 @@ mod tests {
result.errors
);
}
+
+ // --- validate_encrypted_note_sizes ---
+
+ #[test]
+ fn validate_encrypted_note_sizes_should_accept_correct_size() {
+ let actions = vec![dummy_action()];
+ let result = validate_encrypted_note_sizes(&actions);
+ assert!(
+ result.is_valid(),
+ "Expected valid, got: {:?}",
+ result.errors
+ );
+ }
+
+ #[test]
+ fn validate_encrypted_note_sizes_should_accept_multiple_correct_actions() {
+ let actions = vec![dummy_action(); 3];
+ let result = validate_encrypted_note_sizes(&actions);
+ assert!(
+ result.is_valid(),
+ "Expected valid, got: {:?}",
+ result.errors
+ );
+ }
+
+ #[test]
+ fn validate_encrypted_note_sizes_should_reject_too_short() {
+ let mut action = dummy_action();
+ action.encrypted_note = vec![4u8; 100]; // Too short
+ let actions = vec![action];
+ let result = validate_encrypted_note_sizes(&actions);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(e)
+ )] => {
+ assert_eq!(e.expected_size(), ENCRYPTED_NOTE_SIZE as u32);
+ assert_eq!(e.actual_size(), 100);
+ }
+ );
+ }
+
+ #[test]
+ fn validate_encrypted_note_sizes_should_reject_too_long() {
+ let mut action = dummy_action();
+ action.encrypted_note = vec![4u8; 300]; // Too long
+ let actions = vec![action];
+ let result = validate_encrypted_note_sizes(&actions);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(e)
+ )] => {
+ assert_eq!(e.expected_size(), ENCRYPTED_NOTE_SIZE as u32);
+ assert_eq!(e.actual_size(), 300);
+ }
+ );
+ }
+
+ #[test]
+ fn validate_encrypted_note_sizes_should_reject_empty() {
+ let mut action = dummy_action();
+ action.encrypted_note = vec![]; // Empty
+ let actions = vec![action];
+ let result = validate_encrypted_note_sizes(&actions);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(e)
+ )] => {
+ assert_eq!(e.expected_size(), ENCRYPTED_NOTE_SIZE as u32);
+ assert_eq!(e.actual_size(), 0);
+ }
+ );
+ }
+
+ #[test]
+ fn validate_encrypted_note_sizes_should_reject_second_invalid_action() {
+ let good_action = dummy_action();
+ let mut bad_action = dummy_action();
+ bad_action.encrypted_note = vec![4u8; 100];
+ let actions = vec![good_action, bad_action];
+ let result = validate_encrypted_note_sizes(&actions);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(_)
+ )]
+ );
+ }
+
+ #[test]
+ fn validate_encrypted_note_sizes_should_accept_empty_actions_list() {
+ let result = validate_encrypted_note_sizes(&[]);
+ assert!(
+ result.is_valid(),
+ "Expected valid for empty actions list, got: {:?}",
+ result.errors
+ );
+ }
}
diff --git a/packages/rs-dpp/src/state_transition/state_transitions/shielded/mod.rs b/packages/rs-dpp/src/state_transition/state_transitions/shielded/mod.rs
index cb79e3f690f..6a8cafc1932 100644
--- a/packages/rs-dpp/src/state_transition/state_transitions/shielded/mod.rs
+++ b/packages/rs-dpp/src/state_transition/state_transitions/shielded/mod.rs
@@ -1,4 +1,4 @@
-pub(crate) mod common_validation;
+pub mod common_validation;
pub mod shield_from_asset_lock_transition;
pub mod shield_transition;
pub mod shielded_transfer_transition;
diff --git a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_from_asset_lock_transition/v0/state_transition_validation.rs b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_from_asset_lock_transition/v0/state_transition_validation.rs
index eb00a805dee..063061a4b4f 100644
--- a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_from_asset_lock_transition/v0/state_transition_validation.rs
+++ b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_from_asset_lock_transition/v0/state_transition_validation.rs
@@ -2,7 +2,8 @@ use crate::consensus::basic::state_transition::ShieldedInvalidValueBalanceError;
use crate::consensus::basic::BasicError;
use crate::state_transition::shield_from_asset_lock_transition::v0::ShieldFromAssetLockTransitionV0;
use crate::state_transition::state_transitions::shielded::common_validation::{
- validate_actions_count, validate_anchor_not_zero, validate_proof_not_empty,
+ validate_actions_count, validate_anchor_not_zero, validate_encrypted_note_sizes,
+ validate_proof_not_empty,
};
use crate::state_transition::StateTransitionStructureValidation;
use crate::validation::SimpleConsensusValidationResult;
@@ -24,6 +25,12 @@ impl StateTransitionStructureValidation for ShieldFromAssetLockTransitionV0 {
return result;
}
+ // Each action's encrypted_note must be exactly ENCRYPTED_NOTE_SIZE bytes
+ let result = validate_encrypted_note_sizes(&self.actions);
+ if !result.is_valid() {
+ return result;
+ }
+
// value_balance must be > 0 (credits flowing into pool)
if self.value_balance == 0 {
return SimpleConsensusValidationResult::new_with_error(
@@ -114,6 +121,21 @@ mod tests {
);
}
+ #[test]
+ fn should_reject_invalid_encrypted_note_size() {
+ let platform_version = PlatformVersion::latest();
+ let mut transition = valid_shield_from_asset_lock_transition();
+ transition.actions[0].encrypted_note = vec![4u8; 100]; // Wrong size
+
+ let result = transition.validate_structure(platform_version);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(_)
+ )]
+ );
+ }
+
#[test]
fn should_reject_empty_actions() {
let platform_version = PlatformVersion::latest();
diff --git a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_transition/v0/state_transition_validation.rs b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_transition/v0/state_transition_validation.rs
index 1b92ffad1c9..1ee250576fc 100644
--- a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_transition/v0/state_transition_validation.rs
+++ b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shield_transition/v0/state_transition_validation.rs
@@ -6,7 +6,8 @@ use crate::consensus::basic::state_transition::{
use crate::consensus::basic::BasicError;
use crate::state_transition::shield_transition::v0::ShieldTransitionV0;
use crate::state_transition::state_transitions::shielded::common_validation::{
- validate_actions_count, validate_anchor_not_zero, validate_proof_not_empty,
+ validate_actions_count, validate_anchor_not_zero, validate_encrypted_note_sizes,
+ validate_proof_not_empty,
};
use crate::state_transition::StateTransitionStructureValidation;
use crate::validation::SimpleConsensusValidationResult;
@@ -29,6 +30,12 @@ impl StateTransitionStructureValidation for ShieldTransitionV0 {
return result;
}
+ // Each action's encrypted_note must be exactly ENCRYPTED_NOTE_SIZE bytes
+ let result = validate_encrypted_note_sizes(&self.actions);
+ if !result.is_valid() {
+ return result;
+ }
+
// Inputs must not be empty (shield requires address funding)
if self.inputs.is_empty() {
return SimpleConsensusValidationResult::new_with_error(
@@ -218,6 +225,21 @@ mod tests {
);
}
+ #[test]
+ fn should_reject_invalid_encrypted_note_size() {
+ let platform_version = PlatformVersion::latest();
+ let mut transition = valid_shield_transition();
+ transition.actions[0].encrypted_note = vec![4u8; 100]; // Wrong size
+
+ let result = transition.validate_structure(platform_version);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(_)
+ )]
+ );
+ }
+
#[test]
fn should_reject_empty_actions() {
let platform_version = PlatformVersion::latest();
diff --git a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_transfer_transition/v0/state_transition_validation.rs b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_transfer_transition/v0/state_transition_validation.rs
index f034e9ea8ba..8a331ae5d50 100644
--- a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_transfer_transition/v0/state_transition_validation.rs
+++ b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_transfer_transition/v0/state_transition_validation.rs
@@ -2,7 +2,8 @@ use crate::consensus::basic::state_transition::ShieldedInvalidValueBalanceError;
use crate::consensus::basic::BasicError;
use crate::state_transition::shielded_transfer_transition::v0::ShieldedTransferTransitionV0;
use crate::state_transition::state_transitions::shielded::common_validation::{
- validate_actions_count, validate_anchor_not_zero, validate_proof_not_empty,
+ validate_actions_count, validate_anchor_not_zero, validate_encrypted_note_sizes,
+ validate_proof_not_empty,
};
use crate::state_transition::StateTransitionStructureValidation;
use crate::validation::SimpleConsensusValidationResult;
@@ -24,6 +25,12 @@ impl StateTransitionStructureValidation for ShieldedTransferTransitionV0 {
return result;
}
+ // Each action's encrypted_note must be exactly ENCRYPTED_NOTE_SIZE bytes
+ let result = validate_encrypted_note_sizes(&self.actions);
+ if !result.is_valid() {
+ return result;
+ }
+
// value_balance must be positive (it IS the fee for shielded transfers)
if self.value_balance == 0 {
return SimpleConsensusValidationResult::new_with_error(
@@ -103,6 +110,21 @@ mod tests {
);
}
+ #[test]
+ fn should_reject_invalid_encrypted_note_size() {
+ let platform_version = PlatformVersion::latest();
+ let mut transition = valid_shielded_transfer_transition();
+ transition.actions[0].encrypted_note = vec![4u8; 100]; // Wrong size
+
+ let result = transition.validate_structure(platform_version);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(_)
+ )]
+ );
+ }
+
#[test]
fn should_reject_empty_actions() {
let platform_version = PlatformVersion::latest();
diff --git a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_withdrawal_transition/v0/state_transition_validation.rs b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_withdrawal_transition/v0/state_transition_validation.rs
index a5c39469f0e..8f725c9a067 100644
--- a/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_withdrawal_transition/v0/state_transition_validation.rs
+++ b/packages/rs-dpp/src/state_transition/state_transitions/shielded/shielded_withdrawal_transition/v0/state_transition_validation.rs
@@ -2,7 +2,8 @@ use crate::consensus::basic::state_transition::ShieldedInvalidValueBalanceError;
use crate::consensus::basic::BasicError;
use crate::state_transition::shielded_withdrawal_transition::v0::ShieldedWithdrawalTransitionV0;
use crate::state_transition::state_transitions::shielded::common_validation::{
- validate_actions_count, validate_anchor_not_zero, validate_proof_not_empty,
+ validate_actions_count, validate_anchor_not_zero, validate_encrypted_note_sizes,
+ validate_proof_not_empty,
};
use crate::state_transition::StateTransitionStructureValidation;
use crate::validation::SimpleConsensusValidationResult;
@@ -24,6 +25,12 @@ impl StateTransitionStructureValidation for ShieldedWithdrawalTransitionV0 {
return result;
}
+ // Each action's encrypted_note must be exactly ENCRYPTED_NOTE_SIZE bytes
+ let result = validate_encrypted_note_sizes(&self.actions);
+ if !result.is_valid() {
+ return result;
+ }
+
// unshielding_amount must be positive and within i64::MAX
if self.unshielding_amount == 0 {
return SimpleConsensusValidationResult::new_with_error(
@@ -108,6 +115,21 @@ mod tests {
);
}
+ #[test]
+ fn should_reject_invalid_encrypted_note_size() {
+ let platform_version = PlatformVersion::latest();
+ let mut transition = valid_shielded_withdrawal_transition();
+ transition.actions[0].encrypted_note = vec![4u8; 100]; // Wrong size
+
+ let result = transition.validate_structure(platform_version);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(_)
+ )]
+ );
+ }
+
#[test]
fn should_reject_empty_actions() {
let platform_version = PlatformVersion::latest();
diff --git a/packages/rs-dpp/src/state_transition/state_transitions/shielded/unshield_transition/v0/state_transition_validation.rs b/packages/rs-dpp/src/state_transition/state_transitions/shielded/unshield_transition/v0/state_transition_validation.rs
index 04a34b616e3..be888647a38 100644
--- a/packages/rs-dpp/src/state_transition/state_transitions/shielded/unshield_transition/v0/state_transition_validation.rs
+++ b/packages/rs-dpp/src/state_transition/state_transitions/shielded/unshield_transition/v0/state_transition_validation.rs
@@ -1,7 +1,8 @@
use crate::consensus::basic::state_transition::ShieldedInvalidValueBalanceError;
use crate::consensus::basic::BasicError;
use crate::state_transition::state_transitions::shielded::common_validation::{
- validate_actions_count, validate_anchor_not_zero, validate_proof_not_empty,
+ validate_actions_count, validate_anchor_not_zero, validate_encrypted_note_sizes,
+ validate_proof_not_empty,
};
use crate::state_transition::unshield_transition::v0::UnshieldTransitionV0;
use crate::state_transition::StateTransitionStructureValidation;
@@ -24,6 +25,12 @@ impl StateTransitionStructureValidation for UnshieldTransitionV0 {
return result;
}
+ // Each action's encrypted_note must be exactly ENCRYPTED_NOTE_SIZE bytes
+ let result = validate_encrypted_note_sizes(&self.actions);
+ if !result.is_valid() {
+ return result;
+ }
+
// unshielding_amount must be positive and within i64::MAX
if self.unshielding_amount == 0 {
return SimpleConsensusValidationResult::new_with_error(
@@ -104,6 +111,21 @@ mod tests {
);
}
+ #[test]
+ fn should_reject_invalid_encrypted_note_size() {
+ let platform_version = PlatformVersion::latest();
+ let mut transition = valid_unshield_transition();
+ transition.actions[0].encrypted_note = vec![4u8; 100]; // Wrong size
+
+ let result = transition.validate_structure(platform_version);
+ assert_matches!(
+ result.errors.as_slice(),
+ [ConsensusError::BasicError(
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(_)
+ )]
+ );
+ }
+
#[test]
fn should_reject_empty_actions() {
let platform_version = PlatformVersion::latest();
diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shield/tests.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shield/tests.rs
index 83bff25d816..c1c68667c4c 100644
--- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shield/tests.rs
+++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shield/tests.rs
@@ -876,12 +876,14 @@ mod tests {
let processing_result = process_transition(&platform, transition, platform_version);
- // The encrypted_note size check happens in reconstruct_and_verify_bundle,
- // which now runs at the processor level before state validation.
+ // The encrypted_note size check now happens in DPP structure validation
+ // (before reaching proof verification), returning a BasicError.
assert_matches!(
processing_result.execution_results().as_slice(),
[StateTransitionExecutionResult::UnpaidConsensusError(
- ConsensusError::StateError(StateError::InvalidShieldedProofError(_))
+ ConsensusError::BasicError(BasicError::ShieldedEncryptedNoteSizeMismatchError(
+ _
+ ))
)]
);
}
diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_common/mod.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_common/mod.rs
index 7fbac539149..5fe667a753f 100644
--- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_common/mod.rs
+++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_common/mod.rs
@@ -47,7 +47,15 @@ pub fn warmup_shielded_verifying_key() {
const EPK_SIZE: usize = 32;
const ENC_CIPHERTEXT_SIZE: usize = 104;
const OUT_CIPHERTEXT_SIZE: usize = 80;
-const ENCRYPTED_NOTE_SIZE: usize = EPK_SIZE + ENC_CIPHERTEXT_SIZE + OUT_CIPHERTEXT_SIZE; // 216
+
+// Import the canonical constant from DPP (single source of truth).
+use dpp::state_transition::state_transitions::shielded::common_validation::ENCRYPTED_NOTE_SIZE;
+
+// Compile-time check: component sizes must sum to the canonical constant.
+const _: () = assert!(
+ EPK_SIZE + ENC_CIPHERTEXT_SIZE + OUT_CIPHERTEXT_SIZE == ENCRYPTED_NOTE_SIZE,
+ "component sizes diverged from ENCRYPTED_NOTE_SIZE"
+);
/// Reconstructs an orchard `Bundle` from the serialized fields
/// of a shielded state transition and verifies the Halo 2 ZK proof along with
diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_transfer/tests.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_transfer/tests.rs
index 98fa684d345..d700debd995 100644
--- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_transfer/tests.rs
+++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_transfer/tests.rs
@@ -394,10 +394,13 @@ mod tests {
let processing_result = process_transition(&platform, transition, platform_version);
+ // DPP structure validation now catches this before proof verification
assert_matches!(
processing_result.execution_results().as_slice(),
[StateTransitionExecutionResult::UnpaidConsensusError(
- ConsensusError::StateError(StateError::InvalidShieldedProofError(_))
+ ConsensusError::BasicError(BasicError::ShieldedEncryptedNoteSizeMismatchError(
+ _
+ ))
)]
);
}
diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_withdrawal/tests.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_withdrawal/tests.rs
index b77108d13b7..328bcc774bc 100644
--- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_withdrawal/tests.rs
+++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/shielded_withdrawal/tests.rs
@@ -501,10 +501,13 @@ mod tests {
let processing_result = process_transition(&platform, transition, platform_version);
+ // DPP structure validation now catches this before proof verification
assert_matches!(
processing_result.execution_results().as_slice(),
[StateTransitionExecutionResult::UnpaidConsensusError(
- ConsensusError::StateError(StateError::InvalidShieldedProofError(_))
+ ConsensusError::BasicError(BasicError::ShieldedEncryptedNoteSizeMismatchError(
+ _
+ ))
)]
);
}
diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/unshield/tests.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/unshield/tests.rs
index a9b82340509..733cfe6df01 100644
--- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/unshield/tests.rs
+++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/unshield/tests.rs
@@ -463,10 +463,13 @@ mod tests {
let processing_result = process_transition(&platform, transition, platform_version);
+ // DPP structure validation now catches this before proof verification
assert_matches!(
processing_result.execution_results().as_slice(),
[StateTransitionExecutionResult::UnpaidConsensusError(
- ConsensusError::StateError(StateError::InvalidShieldedProofError(_))
+ ConsensusError::BasicError(BasicError::ShieldedEncryptedNoteSizeMismatchError(
+ _
+ ))
)]
);
}
diff --git a/packages/wasm-dpp/src/errors/consensus/consensus_error.rs b/packages/wasm-dpp/src/errors/consensus/consensus_error.rs
index d32586c219f..8d63701b0b5 100644
--- a/packages/wasm-dpp/src/errors/consensus/consensus_error.rs
+++ b/packages/wasm-dpp/src/errors/consensus/consensus_error.rs
@@ -93,7 +93,7 @@ use dpp::consensus::state::shielded::insufficient_shielded_fee_error::Insufficie
use dpp::consensus::state::shielded::invalid_anchor_error::InvalidAnchorError;
use dpp::consensus::state::shielded::invalid_shielded_proof_error::InvalidShieldedProofError;
use dpp::consensus::state::shielded::nullifier_already_spent_error::NullifierAlreadySpentError;
-use dpp::consensus::basic::state_transition::{StateTransitionNotActiveError, TransitionOverMaxInputsError, TransitionOverMaxOutputsError, InputWitnessCountMismatchError, TransitionNoInputsError, TransitionNoOutputsError, FeeStrategyEmptyError, FeeStrategyDuplicateError, FeeStrategyIndexOutOfBoundsError, FeeStrategyTooManyStepsError, InputBelowMinimumError, OutputBelowMinimumError, InputOutputBalanceMismatchError, OutputsNotGreaterThanInputsError, WithdrawalBalanceMismatchError, InsufficientFundingAmountError, InputsNotLessThanOutputsError, OutputAddressAlsoInputError, InvalidRemainderOutputCountError, WithdrawalBelowMinAmountError, ShieldedNoActionsError, ShieldedTooManyActionsError, ShieldedEmptyProofError, ShieldedZeroAnchorError, ShieldedInvalidValueBalanceError};
+use dpp::consensus::basic::state_transition::{StateTransitionNotActiveError, TransitionOverMaxInputsError, TransitionOverMaxOutputsError, InputWitnessCountMismatchError, TransitionNoInputsError, TransitionNoOutputsError, FeeStrategyEmptyError, FeeStrategyDuplicateError, FeeStrategyIndexOutOfBoundsError, FeeStrategyTooManyStepsError, InputBelowMinimumError, OutputBelowMinimumError, InputOutputBalanceMismatchError, OutputsNotGreaterThanInputsError, WithdrawalBalanceMismatchError, InsufficientFundingAmountError, InputsNotLessThanOutputsError, OutputAddressAlsoInputError, InvalidRemainderOutputCountError, WithdrawalBelowMinAmountError, ShieldedNoActionsError, ShieldedTooManyActionsError, ShieldedEmptyProofError, ShieldedZeroAnchorError, ShieldedInvalidValueBalanceError, ShieldedEncryptedNoteSizeMismatchError};
use dpp::consensus::state::voting::masternode_incorrect_voter_identity_id_error::MasternodeIncorrectVoterIdentityIdError;
use dpp::consensus::state::voting::masternode_incorrect_voting_address_error::MasternodeIncorrectVotingAddressError;
use dpp::consensus::state::voting::masternode_not_found_error::MasternodeNotFoundError;
@@ -956,6 +956,9 @@ fn from_basic_error(basic_error: &BasicError) -> JsValue {
BasicError::ShieldedInvalidValueBalanceError(e) => {
generic_consensus_error!(ShieldedInvalidValueBalanceError, e).into()
}
+ BasicError::ShieldedEncryptedNoteSizeMismatchError(e) => {
+ generic_consensus_error!(ShieldedEncryptedNoteSizeMismatchError, e).into()
+ }
}
}
From 7011b62c24c5aff8d735a07f48f04822d8a60a0c Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 19:50:21 +0300
Subject: [PATCH 05/40] test(platform): add 466 unit tests across 15 files for
coverage gains (#3427)
Co-authored-by: Claude Opus 4.6 (1M context)
---
.codecov.yml | 8 +
.../token_configuration_item.rs | 110 ++++
.../authorized_action_takers.rs | 484 ++++++++++++++++++
packages/rs-dpp/src/fee/fee_result/mod.rs | 333 ++++++++++++
.../identity/identity_public_key/key_type.rs | 211 ++++++++
packages/rs-dpp/src/tokens/token_event.rs | 132 +++++
.../src/tokens/token_pricing_schedule.rs | 84 +++
packages/rs-dpp/src/util/vec.rs | 213 ++++++++
.../src/validation/validation_result.rs | 414 +++++++++++++++
packages/rs-drive/src/util/common/encode.rs | 221 ++++++++
packages/rs-platform-value/src/eq.rs | 369 +++++++++++++
11 files changed, 2579 insertions(+)
diff --git a/.codecov.yml b/.codecov.yml
index 41013ceb2d4..89b413c72f5 100644
--- a/.codecov.yml
+++ b/.codecov.yml
@@ -80,6 +80,14 @@ ignore:
# Document-type property accessors — pure getter/setter trait implementations,
# same category as the state-transition accessors excluded above
- "packages/rs-dpp/src/data_contract/document_type/accessors/**"
+ # Enum type definitions — TryFrom/Display/conversion boilerplate
+ - "packages/rs-dpp/src/identity/identity_public_key/security_level.rs"
+ - "packages/rs-dpp/src/identity/identity_public_key/purpose.rs"
+ - "packages/rs-dpp/src/identity/identity_public_key/key_type.rs"
+ - "packages/rs-dpp/src/tokens/gas_fees_paid_by.rs"
+ # Value Display and string encoding — trivial formatting, not logic
+ - "packages/rs-platform-value/src/display.rs"
+ - "packages/rs-platform-value/src/string_encoding.rs"
# Core chain type wrappers — masternode entry structs, deserialization
# boilerplate, thin type aliases
- "packages/rs-dpp/src/core_types/**"
diff --git a/packages/rs-dpp/src/data_contract/associated_token/token_configuration_item.rs b/packages/rs-dpp/src/data_contract/associated_token/token_configuration_item.rs
index def46953e52..e7f0ae5652c 100644
--- a/packages/rs-dpp/src/data_contract/associated_token/token_configuration_item.rs
+++ b/packages/rs-dpp/src/data_contract/associated_token/token_configuration_item.rs
@@ -291,3 +291,113 @@ impl fmt::Display for TokenConfigurationChangeItem {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::collections::BTreeSet;
+
+ /// Helper: build one instance of every variant using default inner values.
+ fn all_variants() -> Vec {
+ let aat = AuthorizedActionTakers::NoOne;
+ vec![
+ TokenConfigurationChangeItem::TokenConfigurationNoChange,
+ TokenConfigurationChangeItem::Conventions(
+ TokenConfigurationConvention::V0(
+ crate::data_contract::associated_token::token_configuration_convention::v0::TokenConfigurationConventionV0::default(),
+ ),
+ ),
+ TokenConfigurationChangeItem::ConventionsControlGroup(aat.clone()),
+ TokenConfigurationChangeItem::ConventionsAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::MaxSupply(None),
+ TokenConfigurationChangeItem::MaxSupplyControlGroup(aat.clone()),
+ TokenConfigurationChangeItem::MaxSupplyAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::PerpetualDistribution(None),
+ TokenConfigurationChangeItem::PerpetualDistributionControlGroup(aat.clone()),
+ TokenConfigurationChangeItem::PerpetualDistributionAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::NewTokensDestinationIdentity(None),
+ TokenConfigurationChangeItem::NewTokensDestinationIdentityControlGroup(aat.clone()),
+ TokenConfigurationChangeItem::NewTokensDestinationIdentityAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::MintingAllowChoosingDestination(false),
+ TokenConfigurationChangeItem::MintingAllowChoosingDestinationControlGroup(aat.clone()),
+ TokenConfigurationChangeItem::MintingAllowChoosingDestinationAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::ManualMinting(aat.clone()),
+ TokenConfigurationChangeItem::ManualMintingAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::ManualBurning(aat.clone()),
+ TokenConfigurationChangeItem::ManualBurningAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::Freeze(aat.clone()),
+ TokenConfigurationChangeItem::FreezeAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::Unfreeze(aat.clone()),
+ TokenConfigurationChangeItem::UnfreezeAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::DestroyFrozenFunds(aat.clone()),
+ TokenConfigurationChangeItem::DestroyFrozenFundsAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::EmergencyAction(aat.clone()),
+ TokenConfigurationChangeItem::EmergencyActionAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::MarketplaceTradeMode(TokenTradeMode::default()),
+ TokenConfigurationChangeItem::MarketplaceTradeModeControlGroup(aat.clone()),
+ TokenConfigurationChangeItem::MarketplaceTradeModeAdminGroup(aat.clone()),
+ TokenConfigurationChangeItem::MainControlGroup(None),
+ ]
+ }
+
+ // ---- u8_item_index returns unique values 0..=31 ----
+
+ #[test]
+ fn u8_item_index_values_are_unique() {
+ let variants = all_variants();
+ let indices: Vec = variants.iter().map(|v| v.u8_item_index()).collect();
+ let unique: BTreeSet = indices.iter().cloned().collect();
+ assert_eq!(
+ indices.len(),
+ unique.len(),
+ "Duplicate u8_item_index values found: {:?}",
+ indices
+ );
+ }
+
+ #[test]
+ fn u8_item_index_covers_0_through_31() {
+ let variants = all_variants();
+ let indices: BTreeSet = variants.iter().map(|v| v.u8_item_index()).collect();
+ for i in 0u8..=31 {
+ assert!(indices.contains(&i), "Missing u8_item_index value: {}", i);
+ }
+ }
+
+ #[test]
+ fn u8_item_index_all_within_range() {
+ let variants = all_variants();
+ for v in &variants {
+ let idx = v.u8_item_index();
+ assert!(idx <= 31, "Index {} exceeds expected max of 31", idx);
+ }
+ }
+
+ #[test]
+ fn u8_item_index_specific_known_values() {
+ assert_eq!(
+ TokenConfigurationChangeItem::TokenConfigurationNoChange.u8_item_index(),
+ 0
+ );
+ assert_eq!(
+ TokenConfigurationChangeItem::MaxSupply(Some(100)).u8_item_index(),
+ 4
+ );
+ assert_eq!(
+ TokenConfigurationChangeItem::ManualMinting(AuthorizedActionTakers::NoOne)
+ .u8_item_index(),
+ 16
+ );
+ assert_eq!(
+ TokenConfigurationChangeItem::MainControlGroup(Some(5)).u8_item_index(),
+ 31
+ );
+ }
+
+ #[test]
+ fn u8_item_index_variant_count() {
+ // We expect exactly 32 variants (indices 0..=31)
+ let variants = all_variants();
+ assert_eq!(variants.len(), 32);
+ }
+}
diff --git a/packages/rs-dpp/src/data_contract/change_control_rules/authorized_action_takers.rs b/packages/rs-dpp/src/data_contract/change_control_rules/authorized_action_takers.rs
index a980d0549c3..cf0a4cd0c93 100644
--- a/packages/rs-dpp/src/data_contract/change_control_rules/authorized_action_takers.rs
+++ b/packages/rs-dpp/src/data_contract/change_control_rules/authorized_action_takers.rs
@@ -202,3 +202,487 @@ impl AuthorizedActionTakers {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::data_contract::group::v0::GroupV0;
+ use std::collections::BTreeSet;
+
+ fn make_id(byte: u8) -> Identifier {
+ Identifier::from([byte; 32])
+ }
+
+ fn make_group(members: Vec<(Identifier, u32)>, required_power: u32) -> Group {
+ Group::V0(GroupV0 {
+ members: members.into_iter().collect(),
+ required_power,
+ })
+ }
+
+ // --- Display tests ---
+
+ #[test]
+ fn display_no_one() {
+ assert_eq!(format!("{}", AuthorizedActionTakers::NoOne), "NoOne");
+ }
+
+ #[test]
+ fn display_contract_owner() {
+ assert_eq!(
+ format!("{}", AuthorizedActionTakers::ContractOwner),
+ "ContractOwner"
+ );
+ }
+
+ #[test]
+ fn display_main_group() {
+ assert_eq!(
+ format!("{}", AuthorizedActionTakers::MainGroup),
+ "MainGroup"
+ );
+ }
+
+ #[test]
+ fn display_group_position() {
+ assert_eq!(
+ format!("{}", AuthorizedActionTakers::Group(42)),
+ "Group(Position: 42)"
+ );
+ }
+
+ #[test]
+ fn display_identity() {
+ let id = make_id(0xAB);
+ let display = format!("{}", AuthorizedActionTakers::Identity(id));
+ assert!(display.starts_with("Identity("));
+ }
+
+ // --- to_bytes / from_bytes round-trip tests ---
+
+ #[test]
+ fn round_trip_no_one() {
+ let original = AuthorizedActionTakers::NoOne;
+ let bytes = original.to_bytes();
+ assert_eq!(bytes, vec![0]);
+ let recovered = AuthorizedActionTakers::from_bytes(&bytes).unwrap();
+ assert_eq!(original, recovered);
+ }
+
+ #[test]
+ fn round_trip_contract_owner() {
+ let original = AuthorizedActionTakers::ContractOwner;
+ let bytes = original.to_bytes();
+ assert_eq!(bytes, vec![1]);
+ let recovered = AuthorizedActionTakers::from_bytes(&bytes).unwrap();
+ assert_eq!(original, recovered);
+ }
+
+ #[test]
+ fn round_trip_identity() {
+ let id = make_id(0x42);
+ let original = AuthorizedActionTakers::Identity(id);
+ let bytes = original.to_bytes();
+ assert_eq!(bytes.len(), 33); // 1 tag + 32 identifier
+ assert_eq!(bytes[0], 2);
+ let recovered = AuthorizedActionTakers::from_bytes(&bytes).unwrap();
+ assert_eq!(original, recovered);
+ }
+
+ #[test]
+ fn round_trip_main_group() {
+ let original = AuthorizedActionTakers::MainGroup;
+ let bytes = original.to_bytes();
+ assert_eq!(bytes, vec![3]);
+ let recovered = AuthorizedActionTakers::from_bytes(&bytes).unwrap();
+ assert_eq!(original, recovered);
+ }
+
+ #[test]
+ fn round_trip_group() {
+ let original = AuthorizedActionTakers::Group(1000);
+ let bytes = original.to_bytes();
+ assert_eq!(bytes.len(), 3); // 1 tag + 2 for u16
+ assert_eq!(bytes[0], 4);
+ let recovered = AuthorizedActionTakers::from_bytes(&bytes).unwrap();
+ assert_eq!(original, recovered);
+ }
+
+ #[test]
+ fn round_trip_group_max_position() {
+ let original = AuthorizedActionTakers::Group(u16::MAX);
+ let bytes = original.to_bytes();
+ let recovered = AuthorizedActionTakers::from_bytes(&bytes).unwrap();
+ assert_eq!(original, recovered);
+ }
+
+ // --- from_bytes error path tests ---
+
+ #[test]
+ fn from_bytes_empty_returns_error() {
+ let result = AuthorizedActionTakers::from_bytes(&[]);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn from_bytes_unknown_tag_returns_error() {
+ let result = AuthorizedActionTakers::from_bytes(&[5]);
+ assert!(result.is_err());
+ let result = AuthorizedActionTakers::from_bytes(&[255]);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn from_bytes_identity_wrong_length_returns_error() {
+ // tag 2 needs exactly 33 bytes total
+ let short = vec![2; 10]; // only 10 bytes
+ let result = AuthorizedActionTakers::from_bytes(&short);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn from_bytes_group_wrong_length_returns_error() {
+ // tag 4 needs exactly 3 bytes total
+ let short = vec![4, 0]; // only 2 bytes
+ let result = AuthorizedActionTakers::from_bytes(&short);
+ assert!(result.is_err());
+
+ let long = vec![4, 0, 0, 0]; // 4 bytes
+ let result = AuthorizedActionTakers::from_bytes(&long);
+ assert!(result.is_err());
+ }
+
+ // --- allowed_for_action_taker tests ---
+
+ #[test]
+ fn no_one_always_returns_false() {
+ let aat = AuthorizedActionTakers::NoOne;
+ let owner = make_id(1);
+ let taker = ActionTaker::SingleIdentity(owner);
+ assert!(!aat.allowed_for_action_taker(
+ &owner,
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn contract_owner_allows_matching_single_identity() {
+ let aat = AuthorizedActionTakers::ContractOwner;
+ let owner = make_id(1);
+ let taker = ActionTaker::SingleIdentity(owner);
+ assert!(aat.allowed_for_action_taker(
+ &owner,
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn contract_owner_rejects_non_matching_single_identity() {
+ let aat = AuthorizedActionTakers::ContractOwner;
+ let owner = make_id(1);
+ let other = make_id(2);
+ let taker = ActionTaker::SingleIdentity(other);
+ assert!(!aat.allowed_for_action_taker(
+ &owner,
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn contract_owner_rejects_action_participation() {
+ let aat = AuthorizedActionTakers::ContractOwner;
+ let owner = make_id(1);
+ let taker = ActionTaker::SingleIdentity(owner);
+ assert!(!aat.allowed_for_action_taker(
+ &owner,
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionParticipation,
+ ));
+ }
+
+ #[test]
+ fn contract_owner_allows_specified_identities_containing_owner() {
+ let aat = AuthorizedActionTakers::ContractOwner;
+ let owner = make_id(1);
+ let mut set = BTreeSet::new();
+ set.insert(owner);
+ set.insert(make_id(2));
+ let taker = ActionTaker::SpecifiedIdentities(set);
+ assert!(aat.allowed_for_action_taker(
+ &owner,
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn identity_allows_matching_identity() {
+ let authorized_id = make_id(5);
+ let aat = AuthorizedActionTakers::Identity(authorized_id);
+ let taker = ActionTaker::SingleIdentity(authorized_id);
+ assert!(aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn identity_rejects_non_matching_identity() {
+ let authorized_id = make_id(5);
+ let aat = AuthorizedActionTakers::Identity(authorized_id);
+ let taker = ActionTaker::SingleIdentity(make_id(6));
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn identity_rejects_action_participation() {
+ let authorized_id = make_id(5);
+ let aat = AuthorizedActionTakers::Identity(authorized_id);
+ let taker = ActionTaker::SingleIdentity(authorized_id);
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionParticipation,
+ ));
+ }
+
+ #[test]
+ fn group_allows_single_member_with_enough_power() {
+ let member = make_id(10);
+ let group = make_group(vec![(member, 100)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let aat = AuthorizedActionTakers::Group(0);
+ let taker = ActionTaker::SingleIdentity(member);
+ assert!(aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &groups,
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn group_rejects_single_member_with_insufficient_power() {
+ let member = make_id(10);
+ let group = make_group(vec![(member, 10)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let aat = AuthorizedActionTakers::Group(0);
+ let taker = ActionTaker::SingleIdentity(member);
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &groups,
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn group_allows_participation_for_member() {
+ let member = make_id(10);
+ let group = make_group(vec![(member, 10)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let aat = AuthorizedActionTakers::Group(0);
+ let taker = ActionTaker::SingleIdentity(member);
+ assert!(aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &groups,
+ &taker,
+ ActionGoal::ActionParticipation,
+ ));
+ }
+
+ #[test]
+ fn group_rejects_participation_for_non_member() {
+ let member = make_id(10);
+ let non_member = make_id(11);
+ let group = make_group(vec![(member, 10)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let aat = AuthorizedActionTakers::Group(0);
+ let taker = ActionTaker::SingleIdentity(non_member);
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &groups,
+ &taker,
+ ActionGoal::ActionParticipation,
+ ));
+ }
+
+ #[test]
+ fn group_rejects_when_group_not_found() {
+ let aat = AuthorizedActionTakers::Group(99);
+ let taker = ActionTaker::SingleIdentity(make_id(10));
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn group_allows_specified_identities_with_enough_combined_power() {
+ let member_a = make_id(10);
+ let member_b = make_id(11);
+ let group = make_group(vec![(member_a, 30), (member_b, 30)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let mut set = BTreeSet::new();
+ set.insert(member_a);
+ set.insert(member_b);
+ let taker = ActionTaker::SpecifiedIdentities(set);
+
+ let aat = AuthorizedActionTakers::Group(0);
+ assert!(aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &groups,
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn group_rejects_specified_identities_with_insufficient_combined_power() {
+ let member_a = make_id(10);
+ let member_b = make_id(11);
+ let group = make_group(vec![(member_a, 10), (member_b, 10)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let mut set = BTreeSet::new();
+ set.insert(member_a);
+ set.insert(member_b);
+ let taker = ActionTaker::SpecifiedIdentities(set);
+
+ let aat = AuthorizedActionTakers::Group(0);
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &groups,
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn main_group_allows_when_main_group_exists_and_power_sufficient() {
+ let member = make_id(10);
+ let group = make_group(vec![(member, 100)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(7u16, group);
+
+ let aat = AuthorizedActionTakers::MainGroup;
+ let taker = ActionTaker::SingleIdentity(member);
+ assert!(aat.allowed_for_action_taker(
+ &make_id(1),
+ Some(7),
+ &groups,
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn main_group_rejects_when_no_main_group_position() {
+ let aat = AuthorizedActionTakers::MainGroup;
+ let taker = ActionTaker::SingleIdentity(make_id(10));
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn main_group_rejects_when_group_not_in_map() {
+ let aat = AuthorizedActionTakers::MainGroup;
+ let taker = ActionTaker::SingleIdentity(make_id(10));
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ Some(99),
+ &BTreeMap::new(),
+ &taker,
+ ActionGoal::ActionCompletion,
+ ));
+ }
+
+ #[test]
+ fn main_group_participation_allows_member() {
+ let member = make_id(10);
+ let group = make_group(vec![(member, 10)], 100);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let aat = AuthorizedActionTakers::MainGroup;
+ let taker = ActionTaker::SingleIdentity(member);
+ assert!(aat.allowed_for_action_taker(
+ &make_id(1),
+ Some(0),
+ &groups,
+ &taker,
+ ActionGoal::ActionParticipation,
+ ));
+ }
+
+ #[test]
+ fn participation_rejects_specified_identities() {
+ let member = make_id(10);
+ let group = make_group(vec![(member, 10)], 50);
+ let mut groups = BTreeMap::new();
+ groups.insert(0u16, group);
+
+ let mut set = BTreeSet::new();
+ set.insert(member);
+ let taker = ActionTaker::SpecifiedIdentities(set);
+
+ let aat = AuthorizedActionTakers::Group(0);
+ // is_action_taker_participant returns false for SpecifiedIdentities
+ assert!(!aat.allowed_for_action_taker(
+ &make_id(1),
+ None,
+ &groups,
+ &taker,
+ ActionGoal::ActionParticipation,
+ ));
+ }
+}
diff --git a/packages/rs-dpp/src/fee/fee_result/mod.rs b/packages/rs-dpp/src/fee/fee_result/mod.rs
index 0be1214e844..010cc04bbdb 100644
--- a/packages/rs-dpp/src/fee/fee_result/mod.rs
+++ b/packages/rs-dpp/src/fee/fee_result/mod.rs
@@ -280,3 +280,336 @@ impl FeeResult {
Ok(())
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::consensus::fee::fee_error::FeeError;
+ use crate::fee::epoch::CreditsPerEpoch;
+ use crate::fee::fee_result::refunds::{CreditsPerEpochByIdentifier, FeeRefunds};
+
+ fn make_id(byte: u8) -> Identifier {
+ Identifier::from([byte; 32])
+ }
+
+ /// Build a FeeRefunds that gives `credits` to `identity_id` (all in epoch 0).
+ fn fee_refunds_for_identity(identity_id: Identifier, credits: Credits) -> FeeRefunds {
+ let mut credits_per_epoch = CreditsPerEpoch::default();
+ credits_per_epoch.insert(0, credits);
+ let mut map = CreditsPerEpochByIdentifier::new();
+ map.insert(*identity_id.as_bytes(), credits_per_epoch);
+ FeeRefunds(map)
+ }
+
+ // --- BalanceChangeForIdentity::change() ---
+
+ #[test]
+ fn balance_change_for_identity_change_returns_correct_ref() {
+ let id = make_id(1);
+ let fee_result = FeeResult::default_with_fees(100, 50);
+ let bci = fee_result.into_balance_change(id);
+ // No refunds, so it should be RemoveFromBalance
+ match bci.change() {
+ BalanceChange::RemoveFromBalance {
+ required_removed_balance,
+ desired_removed_balance,
+ } => {
+ assert_eq!(*required_removed_balance, 100);
+ assert_eq!(*desired_removed_balance, 150);
+ }
+ other => panic!("Expected RemoveFromBalance, got {:?}", other),
+ }
+ }
+
+ // --- BalanceChangeForIdentity::other_refunds() ---
+
+ #[test]
+ fn other_refunds_empty_when_no_refunds() {
+ let id = make_id(1);
+ let fee_result = FeeResult::default_with_fees(100, 50);
+ let bci = fee_result.into_balance_change(id);
+ let refunds = bci.other_refunds();
+ assert!(refunds.is_empty());
+ }
+
+ #[test]
+ fn other_refunds_excludes_own_identity() {
+ let id = make_id(1);
+ let other_id = make_id(2);
+ // Build refunds for both identities
+ let mut credits_per_epoch_self = CreditsPerEpoch::default();
+ credits_per_epoch_self.insert(0, 200);
+ let mut credits_per_epoch_other = CreditsPerEpoch::default();
+ credits_per_epoch_other.insert(0, 300);
+ let mut map = CreditsPerEpochByIdentifier::new();
+ map.insert(*id.as_bytes(), credits_per_epoch_self);
+ map.insert(*other_id.as_bytes(), credits_per_epoch_other);
+ let refunds = FeeRefunds(map);
+
+ let fee_result = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds,
+ removed_bytes_from_system: 0,
+ };
+ let bci = fee_result.into_balance_change(id);
+ let other = bci.other_refunds();
+ assert_eq!(other.len(), 1);
+ assert_eq!(*other.get(&other_id).unwrap(), 300);
+ }
+
+ // --- BalanceChangeForIdentity::into_fee_result() ---
+
+ #[test]
+ fn into_fee_result_preserves_original() {
+ let fee_result = FeeResult {
+ storage_fee: 42,
+ processing_fee: 58,
+ fee_refunds: FeeRefunds::default(),
+ removed_bytes_from_system: 10,
+ };
+ let id = make_id(1);
+ let bci = fee_result.clone().into_balance_change(id);
+ let recovered = bci.into_fee_result();
+ assert_eq!(recovered.storage_fee, 42);
+ assert_eq!(recovered.processing_fee, 58);
+ assert_eq!(recovered.removed_bytes_from_system, 10);
+ }
+
+ // --- BalanceChangeForIdentity::fee_result_outcome() ---
+
+ #[test]
+ fn fee_result_outcome_add_to_balance_returns_fee_result() {
+ let id = make_id(1);
+ // Refund more than storage + processing so we get AddToBalance
+ let refunds = fee_refunds_for_identity(id, 500);
+ let fee_result = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds,
+ removed_bytes_from_system: 0,
+ };
+ let bci = fee_result.into_balance_change(id);
+ match bci.change() {
+ BalanceChange::AddToBalance(amount) => assert_eq!(*amount, 350),
+ other => panic!("Expected AddToBalance, got {:?}", other),
+ }
+ // Cannot access change after move, re-create
+ let refunds2 = fee_refunds_for_identity(id, 500);
+ let fee_result2 = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds2,
+ removed_bytes_from_system: 0,
+ };
+ let bci2 = fee_result2.into_balance_change(id);
+ let result: Result = bci2.fee_result_outcome(0);
+ assert!(result.is_ok());
+ }
+
+ #[test]
+ fn fee_result_outcome_remove_balance_sufficient_desired() {
+ let id = make_id(1);
+ let fee_result = FeeResult::default_with_fees(100, 50);
+ let bci = fee_result.into_balance_change(id);
+ // User has enough for desired_removed_balance (150)
+ let result: Result = bci.fee_result_outcome(200);
+ let fr = result.unwrap();
+ assert_eq!(fr.storage_fee, 100);
+ assert_eq!(fr.processing_fee, 50);
+ }
+
+ #[test]
+ fn fee_result_outcome_remove_balance_sufficient_required_but_not_desired() {
+ let id = make_id(1);
+ let fee_result = FeeResult::default_with_fees(100, 50);
+ let bci = fee_result.into_balance_change(id);
+ // User has 120: enough for required (100) but not desired (150)
+ let result: Result = bci.fee_result_outcome(120);
+ let fr = result.unwrap();
+ assert_eq!(fr.storage_fee, 100);
+ // processing_fee should be reduced by (desired - user_balance) = 150 - 120 = 30
+ assert_eq!(fr.processing_fee, 20);
+ }
+
+ #[test]
+ fn fee_result_outcome_remove_balance_insufficient_returns_error() {
+ let id = make_id(1);
+ let fee_result = FeeResult::default_with_fees(100, 50);
+ let bci = fee_result.into_balance_change(id);
+ // User has less than required (100)
+ let result: Result = bci.fee_result_outcome(50);
+ assert!(result.is_err());
+ match result.unwrap_err() {
+ FeeError::BalanceIsNotEnoughError(e) => {
+ assert_eq!(e.balance(), 50);
+ assert_eq!(e.fee(), 100);
+ }
+ }
+ }
+
+ #[test]
+ fn fee_result_outcome_no_balance_change_returns_fee_result() {
+ let id = make_id(1);
+ // Refund exactly storage + processing = 150
+ let refunds = fee_refunds_for_identity(id, 150);
+ let fee_result = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds,
+ removed_bytes_from_system: 0,
+ };
+ let bci = fee_result.into_balance_change(id);
+ match bci.change() {
+ BalanceChange::NoBalanceChange => {}
+ other => panic!("Expected NoBalanceChange, got {:?}", other),
+ }
+ // Re-create for outcome check
+ let refunds2 = fee_refunds_for_identity(id, 150);
+ let fee_result2 = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds2,
+ removed_bytes_from_system: 0,
+ };
+ let bci2 = fee_result2.into_balance_change(id);
+ let result: Result = bci2.fee_result_outcome(0);
+ assert!(result.is_ok());
+ }
+
+ // --- FeeResult::into_balance_change() with 3 ordering branches ---
+
+ #[test]
+ fn into_balance_change_less_refund_than_fees() {
+ let id = make_id(1);
+ // Refund 50, but storage=100 processing=50 total=150
+ let refunds = fee_refunds_for_identity(id, 50);
+ let fee_result = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds,
+ removed_bytes_from_system: 0,
+ };
+ let bci = fee_result.into_balance_change(id);
+ match bci.change() {
+ BalanceChange::RemoveFromBalance {
+ required_removed_balance,
+ desired_removed_balance,
+ } => {
+ // required = max(0, 100 - 50) = 50
+ assert_eq!(*required_removed_balance, 50);
+ // desired = 150 - 50 = 100
+ assert_eq!(*desired_removed_balance, 100);
+ }
+ other => panic!("Expected RemoveFromBalance, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn into_balance_change_refund_equals_fees() {
+ let id = make_id(1);
+ let refunds = fee_refunds_for_identity(id, 150);
+ let fee_result = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds,
+ removed_bytes_from_system: 0,
+ };
+ let bci = fee_result.into_balance_change(id);
+ assert_eq!(bci.change(), &BalanceChange::NoBalanceChange);
+ }
+
+ #[test]
+ fn into_balance_change_refund_greater_than_fees() {
+ let id = make_id(1);
+ let refunds = fee_refunds_for_identity(id, 300);
+ let fee_result = FeeResult {
+ storage_fee: 100,
+ processing_fee: 50,
+ fee_refunds: refunds,
+ removed_bytes_from_system: 0,
+ };
+ let bci = fee_result.into_balance_change(id);
+ match bci.change() {
+ BalanceChange::AddToBalance(amount) => {
+ assert_eq!(*amount, 150); // 300 - 150
+ }
+ other => panic!("Expected AddToBalance, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn into_balance_change_no_refunds_no_fees() {
+ let id = make_id(1);
+ let fee_result = FeeResult::default();
+ let bci = fee_result.into_balance_change(id);
+ // 0 == 0, so NoBalanceChange? Actually 0.cmp(&0) is Equal
+ assert_eq!(bci.change(), &BalanceChange::NoBalanceChange);
+ }
+
+ #[test]
+ fn into_balance_change_no_refunds_with_fees() {
+ let id = make_id(1);
+ let fee_result = FeeResult::default_with_fees(200, 100);
+ let bci = fee_result.into_balance_change(id);
+ match bci.change() {
+ BalanceChange::RemoveFromBalance {
+ required_removed_balance,
+ desired_removed_balance,
+ } => {
+ assert_eq!(*required_removed_balance, 200);
+ assert_eq!(*desired_removed_balance, 300);
+ }
+ other => panic!("Expected RemoveFromBalance, got {:?}", other),
+ }
+ }
+
+ // --- apply_user_fee_increase ---
+
+ #[test]
+ fn apply_user_fee_increase_zero_percent() {
+ let mut fr = FeeResult::default_with_fees(100, 1000);
+ fr.apply_user_fee_increase(0);
+ assert_eq!(fr.processing_fee, 1000);
+ }
+
+ #[test]
+ fn apply_user_fee_increase_100_percent() {
+ let mut fr = FeeResult::default_with_fees(100, 1000);
+ fr.apply_user_fee_increase(100);
+ // 100% additional = doubles the processing fee
+ assert_eq!(fr.processing_fee, 2000);
+ }
+
+ #[test]
+ fn apply_user_fee_increase_50_percent() {
+ let mut fr = FeeResult::default_with_fees(100, 1000);
+ fr.apply_user_fee_increase(50);
+ // 50% additional = 1000 + 500
+ assert_eq!(fr.processing_fee, 1500);
+ }
+
+ #[test]
+ fn apply_user_fee_increase_does_not_affect_storage_fee() {
+ let mut fr = FeeResult::default_with_fees(500, 1000);
+ fr.apply_user_fee_increase(100);
+ assert_eq!(fr.storage_fee, 500);
+ assert_eq!(fr.processing_fee, 2000);
+ }
+
+ #[test]
+ fn apply_user_fee_increase_saturates_on_overflow() {
+ let mut fr = FeeResult::default_with_fees(0, u64::MAX);
+ fr.apply_user_fee_increase(100);
+ // Should saturate to u64::MAX rather than panicking
+ assert_eq!(fr.processing_fee, u64::MAX);
+ }
+
+ #[test]
+ fn apply_user_fee_increase_1_percent() {
+ let mut fr = FeeResult::default_with_fees(0, 10000);
+ fr.apply_user_fee_increase(1);
+ // 1% of 10000 = 100
+ assert_eq!(fr.processing_fee, 10100);
+ }
+}
diff --git a/packages/rs-dpp/src/identity/identity_public_key/key_type.rs b/packages/rs-dpp/src/identity/identity_public_key/key_type.rs
index 01e1a31c8ac..3f5ddae640a 100644
--- a/packages/rs-dpp/src/identity/identity_public_key/key_type.rs
+++ b/packages/rs-dpp/src/identity/identity_public_key/key_type.rs
@@ -360,3 +360,214 @@ impl Into for KeyType {
CborValue::from(self as u128)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // -- default_size() --
+
+ #[test]
+ fn test_default_size_ecdsa_secp256k1() {
+ assert_eq!(KeyType::ECDSA_SECP256K1.default_size(), 33);
+ }
+
+ #[test]
+ fn test_default_size_bls12_381() {
+ assert_eq!(KeyType::BLS12_381.default_size(), 48);
+ }
+
+ #[test]
+ fn test_default_size_ecdsa_hash160() {
+ assert_eq!(KeyType::ECDSA_HASH160.default_size(), 20);
+ }
+
+ #[test]
+ fn test_default_size_bip13_script_hash() {
+ assert_eq!(KeyType::BIP13_SCRIPT_HASH.default_size(), 20);
+ }
+
+ #[test]
+ fn test_default_size_eddsa_25519_hash160() {
+ assert_eq!(KeyType::EDDSA_25519_HASH160.default_size(), 20);
+ }
+
+ // -- all_key_types() --
+
+ #[test]
+ fn test_all_key_types_has_five_elements() {
+ let types = KeyType::all_key_types();
+ assert_eq!(types.len(), 5);
+ }
+
+ #[test]
+ fn test_all_key_types_contains_all_variants() {
+ let types = KeyType::all_key_types();
+ assert_eq!(
+ types,
+ [
+ KeyType::ECDSA_SECP256K1,
+ KeyType::BLS12_381,
+ KeyType::ECDSA_HASH160,
+ KeyType::BIP13_SCRIPT_HASH,
+ KeyType::EDDSA_25519_HASH160,
+ ]
+ );
+ }
+
+ // -- is_unique_key_type() --
+
+ #[test]
+ fn test_ecdsa_secp256k1_is_unique() {
+ assert!(KeyType::ECDSA_SECP256K1.is_unique_key_type());
+ }
+
+ #[test]
+ fn test_bls12_381_is_unique() {
+ assert!(KeyType::BLS12_381.is_unique_key_type());
+ }
+
+ #[test]
+ fn test_ecdsa_hash160_is_not_unique() {
+ assert!(!KeyType::ECDSA_HASH160.is_unique_key_type());
+ }
+
+ #[test]
+ fn test_bip13_script_hash_is_not_unique() {
+ assert!(!KeyType::BIP13_SCRIPT_HASH.is_unique_key_type());
+ }
+
+ #[test]
+ fn test_eddsa_25519_hash160_is_not_unique() {
+ assert!(!KeyType::EDDSA_25519_HASH160.is_unique_key_type());
+ }
+
+ // -- is_core_address_key_type() --
+
+ #[test]
+ fn test_ecdsa_secp256k1_not_core_address() {
+ assert!(!KeyType::ECDSA_SECP256K1.is_core_address_key_type());
+ }
+
+ #[test]
+ fn test_bls12_381_not_core_address() {
+ assert!(!KeyType::BLS12_381.is_core_address_key_type());
+ }
+
+ #[test]
+ fn test_ecdsa_hash160_is_core_address() {
+ assert!(KeyType::ECDSA_HASH160.is_core_address_key_type());
+ }
+
+ #[test]
+ fn test_bip13_script_hash_is_core_address() {
+ assert!(KeyType::BIP13_SCRIPT_HASH.is_core_address_key_type());
+ }
+
+ #[test]
+ fn test_eddsa_25519_hash160_not_core_address() {
+ assert!(!KeyType::EDDSA_25519_HASH160.is_core_address_key_type());
+ }
+
+ // -- TryFrom valid --
+
+ #[test]
+ fn test_try_from_u8_ecdsa_secp256k1() {
+ assert_eq!(KeyType::try_from(0u8).unwrap(), KeyType::ECDSA_SECP256K1);
+ }
+
+ #[test]
+ fn test_try_from_u8_bls12_381() {
+ assert_eq!(KeyType::try_from(1u8).unwrap(), KeyType::BLS12_381);
+ }
+
+ #[test]
+ fn test_try_from_u8_ecdsa_hash160() {
+ assert_eq!(KeyType::try_from(2u8).unwrap(), KeyType::ECDSA_HASH160);
+ }
+
+ #[test]
+ fn test_try_from_u8_bip13_script_hash() {
+ assert_eq!(KeyType::try_from(3u8).unwrap(), KeyType::BIP13_SCRIPT_HASH);
+ }
+
+ #[test]
+ fn test_try_from_u8_eddsa_25519_hash160() {
+ assert_eq!(
+ KeyType::try_from(4u8).unwrap(),
+ KeyType::EDDSA_25519_HASH160
+ );
+ }
+
+ // -- TryFrom invalid --
+
+ #[test]
+ fn test_try_from_u8_invalid_5() {
+ assert!(KeyType::try_from(5u8).is_err());
+ }
+
+ #[test]
+ fn test_try_from_u8_invalid_255() {
+ assert!(KeyType::try_from(255u8).is_err());
+ }
+
+ // -- Display --
+
+ #[test]
+ fn test_display_ecdsa_secp256k1() {
+ assert_eq!(format!("{}", KeyType::ECDSA_SECP256K1), "ECDSA_SECP256K1");
+ }
+
+ #[test]
+ fn test_display_bls12_381() {
+ assert_eq!(format!("{}", KeyType::BLS12_381), "BLS12_381");
+ }
+
+ #[test]
+ fn test_display_ecdsa_hash160() {
+ assert_eq!(format!("{}", KeyType::ECDSA_HASH160), "ECDSA_HASH160");
+ }
+
+ #[test]
+ fn test_display_bip13_script_hash() {
+ assert_eq!(
+ format!("{}", KeyType::BIP13_SCRIPT_HASH),
+ "BIP13_SCRIPT_HASH"
+ );
+ }
+
+ #[test]
+ fn test_display_eddsa_25519_hash160() {
+ assert_eq!(
+ format!("{}", KeyType::EDDSA_25519_HASH160),
+ "EDDSA_25519_HASH160"
+ );
+ }
+
+ // -- Default --
+
+ #[test]
+ fn test_default_is_ecdsa_secp256k1() {
+ assert_eq!(KeyType::default(), KeyType::ECDSA_SECP256K1);
+ }
+
+ // -- round-trip: u8 -> KeyType -> u8 --
+
+ #[test]
+ fn test_round_trip_all_valid() {
+ for val in 0u8..=4 {
+ let key_type = KeyType::try_from(val).unwrap();
+ assert_eq!(key_type as u8, val);
+ }
+ }
+
+ // -- unique vs core address are complementary for full-size key types --
+
+ #[test]
+ fn test_unique_and_core_address_are_mutually_exclusive() {
+ for kt in KeyType::all_key_types() {
+ // A key type should not be both unique and a core address key type
+ assert!(!(kt.is_unique_key_type() && kt.is_core_address_key_type()));
+ }
+ }
+}
diff --git a/packages/rs-dpp/src/tokens/token_event.rs b/packages/rs-dpp/src/tokens/token_event.rs
index 4f508bfb7fe..49a63b9b651 100644
--- a/packages/rs-dpp/src/tokens/token_event.rs
+++ b/packages/rs-dpp/src/tokens/token_event.rs
@@ -479,3 +479,135 @@ impl TokenEvent {
Ok(document)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ fn test_id() -> Identifier {
+ Identifier::from([1u8; 32])
+ }
+
+ fn test_id_2() -> Identifier {
+ Identifier::from([2u8; 32])
+ }
+
+ // ---- associated_document_type_name tests ----
+
+ #[test]
+ fn associated_name_mint() {
+ let event = TokenEvent::Mint(0, test_id(), None);
+ assert_eq!(event.associated_document_type_name(), "mint");
+ }
+
+ #[test]
+ fn associated_name_burn() {
+ let event = TokenEvent::Burn(0, test_id(), None);
+ assert_eq!(event.associated_document_type_name(), "burn");
+ }
+
+ #[test]
+ fn associated_name_freeze() {
+ let event = TokenEvent::Freeze(test_id(), None);
+ assert_eq!(event.associated_document_type_name(), "freeze");
+ }
+
+ #[test]
+ fn associated_name_unfreeze() {
+ let event = TokenEvent::Unfreeze(test_id(), None);
+ assert_eq!(event.associated_document_type_name(), "unfreeze");
+ }
+
+ #[test]
+ fn associated_name_destroy_frozen_funds() {
+ let event = TokenEvent::DestroyFrozenFunds(test_id(), 0, None);
+ assert_eq!(event.associated_document_type_name(), "destroyFrozenFunds");
+ }
+
+ #[test]
+ fn associated_name_transfer() {
+ let event = TokenEvent::Transfer(test_id(), None, None, None, 0);
+ assert_eq!(event.associated_document_type_name(), "transfer");
+ }
+
+ #[test]
+ fn associated_name_claim() {
+ let recipient = TokenDistributionTypeWithResolvedRecipient::PreProgrammed(test_id());
+ let event = TokenEvent::Claim(recipient, 0, None);
+ assert_eq!(event.associated_document_type_name(), "claim");
+ }
+
+ #[test]
+ fn associated_name_emergency_action() {
+ let event = TokenEvent::EmergencyAction(TokenEmergencyAction::Pause, None);
+ assert_eq!(event.associated_document_type_name(), "emergencyAction");
+ }
+
+ #[test]
+ fn associated_name_config_update() {
+ let event = TokenEvent::ConfigUpdate(
+ TokenConfigurationChangeItem::TokenConfigurationNoChange,
+ None,
+ );
+ assert_eq!(event.associated_document_type_name(), "configUpdate");
+ }
+
+ #[test]
+ fn associated_name_direct_purchase() {
+ let event = TokenEvent::DirectPurchase(0, 0);
+ assert_eq!(event.associated_document_type_name(), "directPurchase");
+ }
+
+ #[test]
+ fn associated_name_change_price() {
+ let event = TokenEvent::ChangePriceForDirectPurchase(None, None);
+ assert_eq!(event.associated_document_type_name(), "directPricing");
+ }
+
+ // ---- all associated_document_type_name values are distinct ----
+
+ #[test]
+ fn all_document_type_names_are_unique() {
+ let recipient = TokenDistributionTypeWithResolvedRecipient::PreProgrammed(test_id());
+ let events: Vec = vec![
+ TokenEvent::Mint(0, test_id(), None),
+ TokenEvent::Burn(0, test_id(), None),
+ TokenEvent::Freeze(test_id(), None),
+ TokenEvent::Unfreeze(test_id(), None),
+ TokenEvent::DestroyFrozenFunds(test_id(), 0, None),
+ TokenEvent::Transfer(test_id(), None, None, None, 0),
+ TokenEvent::Claim(recipient, 0, None),
+ TokenEvent::EmergencyAction(TokenEmergencyAction::Pause, None),
+ TokenEvent::ConfigUpdate(
+ TokenConfigurationChangeItem::TokenConfigurationNoChange,
+ None,
+ ),
+ TokenEvent::DirectPurchase(0, 0),
+ TokenEvent::ChangePriceForDirectPurchase(None, None),
+ ];
+ let names: Vec<&str> = events
+ .iter()
+ .map(|e| e.associated_document_type_name())
+ .collect();
+ let mut unique = names.clone();
+ unique.sort();
+ unique.dedup();
+ assert_eq!(
+ names.len(),
+ unique.len(),
+ "Duplicate document type names found"
+ );
+ }
+
+ // ---- format_note helper ----
+
+ #[test]
+ fn format_note_none_returns_empty() {
+ assert_eq!(format_note(&None), "");
+ }
+
+ #[test]
+ fn format_note_some_returns_formatted() {
+ assert_eq!(format_note(&Some("hello".to_string())), " (note: hello)");
+ }
+}
diff --git a/packages/rs-dpp/src/tokens/token_pricing_schedule.rs b/packages/rs-dpp/src/tokens/token_pricing_schedule.rs
index 97c553b49f3..5af1f3ebb9a 100644
--- a/packages/rs-dpp/src/tokens/token_pricing_schedule.rs
+++ b/packages/rs-dpp/src/tokens/token_pricing_schedule.rs
@@ -75,3 +75,87 @@ impl Display for TokenPricingSchedule {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn single_price_minimum_purchase_amount_and_price() {
+ let schedule = TokenPricingSchedule::SinglePrice(500);
+ let (amount, price) = schedule.minimum_purchase_amount_and_price();
+ assert_eq!(amount, 1);
+ assert_eq!(price, 500);
+ }
+
+ #[test]
+ fn single_price_zero_credits() {
+ let schedule = TokenPricingSchedule::SinglePrice(0);
+ let (amount, price) = schedule.minimum_purchase_amount_and_price();
+ assert_eq!(amount, 1);
+ assert_eq!(price, 0);
+ }
+
+ #[test]
+ fn set_prices_minimum_purchase_amount_and_price_single_entry() {
+ let mut prices = BTreeMap::new();
+ prices.insert(10u64, 100u64);
+ let schedule = TokenPricingSchedule::SetPrices(prices);
+ let (amount, price) = schedule.minimum_purchase_amount_and_price();
+ assert_eq!(amount, 10);
+ assert_eq!(price, 100);
+ }
+
+ #[test]
+ fn set_prices_minimum_purchase_amount_and_price_multiple_entries() {
+ let mut prices = BTreeMap::new();
+ prices.insert(5u64, 50u64);
+ prices.insert(10u64, 80u64);
+ prices.insert(100u64, 500u64);
+ let schedule = TokenPricingSchedule::SetPrices(prices);
+ // BTreeMap orders by key, so the first entry is the minimum amount
+ let (amount, price) = schedule.minimum_purchase_amount_and_price();
+ assert_eq!(amount, 5);
+ assert_eq!(price, 50);
+ }
+
+ #[test]
+ fn set_prices_empty_map_returns_default() {
+ let prices = BTreeMap::new();
+ let schedule = TokenPricingSchedule::SetPrices(prices);
+ let (amount, price) = schedule.minimum_purchase_amount_and_price();
+ // unwrap_or_default returns (0, 0) for empty map
+ assert_eq!(amount, 0);
+ assert_eq!(price, 0);
+ }
+
+ #[test]
+ fn display_single_price() {
+ let schedule = TokenPricingSchedule::SinglePrice(1234);
+ assert_eq!(format!("{}", schedule), "SinglePrice: 1234");
+ }
+
+ #[test]
+ fn display_set_prices_empty() {
+ let schedule = TokenPricingSchedule::SetPrices(BTreeMap::new());
+ assert_eq!(format!("{}", schedule), "SetPrices: []");
+ }
+
+ #[test]
+ fn display_set_prices_single_entry() {
+ let mut prices = BTreeMap::new();
+ prices.insert(10u64, 100u64);
+ let schedule = TokenPricingSchedule::SetPrices(prices);
+ assert_eq!(format!("{}", schedule), "SetPrices: [10 => 100]");
+ }
+
+ #[test]
+ fn display_set_prices_multiple_entries() {
+ let mut prices = BTreeMap::new();
+ prices.insert(5u64, 50u64);
+ prices.insert(10u64, 80u64);
+ let schedule = TokenPricingSchedule::SetPrices(prices);
+ // BTreeMap iterates in sorted key order
+ assert_eq!(format!("{}", schedule), "SetPrices: [5 => 50, 10 => 80]");
+ }
+}
diff --git a/packages/rs-dpp/src/util/vec.rs b/packages/rs-dpp/src/util/vec.rs
index edbb63b7d76..c1b78bd11c8 100644
--- a/packages/rs-dpp/src/util/vec.rs
+++ b/packages/rs-dpp/src/util/vec.rs
@@ -65,3 +65,216 @@ pub fn vec_to_array(vec: &[u8]) -> Result<[u8; N], InvalidVector
}
Ok(v)
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // -- encode_hex --
+
+ #[test]
+ fn test_encode_hex_empty() {
+ let bytes: Vec = vec![];
+ assert_eq!(encode_hex(&bytes), "");
+ }
+
+ #[test]
+ fn test_encode_hex_single_byte() {
+ let bytes: Vec = vec![0xff];
+ assert_eq!(encode_hex(&bytes), "ff");
+ }
+
+ #[test]
+ fn test_encode_hex_multiple_bytes() {
+ let bytes: Vec = vec![0xde, 0xad, 0xbe, 0xef];
+ assert_eq!(encode_hex(&bytes), "deadbeef");
+ }
+
+ #[test]
+ fn test_encode_hex_leading_zeros() {
+ let bytes: Vec = vec![0x00, 0x01, 0x0a];
+ assert_eq!(encode_hex(&bytes), "00010a");
+ }
+
+ #[test]
+ fn test_encode_hex_all_zeros() {
+ let bytes: Vec = vec![0x00, 0x00, 0x00];
+ assert_eq!(encode_hex(&bytes), "000000");
+ }
+
+ // -- decode_hex --
+
+ #[test]
+ fn test_decode_hex_empty() {
+ let result = decode_hex("").unwrap();
+ assert!(result.is_empty());
+ }
+
+ #[test]
+ fn test_decode_hex_valid() {
+ let result = decode_hex("deadbeef").unwrap();
+ assert_eq!(result, vec![0xde, 0xad, 0xbe, 0xef]);
+ }
+
+ #[test]
+ fn test_decode_hex_uppercase() {
+ let result = decode_hex("DEADBEEF").unwrap();
+ assert_eq!(result, vec![0xde, 0xad, 0xbe, 0xef]);
+ }
+
+ #[test]
+ fn test_decode_hex_mixed_case() {
+ let result = decode_hex("DeAdBeEf").unwrap();
+ assert_eq!(result, vec![0xde, 0xad, 0xbe, 0xef]);
+ }
+
+ #[test]
+ fn test_decode_hex_leading_zeros() {
+ let result = decode_hex("00010a").unwrap();
+ assert_eq!(result, vec![0x00, 0x01, 0x0a]);
+ }
+
+ #[test]
+ fn test_decode_hex_invalid_chars() {
+ let result = decode_hex("zzzz");
+ assert!(result.is_err());
+ }
+
+ #[test]
+ #[should_panic]
+ fn test_decode_hex_odd_length_panics() {
+ // Known issue: odd-length hex strings panic instead of returning Err
+ // because s[i..i+2] goes out of bounds on the last byte.
+ let _ = decode_hex("abc");
+ }
+
+ // -- round-trip encode/decode --
+
+ #[test]
+ fn test_hex_round_trip() {
+ let original: Vec = vec![0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef];
+ let hex = encode_hex(&original);
+ let decoded = decode_hex(&hex).unwrap();
+ assert_eq!(original, decoded);
+ }
+
+ #[test]
+ fn test_hex_round_trip_empty() {
+ let original: Vec = vec![];
+ let hex = encode_hex(&original);
+ let decoded = decode_hex(&hex).unwrap();
+ assert_eq!(original, decoded);
+ }
+
+ #[test]
+ fn test_hex_round_trip_all_byte_values() {
+ let original: Vec = (0..=255).collect();
+ let hex = encode_hex(&original);
+ let decoded = decode_hex(&hex).unwrap();
+ assert_eq!(original, decoded);
+ }
+
+ // -- hex_to_array --
+
+ #[test]
+ fn test_hex_to_array_valid_4_bytes() {
+ let result = hex_to_array::<4>("deadbeef").unwrap();
+ assert_eq!(result, [0xde, 0xad, 0xbe, 0xef]);
+ }
+
+ #[test]
+ fn test_hex_to_array_valid_32_bytes() {
+ let hex = "a".repeat(64); // 32 bytes encoded as 64 hex chars
+ let result = hex_to_array::<32>(&hex).unwrap();
+ assert_eq!(result.len(), 32);
+ assert!(result.iter().all(|&b| b == 0xaa));
+ }
+
+ #[test]
+ fn test_hex_to_array_wrong_size() {
+ // Provide 4 bytes of hex (8 chars) but expect a 2-byte array
+ let result = hex_to_array::<2>("deadbeef");
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn test_hex_to_array_invalid_hex() {
+ let result = hex_to_array::<2>("zzzz");
+ assert!(result.is_err());
+ }
+
+ // -- vec_to_array --
+
+ #[test]
+ fn test_vec_to_array_valid() {
+ let vec = vec![1u8, 2, 3, 4];
+ let result = vec_to_array::<4>(&vec).unwrap();
+ assert_eq!(result, [1, 2, 3, 4]);
+ }
+
+ #[test]
+ fn test_vec_to_array_too_short() {
+ let vec = vec![1u8, 2];
+ let result = vec_to_array::<4>(&vec);
+ assert!(result.is_err());
+ let err = result.unwrap_err();
+ assert_eq!(err.expected_size(), 4);
+ assert_eq!(err.actual_size(), 2);
+ }
+
+ #[test]
+ fn test_vec_to_array_too_long() {
+ let vec = vec![1u8, 2, 3, 4, 5];
+ let result = vec_to_array::<4>(&vec);
+ assert!(result.is_err());
+ let err = result.unwrap_err();
+ assert_eq!(err.expected_size(), 4);
+ assert_eq!(err.actual_size(), 5);
+ }
+
+ #[test]
+ fn test_vec_to_array_empty_to_zero() {
+ let vec: Vec = vec![];
+ let result = vec_to_array::<0>(&vec).unwrap();
+ assert_eq!(result, [0u8; 0]);
+ }
+
+ #[test]
+ fn test_vec_to_array_single_element() {
+ let vec = vec![0xffu8];
+ let result = vec_to_array::<1>(&vec).unwrap();
+ assert_eq!(result, [0xff]);
+ }
+
+ // -- decode_hex_sha256 / decode_hex_bls_sig --
+
+ #[test]
+ fn test_decode_hex_sha256_valid() {
+ let hex = "ab".repeat(32); // 32 bytes
+ let result = decode_hex_sha256(&hex).unwrap();
+ assert_eq!(result.len(), 32);
+ assert!(result.iter().all(|&b| b == 0xab));
+ }
+
+ #[test]
+ fn test_decode_hex_sha256_wrong_length() {
+ let hex = "ab".repeat(16); // 16 bytes, not 32
+ let result = decode_hex_sha256(&hex);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn test_decode_hex_bls_sig_valid() {
+ let hex = "cd".repeat(96); // 96 bytes
+ let result = decode_hex_bls_sig(&hex).unwrap();
+ assert_eq!(result.len(), 96);
+ assert!(result.iter().all(|&b| b == 0xcd));
+ }
+
+ #[test]
+ fn test_decode_hex_bls_sig_wrong_length() {
+ let hex = "cd".repeat(48); // 48 bytes, not 96
+ let result = decode_hex_bls_sig(&hex);
+ assert!(result.is_err());
+ }
+}
diff --git a/packages/rs-dpp/src/validation/validation_result.rs b/packages/rs-dpp/src/validation/validation_result.rs
index bc9e7bce34f..505e65edef4 100644
--- a/packages/rs-dpp/src/validation/validation_result.rs
+++ b/packages/rs-dpp/src/validation/validation_result.rs
@@ -289,3 +289,417 @@ impl> From> for ValidationRe
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // -- new() --
+
+ #[test]
+ fn test_new_has_no_errors() {
+ let result: ValidationResult = ValidationResult::new();
+ assert!(result.errors.is_empty());
+ }
+
+ #[test]
+ fn test_new_has_no_data() {
+ let result: ValidationResult = ValidationResult::new();
+ assert!(result.data.is_none());
+ }
+
+ // -- new_with_data() --
+
+ #[test]
+ fn test_new_with_data_stores_data() {
+ let result: ValidationResult = ValidationResult::new_with_data(42);
+ assert_eq!(result.data, Some(42));
+ assert!(result.errors.is_empty());
+ }
+
+ // -- new_with_error() --
+
+ #[test]
+ fn test_new_with_error_stores_single_error() {
+ let result: ValidationResult =
+ ValidationResult::new_with_error("bad".to_string());
+ assert_eq!(result.errors.len(), 1);
+ assert_eq!(result.errors[0], "bad");
+ assert!(result.data.is_none());
+ }
+
+ // -- new_with_errors() --
+
+ #[test]
+ fn test_new_with_errors_stores_multiple_errors() {
+ let result: ValidationResult =
+ ValidationResult::new_with_errors(vec!["a".to_string(), "b".to_string()]);
+ assert_eq!(result.errors.len(), 2);
+ assert_eq!(result.errors[0], "a");
+ assert_eq!(result.errors[1], "b");
+ assert!(result.data.is_none());
+ }
+
+ #[test]
+ fn test_new_with_errors_empty_vec() {
+ let result: ValidationResult = ValidationResult::new_with_errors(vec![]);
+ assert!(result.errors.is_empty());
+ assert!(result.data.is_none());
+ }
+
+ // -- map() --
+
+ #[test]
+ fn test_map_transforms_data() {
+ let result: ValidationResult = ValidationResult::new_with_data(10);
+ let mapped = result.map(|x| x * 2);
+ assert_eq!(mapped.data, Some(20));
+ assert!(mapped.errors.is_empty());
+ }
+
+ #[test]
+ fn test_map_preserves_errors() {
+ let result: ValidationResult =
+ ValidationResult::new_with_data_and_errors(5, vec!["err".to_string()]);
+ let mapped = result.map(|x| x + 1);
+ assert_eq!(mapped.data, Some(6));
+ assert_eq!(mapped.errors, vec!["err".to_string()]);
+ }
+
+ #[test]
+ fn test_map_with_no_data() {
+ let result: ValidationResult =
+ ValidationResult::new_with_error("err".to_string());
+ let mapped = result.map(|x| x + 1);
+ assert!(mapped.data.is_none());
+ assert_eq!(mapped.errors.len(), 1);
+ }
+
+ // -- map_result() --
+
+ #[test]
+ fn test_map_result_with_ok_closure() {
+ let result: ValidationResult = ValidationResult::new_with_data(10);
+ let mapped: Result, String> =
+ result.map_result(|x| Ok(format!("val={}", x)));
+ let mapped = mapped.unwrap();
+ assert_eq!(mapped.data, Some("val=10".to_string()));
+ }
+
+ #[test]
+ fn test_map_result_with_err_closure() {
+ let result: ValidationResult = ValidationResult::new_with_data(10);
+ let mapped: Result, String> =
+ result.map_result(|_| Err("fail".to_string()));
+ assert!(mapped.is_err());
+ assert_eq!(mapped.unwrap_err(), "fail");
+ }
+
+ #[test]
+ fn test_map_result_with_no_data() {
+ let result: ValidationResult =
+ ValidationResult::new_with_error("err".to_string());
+ let mapped: Result, String> =
+ result.map_result(|x| Ok(x + 1));
+ let mapped = mapped.unwrap();
+ assert!(mapped.data.is_none());
+ assert_eq!(mapped.errors, vec!["err".to_string()]);
+ }
+
+ // -- is_valid() / is_err() --
+
+ #[test]
+ fn test_is_valid_true_when_no_errors() {
+ let result: ValidationResult = ValidationResult::new();
+ assert!(result.is_valid());
+ assert!(!result.is_err());
+ }
+
+ #[test]
+ fn test_is_valid_false_when_errors_present() {
+ let result: ValidationResult =
+ ValidationResult::new_with_error("e".to_string());
+ assert!(!result.is_valid());
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn test_is_valid_with_data_and_no_errors() {
+ let result: ValidationResult = ValidationResult::new_with_data(1);
+ assert!(result.is_valid());
+ }
+
+ #[test]
+ fn test_is_err_with_data_and_errors() {
+ let result: ValidationResult =
+ ValidationResult::new_with_data_and_errors(1, vec!["e".to_string()]);
+ assert!(result.is_err());
+ }
+
+ // -- first_error() --
+
+ #[test]
+ fn test_first_error_returns_first() {
+ let result: ValidationResult =
+ ValidationResult::new_with_errors(vec!["first".to_string(), "second".to_string()]);
+ assert_eq!(result.first_error(), Some(&"first".to_string()));
+ }
+
+ #[test]
+ fn test_first_error_returns_none_when_no_errors() {
+ let result: ValidationResult = ValidationResult::new();
+ assert_eq!(result.first_error(), None);
+ }
+
+ // -- into_data() --
+
+ #[test]
+ fn test_into_data_returns_data_when_present() {
+ let result: ValidationResult = ValidationResult::new_with_data(42);
+ assert_eq!(result.into_data().unwrap(), 42);
+ }
+
+ #[test]
+ fn test_into_data_returns_error_when_no_data() {
+ let result: ValidationResult = ValidationResult::new();
+ assert!(result.into_data().is_err());
+ }
+
+ // -- into_data_with_error() --
+
+ #[test]
+ fn test_into_data_with_error_returns_data_when_valid() {
+ let result: ValidationResult = ValidationResult::new_with_data(42);
+ let inner = result.into_data_with_error().unwrap();
+ assert_eq!(inner.unwrap(), 42);
+ }
+
+ #[test]
+ fn test_into_data_with_error_returns_last_error_when_errors_present() {
+ let result: ValidationResult =
+ ValidationResult::new_with_errors(vec!["first".to_string(), "last".to_string()]);
+ let inner = result.into_data_with_error().unwrap();
+ assert_eq!(inner.unwrap_err(), "last");
+ }
+
+ #[test]
+ fn test_into_data_with_error_returns_protocol_error_when_no_data_and_no_errors() {
+ let result: ValidationResult = ValidationResult::new();
+ assert!(result.into_data_with_error().is_err());
+ }
+
+ // -- into_data_and_errors() --
+
+ #[test]
+ fn test_into_data_and_errors_returns_both() {
+ let result: ValidationResult =
+ ValidationResult::new_with_data_and_errors(10, vec!["e".to_string()]);
+ let (data, errors) = result.into_data_and_errors().unwrap();
+ assert_eq!(data, 10);
+ assert_eq!(errors, vec!["e".to_string()]);
+ }
+
+ #[test]
+ fn test_into_data_and_errors_returns_empty_errors_when_valid() {
+ let result: ValidationResult = ValidationResult::new_with_data(10);
+ let (data, errors) = result.into_data_and_errors().unwrap();
+ assert_eq!(data, 10);
+ assert!(errors.is_empty());
+ }
+
+ #[test]
+ fn test_into_data_and_errors_fails_without_data() {
+ let result: ValidationResult =
+ ValidationResult::new_with_error("e".to_string());
+ assert!(result.into_data_and_errors().is_err());
+ }
+
+ // -- From impls --
+
+ #[test]
+ fn test_from_data_creates_valid_result() {
+ let result: ValidationResult = 42.into();
+ assert_eq!(result.data, Some(42));
+ assert!(result.errors.is_empty());
+ }
+
+ #[test]
+ fn test_from_ok_result_creates_valid_result() {
+ let ok_result: Result = Ok(42);
+ let result: ValidationResult = ok_result.into();
+ assert_eq!(result.data, Some(42));
+ assert!(result.errors.is_empty());
+ }
+
+ #[test]
+ fn test_from_err_result_creates_error_result() {
+ let err_result: Result = Err("bad".to_string());
+ let result: ValidationResult = err_result.into();
+ assert!(result.data.is_none());
+ assert_eq!(result.errors, vec!["bad".to_string()]);
+ }
+
+ // -- flatten() --
+
+ #[test]
+ fn test_flatten_merges_data_and_errors() {
+ let r1: ValidationResult, String> = ValidationResult::new_with_data(vec![1, 2]);
+ let r2: ValidationResult, String> =
+ ValidationResult::new_with_data_and_errors(vec![3], vec!["e".to_string()]);
+ let r3: ValidationResult, String> =
+ ValidationResult::new_with_error("e2".to_string());
+
+ let flat = ValidationResult::flatten(vec![r1, r2, r3]);
+ assert_eq!(flat.data, Some(vec![1, 2, 3]));
+ assert_eq!(flat.errors, vec!["e".to_string(), "e2".to_string()]);
+ }
+
+ #[test]
+ fn test_flatten_empty_input() {
+ let flat: ValidationResult, String> =
+ ValidationResult::flatten(std::iter::empty());
+ assert_eq!(flat.data, Some(vec![]));
+ assert!(flat.errors.is_empty());
+ }
+
+ // -- merge_many() --
+
+ #[test]
+ fn test_merge_many_collects_data_into_vec() {
+ let r1: ValidationResult = ValidationResult::new_with_data(1);
+ let r2: ValidationResult = ValidationResult::new_with_data(2);
+ let r3: ValidationResult = ValidationResult::new_with_error("e".to_string());
+
+ let merged = ValidationResult::merge_many(vec![r1, r2, r3]);
+ assert_eq!(merged.data, Some(vec![1, 2]));
+ assert_eq!(merged.errors, vec!["e".to_string()]);
+ }
+
+ #[test]
+ fn test_merge_many_empty_input() {
+ let merged: ValidationResult, String> =
+ ValidationResult::merge_many(std::iter::empty::>());
+ assert_eq!(merged.data, Some(vec![]));
+ assert!(merged.errors.is_empty());
+ }
+
+ // -- merge_many_errors() --
+
+ #[test]
+ fn test_merge_many_errors_collects_all_errors() {
+ let r1: SimpleValidationResult =
+ SimpleValidationResult::new_with_errors(vec!["a".to_string()]);
+ let r2: SimpleValidationResult =
+ SimpleValidationResult::new_with_errors(vec!["b".to_string(), "c".to_string()]);
+ let r3: SimpleValidationResult = SimpleValidationResult::new();
+
+ let merged = SimpleValidationResult::merge_many_errors(vec![r1, r2, r3]);
+ assert_eq!(
+ merged.errors,
+ vec!["a".to_string(), "b".to_string(), "c".to_string()]
+ );
+ }
+
+ #[test]
+ fn test_merge_many_errors_empty_input() {
+ let merged: SimpleValidationResult =
+ SimpleValidationResult::merge_many_errors(std::iter::empty());
+ assert!(merged.errors.is_empty());
+ }
+
+ // -- Default --
+
+ #[test]
+ fn test_default_is_empty() {
+ let result: ValidationResult = ValidationResult::default();
+ assert!(result.errors.is_empty());
+ assert!(result.data.is_none());
+ }
+
+ // -- add_error / add_errors / merge --
+
+ #[test]
+ fn test_add_error() {
+ let mut result: ValidationResult = ValidationResult::new();
+ result.add_error("e1".to_string());
+ result.add_error("e2".to_string());
+ assert_eq!(result.errors, vec!["e1".to_string(), "e2".to_string()]);
+ }
+
+ #[test]
+ fn test_add_errors() {
+ let mut result: ValidationResult =
+ ValidationResult::new_with_error("e1".to_string());
+ result.add_errors(vec!["e2".to_string(), "e3".to_string()]);
+ assert_eq!(result.errors.len(), 3);
+ }
+
+ #[test]
+ fn test_merge_appends_errors_from_other() {
+ let mut r1: ValidationResult =
+ ValidationResult::new_with_error("a".to_string());
+ let r2: ValidationResult =
+ ValidationResult::new_with_error("b".to_string());
+ r1.merge(r2);
+ assert_eq!(r1.errors, vec!["a".to_string(), "b".to_string()]);
+ }
+
+ // -- get_error / has_data / is_valid_with_data / set_data --
+
+ #[test]
+ fn test_get_error() {
+ let result: ValidationResult =
+ ValidationResult::new_with_errors(vec!["a".to_string(), "b".to_string()]);
+ assert_eq!(result.get_error(0), Some(&"a".to_string()));
+ assert_eq!(result.get_error(1), Some(&"b".to_string()));
+ assert_eq!(result.get_error(2), None);
+ }
+
+ #[test]
+ fn test_has_data() {
+ let with: ValidationResult = ValidationResult::new_with_data(1);
+ let without: ValidationResult = ValidationResult::new();
+ assert!(with.has_data());
+ assert!(!without.has_data());
+ }
+
+ #[test]
+ fn test_is_valid_with_data() {
+ let valid_with_data: ValidationResult = ValidationResult::new_with_data(1);
+ let valid_no_data: ValidationResult = ValidationResult::new();
+ let invalid_with_data: ValidationResult =
+ ValidationResult::new_with_data_and_errors(1, vec!["e".to_string()]);
+ assert!(valid_with_data.is_valid_with_data());
+ assert!(!valid_no_data.is_valid_with_data());
+ assert!(!invalid_with_data.is_valid_with_data());
+ }
+
+ #[test]
+ fn test_set_data() {
+ let mut result: ValidationResult = ValidationResult::new();
+ assert!(result.data.is_none());
+ result.set_data(99);
+ assert_eq!(result.data, Some(99));
+ }
+
+ #[test]
+ fn test_into_result_without_data() {
+ let result: ValidationResult =
+ ValidationResult::new_with_data_and_errors(42, vec!["e".to_string()]);
+ let without_data = result.into_result_without_data();
+ assert!(without_data.data.is_none());
+ assert_eq!(without_data.errors, vec!["e".to_string()]);
+ }
+
+ #[test]
+ fn test_data_as_borrowed() {
+ let result: ValidationResult = ValidationResult::new_with_data(42);
+ assert_eq!(result.data_as_borrowed().unwrap(), &42);
+ }
+
+ #[test]
+ fn test_data_as_borrowed_no_data() {
+ let result: ValidationResult = ValidationResult::new();
+ assert!(result.data_as_borrowed().is_err());
+ }
+}
diff --git a/packages/rs-drive/src/util/common/encode.rs b/packages/rs-drive/src/util/common/encode.rs
index 23e630743f1..cb563a9d1e3 100644
--- a/packages/rs-drive/src/util/common/encode.rs
+++ b/packages/rs-drive/src/util/common/encode.rs
@@ -219,3 +219,224 @@ pub fn encode_u32(val: u32) -> Vec {
wtr
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // --- encode_u64 / decode_u64 round-trip tests ---
+
+ #[test]
+ fn encode_decode_u64_zero() {
+ let encoded = encode_u64(0);
+ assert_eq!(encoded.len(), 8);
+ let decoded = decode_u64(&encoded).unwrap();
+ assert_eq!(decoded, 0);
+ }
+
+ #[test]
+ fn encode_decode_u64_one() {
+ let encoded = encode_u64(1);
+ let decoded = decode_u64(&encoded).unwrap();
+ assert_eq!(decoded, 1);
+ }
+
+ #[test]
+ fn encode_decode_u64_max() {
+ let encoded = encode_u64(u64::MAX);
+ let decoded = decode_u64(&encoded).unwrap();
+ assert_eq!(decoded, u64::MAX);
+ }
+
+ #[test]
+ fn encode_decode_u64_owned_round_trip() {
+ for val in [0u64, 1, 42, 1000, u64::MAX / 2, u64::MAX] {
+ let encoded = encode_u64(val);
+ let decoded = decode_u64_owned(encoded).unwrap();
+ assert_eq!(decoded, val);
+ }
+ }
+
+ #[test]
+ fn encode_u64_preserves_sort_order_in_positive_range() {
+ // The sign-bit flip means lexicographic ordering matches signed interpretation.
+ // Values in 0..=i64::MAX sort correctly among themselves.
+ let values = [0u64, 1, 2, 100, 1000, i64::MAX as u64];
+ let encoded: Vec> = values.iter().map(|&v| encode_u64(v)).collect();
+ for i in 0..encoded.len() - 1 {
+ assert!(
+ encoded[i] < encoded[i + 1],
+ "Sort order violated: encode_u64({}) >= encode_u64({})",
+ values[i],
+ values[i + 1]
+ );
+ }
+ }
+
+ #[test]
+ fn encode_u64_sign_bit_flip_makes_high_values_sort_lower() {
+ // Values above i64::MAX have the sign bit set in big-endian, so the flip
+ // clears it, making them sort below values in the 0..=i64::MAX range.
+ // This is the intended behavior: the encoding treats u64 as if it were i64.
+ let below_midpoint = encode_u64(100);
+ let above_midpoint = encode_u64(u64::MAX);
+ assert!(above_midpoint < below_midpoint);
+ }
+
+ #[test]
+ fn decode_u64_wrong_length_returns_error() {
+ assert!(decode_u64(&[]).is_err());
+ assert!(decode_u64(&[0; 7]).is_err());
+ assert!(decode_u64(&[0; 9]).is_err());
+ assert!(decode_u64(&[0; 1]).is_err());
+ }
+
+ #[test]
+ fn decode_u64_owned_wrong_length_returns_error() {
+ assert!(decode_u64_owned(vec![]).is_err());
+ assert!(decode_u64_owned(vec![0; 7]).is_err());
+ assert!(decode_u64_owned(vec![0; 9]).is_err());
+ }
+
+ // --- encode_i64 tests ---
+
+ #[test]
+ fn encode_i64_positive() {
+ let encoded = encode_i64(42);
+ assert_eq!(encoded.len(), 8);
+ }
+
+ #[test]
+ fn encode_i64_negative() {
+ let encoded = encode_i64(-42);
+ assert_eq!(encoded.len(), 8);
+ }
+
+ #[test]
+ fn encode_i64_zero() {
+ let encoded = encode_i64(0);
+ assert_eq!(encoded.len(), 8);
+ }
+
+ #[test]
+ fn encode_i64_preserves_sort_order() {
+ let values = [i64::MIN, -1000, -1, 0, 1, 1000, i64::MAX];
+ let encoded: Vec> = values.iter().map(|&v| encode_i64(v)).collect();
+ for i in 0..encoded.len() - 1 {
+ assert!(
+ encoded[i] < encoded[i + 1],
+ "Sort order violated: encode_i64({}) >= encode_i64({})",
+ values[i],
+ values[i + 1]
+ );
+ }
+ }
+
+ #[test]
+ fn encode_i64_negative_less_than_positive() {
+ let neg = encode_i64(-1);
+ let pos = encode_i64(1);
+ assert!(neg < pos);
+ }
+
+ // --- encode_float tests ---
+
+ #[test]
+ fn encode_float_positive() {
+ let encoded = encode_float(3.14);
+ assert_eq!(encoded.len(), 8);
+ }
+
+ #[test]
+ fn encode_float_negative() {
+ let encoded = encode_float(-3.14);
+ assert_eq!(encoded.len(), 8);
+ }
+
+ #[test]
+ fn encode_float_zero() {
+ let encoded = encode_float(0.0);
+ assert_eq!(encoded.len(), 8);
+ }
+
+ #[test]
+ fn encode_float_preserves_sort_order() {
+ let values = [-1000.0f64, -1.0, -0.001, 0.0, 0.001, 1.0, 1000.0];
+ let encoded: Vec> = values.iter().map(|&v| encode_float(v)).collect();
+ for i in 0..encoded.len() - 1 {
+ assert!(
+ encoded[i] < encoded[i + 1],
+ "Sort order violated: encode_float({}) >= encode_float({})",
+ values[i],
+ values[i + 1]
+ );
+ }
+ }
+
+ #[test]
+ fn encode_float_negative_less_than_positive() {
+ let neg = encode_float(-0.5);
+ let pos = encode_float(0.5);
+ assert!(neg < pos);
+ }
+
+ // --- encode_u16 tests ---
+
+ #[test]
+ fn encode_u16_basic() {
+ assert_eq!(encode_u16(0).len(), 2);
+ assert_eq!(encode_u16(u16::MAX).len(), 2);
+ }
+
+ #[test]
+ fn encode_u16_preserves_sort_order_in_positive_range() {
+ // Values in 0..=i16::MAX sort correctly after sign-bit flip.
+ let values = [0u16, 1, 100, 1000, i16::MAX as u16];
+ let encoded: Vec> = values.iter().map(|&v| encode_u16(v)).collect();
+ for i in 0..encoded.len() - 1 {
+ assert!(
+ encoded[i] < encoded[i + 1],
+ "Sort order violated: encode_u16({}) >= encode_u16({})",
+ values[i],
+ values[i + 1]
+ );
+ }
+ }
+
+ #[test]
+ fn encode_u16_sign_bit_flip_makes_high_values_sort_lower() {
+ let below = encode_u16(100);
+ let above = encode_u16(u16::MAX);
+ assert!(above < below);
+ }
+
+ // --- encode_u32 tests ---
+
+ #[test]
+ fn encode_u32_basic() {
+ assert_eq!(encode_u32(0).len(), 4);
+ assert_eq!(encode_u32(u32::MAX).len(), 4);
+ }
+
+ #[test]
+ fn encode_u32_preserves_sort_order_in_positive_range() {
+ // Values in 0..=i32::MAX sort correctly after sign-bit flip.
+ let values = [0u32, 1, 100, 10000, i32::MAX as u32];
+ let encoded: Vec> = values.iter().map(|&v| encode_u32(v)).collect();
+ for i in 0..encoded.len() - 1 {
+ assert!(
+ encoded[i] < encoded[i + 1],
+ "Sort order violated: encode_u32({}) >= encode_u32({})",
+ values[i],
+ values[i + 1]
+ );
+ }
+ }
+
+ #[test]
+ fn encode_u32_sign_bit_flip_makes_high_values_sort_lower() {
+ let below = encode_u32(100);
+ let above = encode_u32(u32::MAX);
+ assert!(above < below);
+ }
+}
diff --git a/packages/rs-platform-value/src/eq.rs b/packages/rs-platform-value/src/eq.rs
index 53639f0032d..0c7d506270a 100644
--- a/packages/rs-platform-value/src/eq.rs
+++ b/packages/rs-platform-value/src/eq.rs
@@ -169,3 +169,372 @@ impl Value {
self == other
}
}
+
+#[cfg(test)]
+mod tests {
+ use crate::Value;
+
+ // ---- PartialEq ----
+
+ #[test]
+ fn u8_eq() {
+ assert_eq!(Value::U8(42), 42u8);
+ assert_ne!(Value::U8(42), 43u8);
+ }
+
+ #[test]
+ fn i8_eq() {
+ assert_eq!(Value::I8(-1), -1i8);
+ assert_ne!(Value::I8(-1), 0i8);
+ }
+
+ #[test]
+ fn u16_eq() {
+ assert_eq!(Value::U16(1000), 1000u16);
+ assert_ne!(Value::U16(1000), 999u16);
+ }
+
+ #[test]
+ fn i16_eq() {
+ assert_eq!(Value::I16(-500), -500i16);
+ assert_ne!(Value::I16(-500), 500i16);
+ }
+
+ #[test]
+ fn u32_eq() {
+ assert_eq!(Value::U32(100_000), 100_000u32);
+ assert_ne!(Value::U32(100_000), 0u32);
+ }
+
+ #[test]
+ fn i32_eq() {
+ assert_eq!(Value::I32(-100), -100i32);
+ assert_ne!(Value::I32(-100), 100i32);
+ }
+
+ #[test]
+ fn u64_eq() {
+ assert_eq!(Value::U64(u64::MAX), u64::MAX);
+ assert_ne!(Value::U64(0), 1u64);
+ }
+
+ #[test]
+ fn i64_eq() {
+ assert_eq!(Value::I64(i64::MIN), i64::MIN);
+ assert_ne!(Value::I64(0), 1i64);
+ }
+
+ #[test]
+ fn u128_eq() {
+ assert_eq!(Value::U128(u128::MAX), u128::MAX);
+ assert_ne!(Value::U128(0), 1u128);
+ }
+
+ #[test]
+ fn i128_eq() {
+ assert_eq!(Value::I128(i128::MIN), i128::MIN);
+ assert_ne!(Value::I128(0), 1i128);
+ }
+
+ // ---- cross-type integer comparison via as_integer ----
+
+ #[test]
+ fn u8_value_eq_u64_type() {
+ // Value::U8(10) should equal 10u64 through as_integer
+ assert_eq!(Value::U8(10), 10u64);
+ }
+
+ #[test]
+ fn u64_value_eq_u8_type_when_fits() {
+ assert_eq!(Value::U64(200), 200u8);
+ }
+
+ #[test]
+ fn u64_value_ne_u8_type_when_overflow() {
+ // 256 doesn't fit in u8
+ assert_ne!(Value::U64(256), 0u8); // as_integer:: returns None
+ }
+
+ #[test]
+ fn i8_value_eq_i64_type() {
+ assert_eq!(Value::I8(-10), -10i64);
+ }
+
+ #[test]
+ fn non_integer_ne_integer() {
+ assert_ne!(Value::Text("hello".to_string()), 0u64);
+ assert_ne!(Value::Null, 0i32);
+ assert_ne!(Value::Bool(true), 1u8);
+ }
+
+ // ---- PartialEq ----
+
+ #[test]
+ fn string_eq() {
+ let val = Value::Text("hello".to_string());
+ assert_eq!(val, "hello".to_string());
+ assert_ne!(val, "world".to_string());
+ }
+
+ #[test]
+ fn non_text_ne_string() {
+ assert_ne!(Value::U8(0), "0".to_string());
+ assert_ne!(Value::Null, "".to_string());
+ }
+
+ // ---- PartialEq<&str> ----
+
+ #[test]
+ fn str_ref_eq() {
+ let val = Value::Text("test".to_string());
+ assert_eq!(val, "test");
+ assert_ne!(val, "other");
+ }
+
+ #[test]
+ fn non_text_ne_str_ref() {
+ assert_ne!(Value::Bool(false), "false");
+ }
+
+ // ---- PartialEq ----
+
+ #[test]
+ fn float_eq() {
+ assert_eq!(Value::Float(3.14), 3.14f64);
+ assert_ne!(Value::Float(3.14), 3.15f64);
+ }
+
+ #[test]
+ fn integer_eq_float_through_as_float() {
+ // as_float converts integers to f64, so Value::U64(10) == 10.0f64
+ assert_eq!(Value::U64(10), 10.0f64);
+ }
+
+ #[test]
+ fn non_numeric_ne_float() {
+ assert_ne!(Value::Text("3.14".to_string()), 3.14f64);
+ }
+
+ // ---- PartialEq> ----
+
+ #[test]
+ fn bytes_eq_vec_u8() {
+ let data = vec![1, 2, 3];
+ assert_eq!(Value::Bytes(data.clone()), data);
+ }
+
+ #[test]
+ fn bytes_ne_vec_u8() {
+ assert_ne!(Value::Bytes(vec![1, 2, 3]), vec![1, 2, 4]);
+ }
+
+ #[test]
+ fn identifier_eq_vec_u8() {
+ let id = [42u8; 32];
+ assert_eq!(Value::Identifier(id), id.to_vec());
+ }
+
+ #[test]
+ fn bytes20_eq_vec_u8() {
+ let b = [5u8; 20];
+ assert_eq!(Value::Bytes20(b), b.to_vec());
+ }
+
+ #[test]
+ fn non_bytes_ne_vec_u8() {
+ assert_ne!(Value::U8(1), vec![1u8]);
+ }
+
+ // ---- PartialEq<[u8; 32]> ----
+
+ #[test]
+ fn bytes32_eq_array() {
+ let b = [0xffu8; 32];
+ assert_eq!(Value::Bytes32(b), b);
+ }
+
+ #[test]
+ fn identifier_eq_array_32() {
+ let id = [7u8; 32];
+ assert_eq!(Value::Identifier(id), id);
+ }
+
+ #[test]
+ fn bytes_eq_array_32() {
+ let data = [3u8; 32];
+ assert_eq!(Value::Bytes(data.to_vec()), data);
+ }
+
+ #[test]
+ fn non_bytes_ne_array_32() {
+ assert_ne!(Value::Null, [0u8; 32]);
+ }
+
+ // ---- PartialEq<[u8; 20]> ----
+
+ #[test]
+ fn bytes20_eq_array_20() {
+ let b = [1u8; 20];
+ assert_eq!(Value::Bytes20(b), b);
+ }
+
+ // ---- PartialEq<[u8; 36]> ----
+
+ #[test]
+ fn bytes36_eq_array_36() {
+ let b = [2u8; 36];
+ assert_eq!(Value::Bytes36(b), b);
+ }
+
+ // ---- PartialEq for &Value ----
+
+ #[test]
+ fn ref_value_eq_integer() {
+ let val = Value::U64(42);
+ assert_eq!(&val, 42u64);
+ }
+
+ #[test]
+ fn ref_value_eq_string() {
+ let val = Value::Text("hi".to_string());
+ assert_eq!(&val, "hi".to_string());
+ }
+
+ #[test]
+ fn ref_value_eq_str_ref() {
+ let val = Value::Text("hi".to_string());
+ assert_eq!(&val, "hi");
+ }
+
+ #[test]
+ fn ref_value_eq_float() {
+ let val = Value::Float(1.0);
+ assert_eq!(&val, 1.0f64);
+ }
+
+ #[test]
+ fn ref_value_eq_vec_u8() {
+ let val = Value::Bytes(vec![10, 20]);
+ assert_eq!(&val, vec![10u8, 20]);
+ }
+
+ #[test]
+ fn ref_value_eq_array_32() {
+ let b = [0u8; 32];
+ let val = Value::Bytes32(b);
+ assert_eq!(&val, b);
+ }
+
+ // ---- equal_underlying_data tests ----
+
+ #[test]
+ fn equal_underlying_data_bytes_vs_identifier_same_data() {
+ let data = [42u8; 32];
+ let bytes = Value::Bytes(data.to_vec());
+ let ident = Value::Identifier(data);
+ assert!(bytes.equal_underlying_data(&ident));
+ assert!(ident.equal_underlying_data(&bytes));
+ }
+
+ #[test]
+ fn equal_underlying_data_bytes_vs_identifier_different_data() {
+ let bytes = Value::Bytes(vec![0u8; 32]);
+ let ident = Value::Identifier([1u8; 32]);
+ assert!(!bytes.equal_underlying_data(&ident));
+ }
+
+ #[test]
+ fn equal_underlying_data_bytes32_vs_identifier() {
+ let data = [99u8; 32];
+ let b32 = Value::Bytes32(data);
+ let ident = Value::Identifier(data);
+ assert!(b32.equal_underlying_data(&ident));
+ }
+
+ #[test]
+ fn equal_underlying_data_bytes20_vs_bytes() {
+ let data = [5u8; 20];
+ let b20 = Value::Bytes20(data);
+ let bytes = Value::Bytes(data.to_vec());
+ assert!(b20.equal_underlying_data(&bytes));
+ }
+
+ #[test]
+ fn equal_underlying_data_u8_vs_u64_same_value() {
+ let a = Value::U8(10);
+ let b = Value::U64(10);
+ assert!(a.equal_underlying_data(&b));
+ }
+
+ #[test]
+ fn equal_underlying_data_i8_vs_i128_same_value() {
+ let a = Value::I8(-5);
+ let b = Value::I128(-5);
+ assert!(a.equal_underlying_data(&b));
+ }
+
+ #[test]
+ fn equal_underlying_data_u8_vs_u64_different_value() {
+ let a = Value::U8(10);
+ let b = Value::U64(20);
+ assert!(!a.equal_underlying_data(&b));
+ }
+
+ #[test]
+ fn equal_underlying_data_u16_vs_i32_same_value() {
+ let a = Value::U16(100);
+ let b = Value::I32(100);
+ assert!(a.equal_underlying_data(&b));
+ }
+
+ #[test]
+ fn equal_underlying_data_negative_i8_vs_u64() {
+ // negative can't match unsigned
+ let a = Value::I8(-1);
+ let b = Value::U64(255);
+ assert!(!a.equal_underlying_data(&b));
+ }
+
+ #[test]
+ fn equal_underlying_data_same_variant_same_value() {
+ let a = Value::U64(42);
+ let b = Value::U64(42);
+ assert!(a.equal_underlying_data(&b));
+ }
+
+ #[test]
+ fn equal_underlying_data_fallback_to_partial_eq() {
+ // Text vs Text uses default PartialEq
+ let a = Value::Text("hello".to_string());
+ let b = Value::Text("hello".to_string());
+ assert!(a.equal_underlying_data(&b));
+
+ let c = Value::Text("world".to_string());
+ assert!(!a.equal_underlying_data(&c));
+ }
+
+ #[test]
+ fn equal_underlying_data_null_vs_null() {
+ assert!(Value::Null.equal_underlying_data(&Value::Null));
+ }
+
+ #[test]
+ fn equal_underlying_data_different_types_not_equal() {
+ // A string vs a number should not be equal
+ let a = Value::Text("42".to_string());
+ let b = Value::U64(42);
+ assert!(!a.equal_underlying_data(&b));
+ }
+
+ #[test]
+ fn equal_underlying_data_bool_vs_bool() {
+ assert!(Value::Bool(true).equal_underlying_data(&Value::Bool(true)));
+ assert!(!Value::Bool(true).equal_underlying_data(&Value::Bool(false)));
+ }
+
+ #[test]
+ fn equal_underlying_data_float_vs_float() {
+ assert!(Value::Float(1.5).equal_underlying_data(&Value::Float(1.5)));
+ assert!(!Value::Float(1.5).equal_underlying_data(&Value::Float(2.5)));
+ }
+}
From 79698808fd85c4c8952aeb2e4b66ec53ff2fb684 Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 20:45:15 +0300
Subject: [PATCH 06/40] fix(rs-scripts): remove redundant wildcard pattern
blocking CI (#3430)
Co-authored-by: Claude Opus 4.6 (1M context)
---
packages/rs-scripts/src/bin/decode_document.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/rs-scripts/src/bin/decode_document.rs b/packages/rs-scripts/src/bin/decode_document.rs
index 79442016f39..ae26e36f072 100644
--- a/packages/rs-scripts/src/bin/decode_document.rs
+++ b/packages/rs-scripts/src/bin/decode_document.rs
@@ -121,7 +121,7 @@ fn main() {
eprintln!("Invalid hex: {e}");
std::process::exit(1);
}),
- "auto" | _ => {
+ _ => {
// Try base64 first (most common — gRPC responses are base64),
// then hex. This avoids misinterpreting hex-only base64 strings.
if let Ok(b) = base64::engine::general_purpose::STANDARD.decode(&args.doc_bytes) {
From f0ce8fdaf6c83adaebe8ef1c332a52bbc60260cb Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 21:38:48 +0300
Subject: [PATCH 07/40] test(drive): add coverage for fee calculation engine
(#3429)
Co-authored-by: Claude Opus 4.6 (1M context)
---
Cargo.lock | 228 +++++----
packages/rs-drive/src/fees/op.rs | 808 +++++++++++++++++++++++++++++++
packages/rs-scripts/Cargo.toml | 1 -
3 files changed, 920 insertions(+), 117 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 8b80e6dcc03..33bcc76f18c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -120,7 +120,7 @@ version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
dependencies = [
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -131,7 +131,7 @@ checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
dependencies = [
"anstyle",
"once_cell_polyfill",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -857,7 +857,7 @@ checksum = "3fce8dd7fcfcbf3a0a87d8f515194b49d6135acab73e18bd380d1d93bb1a15eb"
dependencies = [
"clap",
"heck 0.4.1",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"log",
"proc-macro2",
"quote",
@@ -876,7 +876,7 @@ checksum = "befbfd072a8e81c02f8c507aefce431fe5e7d051f83d48a23ffc9b9fe5a11799"
dependencies = [
"clap",
"heck 0.5.0",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"log",
"proc-macro2",
"quote",
@@ -889,9 +889,9 @@ dependencies = [
[[package]]
name = "cc"
-version = "1.2.58"
+version = "1.2.59"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1e928d4b69e3077709075a938a05ffbedfa53a84c8f766efbf8220bb1ff60e1"
+checksum = "b7a4d3ec6524d28a329fc53654bbadc9bdd7b0431f5d65f1a56ffb28a1ee5283"
dependencies = [
"find-msvc-tools",
"jobserver",
@@ -1114,7 +1114,7 @@ version = "3.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "faf9468729b8cbcea668e36183cb69d317348c2e08e994829fb56ebfdfbaac34"
dependencies = [
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -1620,7 +1620,7 @@ dependencies = [
"futures",
"hex",
"hickory-resolver",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"key-wallet",
"key-wallet-manager",
"log",
@@ -1927,7 +1927,7 @@ dependencies = [
"getrandom 0.2.17",
"grovedb-commitment-tree",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"integer-encoding",
"itertools 0.13.0",
"json-schema-compatibility-validator",
@@ -1991,7 +1991,7 @@ dependencies = [
"grovedb-storage",
"grovedb-version",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"integer-encoding",
"intmap",
"itertools 0.13.0",
@@ -2035,7 +2035,7 @@ dependencies = [
"file-rotate",
"grovedb-commitment-tree",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"integer-encoding",
"itertools 0.13.0",
"lazy_static",
@@ -2078,7 +2078,7 @@ dependencies = [
"dpp",
"drive",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"platform-serialization",
"platform-serialization-derive",
"serde",
@@ -2285,7 +2285,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -2466,9 +2466,12 @@ dependencies = [
[[package]]
name = "fragile"
-version = "2.0.1"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28dd6caf6059519a65843af8fe2a3ae298b14b80179855aeb4adc2c1934ee619"
+checksum = "8878864ba14bb86e818a412bfd6f18f9eabd4ec0f008a28e8f7eb61db532fcf9"
+dependencies = [
+ "futures-core",
+]
[[package]]
name = "fs_extra"
@@ -2714,7 +2717,7 @@ dependencies = [
"grovedbg-types",
"hex",
"hex-literal",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"integer-encoding",
"intmap",
"itertools 0.14.0",
@@ -2829,7 +2832,7 @@ dependencies = [
"grovedb-version",
"grovedb-visualize",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"integer-encoding",
"num_cpus",
"rand 0.10.0",
@@ -2866,7 +2869,7 @@ dependencies = [
"grovedb-costs",
"grovedb-storage",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"integer-encoding",
"thiserror 2.0.18",
]
@@ -2929,7 +2932,7 @@ dependencies = [
"futures-core",
"futures-sink",
"http",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"slab",
"tokio",
"tokio-util",
@@ -3262,9 +3265,9 @@ checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424"
[[package]]
name = "hyper"
-version = "1.8.1"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
+checksum = "6299f016b246a94207e63da54dbe807655bf9e00044f73ded42c3ac5305fbcca"
dependencies = [
"atomic-waker",
"bytes",
@@ -3277,7 +3280,6 @@ dependencies = [
"httpdate",
"itoa",
"pin-project-lite",
- "pin-utils",
"smallvec",
"tokio",
"want",
@@ -3347,7 +3349,7 @@ dependencies = [
"libc",
"percent-encoding",
"pin-project-lite",
- "socket2 0.5.10",
+ "socket2 0.6.3",
"system-configuration",
"tokio",
"tower-service",
@@ -3381,12 +3383,13 @@ dependencies = [
[[package]]
name = "icu_collections"
-version = "2.1.1"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
+checksum = "2984d1cd16c883d7935b9e07e44071dca8d917fd52ecc02c04d5fa0b5a3f191c"
dependencies = [
"displaydoc",
"potential_utf",
+ "utf8_iter",
"yoke",
"zerofrom",
"zerovec",
@@ -3394,9 +3397,9 @@ dependencies = [
[[package]]
name = "icu_locale_core"
-version = "2.1.1"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
+checksum = "92219b62b3e2b4d88ac5119f8904c10f8f61bf7e95b640d25ba3075e6cac2c29"
dependencies = [
"displaydoc",
"litemap",
@@ -3407,9 +3410,9 @@ dependencies = [
[[package]]
name = "icu_normalizer"
-version = "2.1.1"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
+checksum = "c56e5ee99d6e3d33bd91c5d85458b6005a22140021cc324cea84dd0e72cff3b4"
dependencies = [
"icu_collections",
"icu_normalizer_data",
@@ -3421,15 +3424,15 @@ dependencies = [
[[package]]
name = "icu_normalizer_data"
-version = "2.1.1"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
+checksum = "da3be0ae77ea334f4da67c12f149704f19f81d1adf7c51cf482943e84a2bad38"
[[package]]
name = "icu_properties"
-version = "2.1.2"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
+checksum = "bee3b67d0ea5c2cca5003417989af8996f8604e34fb9ddf96208a033901e70de"
dependencies = [
"icu_collections",
"icu_locale_core",
@@ -3441,15 +3444,15 @@ dependencies = [
[[package]]
name = "icu_properties_data"
-version = "2.1.2"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"
+checksum = "8e2bbb201e0c04f7b4b3e14382af113e17ba4f63e2c9d2ee626b720cbce54a14"
[[package]]
name = "icu_provider"
-version = "2.1.1"
+version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
+checksum = "139c4cf31c8b5f33d7e199446eff9c1e02decfc2f0eec2c8d71f65befa45b421"
dependencies = [
"displaydoc",
"icu_locale_core",
@@ -3531,9 +3534,9 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "2.13.0"
+version = "2.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017"
+checksum = "45a8a2b9cb3e0b0c1803dbb0758ffac5de2f425b23c28f518faabd9d805342ff"
dependencies = [
"equivalent",
"hashbrown 0.16.1",
@@ -3603,7 +3606,7 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46"
dependencies = [
"hermit-abi",
"libc",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -3923,9 +3926,9 @@ checksum = "744a4c881f502e98c2241d2e5f50040ac73b30194d64452bb6260393b53f0dc9"
[[package]]
name = "libc"
-version = "0.2.183"
+version = "0.2.184"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d"
+checksum = "48f5d2a454e16a5ea0f4ced81bd44e4cfc7bd3a507b61887c99fd3538b28e4af"
[[package]]
name = "libloading"
@@ -3993,9 +3996,9 @@ checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53"
[[package]]
name = "litemap"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
+checksum = "92daf443525c4cce67b150400bc2316076100ce0b3686209eb8cf3c31612e6f0"
[[package]]
name = "lock_api"
@@ -4132,7 +4135,7 @@ dependencies = [
"http-body-util",
"hyper",
"hyper-util",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"ipnet",
"metrics",
"metrics-util",
@@ -4337,7 +4340,7 @@ version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -4700,7 +4703,7 @@ checksum = "8701b58ea97060d5e5b155d383a69952a60943f0e6dfe30b04c287beb0b27455"
dependencies = [
"fixedbitset",
"hashbrown 0.15.5",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
]
[[package]]
@@ -4767,12 +4770,6 @@ version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
-[[package]]
-name = "pin-utils"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
-
[[package]]
name = "pkcs8"
version = "0.10.2"
@@ -4827,7 +4824,7 @@ dependencies = [
"bs58",
"ciborium",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"platform-serialization",
"platform-version",
"rand 0.8.5",
@@ -4872,7 +4869,7 @@ dependencies = [
"dash-sdk",
"dashcore",
"dpp",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"key-wallet",
"key-wallet-manager",
"platform-encryption",
@@ -4950,9 +4947,9 @@ dependencies = [
[[package]]
name = "potential_utf"
-version = "0.1.4"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
+checksum = "0103b1cef7ec0cf76490e969665504990193874ea05c85ff9bab8b911d0a0564"
dependencies = [
"zerovec",
]
@@ -5034,7 +5031,7 @@ version = "3.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f"
dependencies = [
- "toml_edit 0.25.8+spec-1.1.0",
+ "toml_edit 0.25.10+spec-1.1.0",
]
[[package]]
@@ -5109,7 +5106,7 @@ version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "343d3bd7056eda839b03204e68deff7d1b13aba7af2b2fd16890697274262ee7"
dependencies = [
- "heck 0.4.1",
+ "heck 0.5.0",
"itertools 0.14.0",
"log",
"multimap",
@@ -5268,7 +5265,7 @@ dependencies = [
"quinn-udp",
"rustc-hash 2.1.2",
"rustls",
- "socket2 0.5.10",
+ "socket2 0.6.3",
"thiserror 2.0.18",
"tokio",
"tracing",
@@ -5306,7 +5303,7 @@ dependencies = [
"cfg_aliases",
"libc",
"once_cell",
- "socket2 0.5.10",
+ "socket2 0.6.3",
"tracing",
"windows-sys 0.60.2",
]
@@ -5853,7 +5850,6 @@ dependencies = [
"dpp",
"hex",
"platform-version",
- "serde_json",
]
[[package]]
@@ -6032,7 +6028,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.12.1",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -6091,7 +6087,7 @@ dependencies = [
"security-framework",
"security-framework-sys",
"webpki-root-certs",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -6372,7 +6368,7 @@ version = "1.0.149"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
dependencies = [
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"itoa",
"memchr",
"serde",
@@ -6413,9 +6409,9 @@ dependencies = [
[[package]]
name = "serde_spanned"
-version = "1.1.0"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "876ac351060d4f882bb1032b6369eb0aef79ad9df1ea8bc404874d8cc3d0cd98"
+checksum = "6662b5879511e06e8999a8a235d848113e942c9124f211511b16466ee2995f26"
dependencies = [
"serde_core",
]
@@ -6458,7 +6454,7 @@ dependencies = [
"chrono",
"hex",
"indexmap 1.9.3",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"schemars 0.9.0",
"schemars 1.2.1",
"serde_core",
@@ -6680,7 +6676,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e"
dependencies = [
"libc",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -6912,7 +6908,7 @@ dependencies = [
"getrandom 0.4.2",
"once_cell",
"rustix 1.1.4",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -7110,9 +7106,9 @@ dependencies = [
[[package]]
name = "tinystr"
-version = "0.8.2"
+version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
+checksum = "c8323304221c2a851516f22236c5722a72eaa19749016521d6dff0824447d96d"
dependencies = [
"displaydoc",
"zerovec",
@@ -7155,9 +7151,9 @@ dependencies = [
[[package]]
name = "tokio"
-version = "1.50.0"
+version = "1.51.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d"
+checksum = "2bd1c4c0fc4a7ab90fc15ef6daaa3ec3b893f004f915f2392557ed23237820cd"
dependencies = [
"bytes",
"libc",
@@ -7173,9 +7169,9 @@ dependencies = [
[[package]]
name = "tokio-macros"
-version = "2.6.1"
+version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c"
+checksum = "385a6cb71ab9ab790c5fe8d67f1645e6c450a7ce006a33de03daa956cf70a496"
dependencies = [
"proc-macro2",
"quote",
@@ -7271,9 +7267,9 @@ version = "0.9.12+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863"
dependencies = [
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"serde_core",
- "serde_spanned 1.1.0",
+ "serde_spanned 1.1.1",
"toml_datetime 0.7.5+spec-1.1.0",
"toml_parser",
"toml_writer",
@@ -7300,9 +7296,9 @@ dependencies = [
[[package]]
name = "toml_datetime"
-version = "1.1.0+spec-1.1.0"
+version = "1.1.1+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97251a7c317e03ad83774a8752a7e81fb6067740609f75ea2b585b569a59198f"
+checksum = "3165f65f62e28e0115a00b2ebdd37eb6f3b641855f9d636d3cd4103767159ad7"
dependencies = [
"serde_core",
]
@@ -7313,7 +7309,7 @@ version = "0.19.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
dependencies = [
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"toml_datetime 0.6.11",
"winnow 0.5.40",
]
@@ -7324,7 +7320,7 @@ version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"serde",
"serde_spanned 0.6.9",
"toml_datetime 0.6.11",
@@ -7334,21 +7330,21 @@ dependencies = [
[[package]]
name = "toml_edit"
-version = "0.25.8+spec-1.1.0"
+version = "0.25.10+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16bff38f1d86c47f9ff0647e6838d7bb362522bdf44006c7068c2b1e606f1f3c"
+checksum = "a82418ca169e235e6c399a84e395ab6debeb3bc90edc959bf0f48647c6a32d1b"
dependencies = [
- "indexmap 2.13.0",
- "toml_datetime 1.1.0+spec-1.1.0",
+ "indexmap 2.13.1",
+ "toml_datetime 1.1.1+spec-1.1.0",
"toml_parser",
"winnow 1.0.1",
]
[[package]]
name = "toml_parser"
-version = "1.1.0+spec-1.1.0"
+version = "1.1.2+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2334f11ee363607eb04df9b8fc8a13ca1715a72ba8662a26ac285c98aabb4011"
+checksum = "a2abe9b86193656635d2411dc43050282ca48aa31c2451210f4202550afb7526"
dependencies = [
"winnow 1.0.1",
]
@@ -7361,9 +7357,9 @@ checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
[[package]]
name = "toml_writer"
-version = "1.1.0+spec-1.1.0"
+version = "1.1.1+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d282ade6016312faf3e41e57ebbba0c073e4056dab1232ab1cb624199648f8ed"
+checksum = "756daf9b1013ebe47a8776667b466417e2d4c5679d441c26230efd9ef78692db"
[[package]]
name = "tonic"
@@ -7519,7 +7515,7 @@ checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4"
dependencies = [
"futures-core",
"futures-util",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"pin-project-lite",
"slab",
"sync_wrapper",
@@ -8142,7 +8138,7 @@ dependencies = [
"dpp",
"drive",
"hex",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"js-sys",
"serde",
"serde-wasm-bindgen 0.6.5",
@@ -8180,7 +8176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
dependencies = [
"anyhow",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"wasm-encoder",
"wasmparser",
]
@@ -8242,7 +8238,7 @@ checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
dependencies = [
"bitflags 2.11.0",
"hashbrown 0.15.5",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"semver",
]
@@ -8324,7 +8320,7 @@ version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
dependencies = [
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
]
[[package]]
@@ -8689,7 +8685,7 @@ checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
dependencies = [
"anyhow",
"heck 0.5.0",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"prettyplease",
"syn 2.0.117",
"wasm-metadata",
@@ -8720,7 +8716,7 @@ checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
dependencies = [
"anyhow",
"bitflags 2.11.0",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"log",
"serde",
"serde_derive",
@@ -8739,7 +8735,7 @@ checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
dependencies = [
"anyhow",
"id-arena",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"log",
"semver",
"serde",
@@ -8764,9 +8760,9 @@ dependencies = [
[[package]]
name = "writeable"
-version = "0.6.2"
+version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
+checksum = "1ffae5123b2d3fc086436f8834ae3ab053a283cfac8fe0a0b8eaae044768a4c4"
[[package]]
name = "wyz"
@@ -8791,9 +8787,9 @@ checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
[[package]]
name = "yoke"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
+checksum = "abe8c5fda708d9ca3df187cae8bfb9ceda00dd96231bed36e445a1a48e66f9ca"
dependencies = [
"stable_deref_trait",
"yoke-derive",
@@ -8802,9 +8798,9 @@ dependencies = [
[[package]]
name = "yoke-derive"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
+checksum = "de844c262c8848816172cef550288e7dc6c7b7814b4ee56b3e1553f275f1858e"
dependencies = [
"proc-macro2",
"quote",
@@ -8855,18 +8851,18 @@ dependencies = [
[[package]]
name = "zerofrom"
-version = "0.1.6"
+version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+checksum = "69faa1f2a1ea75661980b013019ed6687ed0e83d069bc1114e2cc74c6c04c4df"
dependencies = [
"zerofrom-derive",
]
[[package]]
name = "zerofrom-derive"
-version = "0.1.6"
+version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+checksum = "11532158c46691caf0f2593ea8358fed6bbf68a0315e80aae9bd41fbade684a1"
dependencies = [
"proc-macro2",
"quote",
@@ -8920,9 +8916,9 @@ dependencies = [
[[package]]
name = "zerotrie"
-version = "0.2.3"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
+checksum = "0f9152d31db0792fa83f70fb2f83148effb5c1f5b8c7686c3459e361d9bc20bf"
dependencies = [
"displaydoc",
"yoke",
@@ -8931,9 +8927,9 @@ dependencies = [
[[package]]
name = "zerovec"
-version = "0.11.5"
+version = "0.11.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
+checksum = "90f911cbc359ab6af17377d242225f4d75119aec87ea711a880987b18cd7b239"
dependencies = [
"yoke",
"zerofrom",
@@ -8942,9 +8938,9 @@ dependencies = [
[[package]]
name = "zerovec-derive"
-version = "0.11.2"
+version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
+checksum = "625dc425cab0dca6dc3c3319506e6593dcb08a9f387ea3b284dbd52a92c40555"
dependencies = [
"proc-macro2",
"quote",
@@ -8964,7 +8960,7 @@ dependencies = [
"flate2",
"getrandom 0.3.4",
"hmac",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"lzma-rust2",
"memchr",
"pbkdf2",
@@ -8981,7 +8977,7 @@ checksum = "c42e33efc22a0650c311c2ef19115ce232583abbe80850bc8b66509ebef02de0"
dependencies = [
"crc32fast",
"flate2",
- "indexmap 2.13.0",
+ "indexmap 2.13.1",
"memchr",
"typed-path",
"zopfli",
diff --git a/packages/rs-drive/src/fees/op.rs b/packages/rs-drive/src/fees/op.rs
index 58fdc18606e..7ed6bd5be52 100644
--- a/packages/rs-drive/src/fees/op.rs
+++ b/packages/rs-drive/src/fees/op.rs
@@ -648,3 +648,811 @@ impl DriveCost for OperationCost {
.ok_or_else(|| get_overflow_error("ephemeral cost addition overflow"))
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use grovedb_costs::storage_cost::removal::StorageRemovedBytes;
+ use grovedb_costs::storage_cost::StorageCost;
+ use platform_version::version::fee::storage::FeeStorageVersion;
+ use platform_version::version::fee::FeeVersion;
+
+ /// Helper to get the canonical fee version used across these tests.
+ fn fee_version() -> &'static FeeVersion {
+ FeeVersion::first()
+ }
+
+ // ---------------------------------------------------------------
+ // 1. BaseOp::cost() — spot-check several opcodes
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn base_op_stop_costs_zero() {
+ assert_eq!(BaseOp::Stop.cost(), 0);
+ }
+
+ #[test]
+ fn base_op_add_costs_12() {
+ assert_eq!(BaseOp::Add.cost(), 12);
+ }
+
+ #[test]
+ fn base_op_mul_costs_20() {
+ assert_eq!(BaseOp::Mul.cost(), 20);
+ }
+
+ #[test]
+ fn base_op_signextend_costs_20() {
+ assert_eq!(BaseOp::Signextend.cost(), 20);
+ }
+
+ #[test]
+ fn base_op_addmod_costs_32() {
+ assert_eq!(BaseOp::Addmod.cost(), 32);
+ }
+
+ #[test]
+ fn base_op_mulmod_costs_32() {
+ assert_eq!(BaseOp::Mulmod.cost(), 32);
+ }
+
+ #[test]
+ fn base_op_byte_costs_12() {
+ assert_eq!(BaseOp::Byte.cost(), 12);
+ }
+
+ #[test]
+ fn base_op_sub_costs_12() {
+ assert_eq!(BaseOp::Sub.cost(), 12);
+ }
+
+ #[test]
+ fn base_op_div_costs_20() {
+ assert_eq!(BaseOp::Div.cost(), 20);
+ }
+
+ #[test]
+ fn base_op_comparison_ops_all_cost_12() {
+ for op in [
+ BaseOp::Lt,
+ BaseOp::Gt,
+ BaseOp::Slt,
+ BaseOp::Sgt,
+ BaseOp::Eq,
+ BaseOp::Iszero,
+ ] {
+ assert_eq!(op.cost(), 12, "comparison op {:?} should cost 12", op);
+ }
+ }
+
+ #[test]
+ fn base_op_bitwise_ops_all_cost_12() {
+ for op in [BaseOp::And, BaseOp::Or, BaseOp::Xor, BaseOp::Not] {
+ assert_eq!(op.cost(), 12, "bitwise op {:?} should cost 12", op);
+ }
+ }
+
+ // ---------------------------------------------------------------
+ // 2. HashFunction — block_size / rounds / block_cost / base_cost
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn hash_function_block_size_all_64() {
+ // All four hash functions currently have a 64-byte block size.
+ assert_eq!(HashFunction::Sha256.block_size(), 64);
+ assert_eq!(HashFunction::Sha256_2.block_size(), 64);
+ assert_eq!(HashFunction::Blake3.block_size(), 64);
+ assert_eq!(HashFunction::Sha256RipeMD160.block_size(), 64);
+ }
+
+ #[test]
+ fn hash_function_rounds() {
+ assert_eq!(HashFunction::Sha256.rounds(), 1);
+ assert_eq!(HashFunction::Sha256_2.rounds(), 2);
+ assert_eq!(HashFunction::Blake3.rounds(), 1);
+ assert_eq!(HashFunction::Sha256RipeMD160.rounds(), 1);
+ }
+
+ #[test]
+ fn hash_function_block_cost_sha256_variants_use_sha256_per_block() {
+ let fv = fee_version();
+ let expected = fv.hashing.sha256_per_block;
+ assert_eq!(HashFunction::Sha256.block_cost(fv), expected);
+ assert_eq!(HashFunction::Sha256_2.block_cost(fv), expected);
+ assert_eq!(HashFunction::Sha256RipeMD160.block_cost(fv), expected);
+ }
+
+ #[test]
+ fn hash_function_block_cost_blake3_uses_blake3_per_block() {
+ let fv = fee_version();
+ assert_eq!(
+ HashFunction::Blake3.block_cost(fv),
+ fv.hashing.blake3_per_block
+ );
+ }
+
+ #[test]
+ fn hash_function_base_cost_sha256() {
+ let fv = fee_version();
+ assert_eq!(
+ HashFunction::Sha256.base_cost(fv),
+ fv.hashing.single_sha256_base
+ );
+ }
+
+ #[test]
+ fn hash_function_base_cost_sha256_2_uses_single_sha256_base() {
+ let fv = fee_version();
+ // Sha256_2 intentionally uses single_sha256_base (extra rounds handle the double hash).
+ assert_eq!(
+ HashFunction::Sha256_2.base_cost(fv),
+ fv.hashing.single_sha256_base
+ );
+ }
+
+ #[test]
+ fn hash_function_base_cost_blake3() {
+ let fv = fee_version();
+ assert_eq!(HashFunction::Blake3.base_cost(fv), fv.hashing.blake3_base);
+ }
+
+ #[test]
+ fn hash_function_base_cost_sha256_ripe_md160() {
+ let fv = fee_version();
+ assert_eq!(
+ HashFunction::Sha256RipeMD160.base_cost(fv),
+ fv.hashing.sha256_ripe_md160_base
+ );
+ }
+
+ // ---------------------------------------------------------------
+ // 3. FunctionOp::new_with_byte_count — verify blocks/rounds calc
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn function_op_new_with_byte_count_small_sha256() {
+ // 32 bytes => blocks = 32/64 + 1 = 1, rounds = 1 + 1 - 1 = 1
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256, 32);
+ assert_eq!(op.rounds, 1);
+ assert_eq!(op.hash, HashFunction::Sha256);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_exact_block_boundary_sha256() {
+ // 64 bytes => blocks = 64/64 + 1 = 2, rounds = 2 + 1 - 1 = 2
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256, 64);
+ assert_eq!(op.rounds, 2);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_large_sha256() {
+ // 200 bytes => blocks = 200/64 + 1 = 3 + 1 = 4, rounds = 4 + 1 - 1 = 4
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256, 200);
+ assert_eq!(op.rounds, 4);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_sha256_2_has_extra_round() {
+ // 32 bytes => blocks = 32/64 + 1 = 1, rounds = 1 + 2 - 1 = 2
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256_2, 32);
+ assert_eq!(op.rounds, 2);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_sha256_2_large() {
+ // 200 bytes => blocks = 200/64 + 1 = 4, rounds = 4 + 2 - 1 = 5
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256_2, 200);
+ assert_eq!(op.rounds, 5);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_blake3_small() {
+ // 10 bytes => blocks = 10/64 + 1 = 1, rounds = 1 + 1 - 1 = 1
+ let op = FunctionOp::new_with_byte_count(HashFunction::Blake3, 10);
+ assert_eq!(op.rounds, 1);
+ assert_eq!(op.hash, HashFunction::Blake3);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_blake3_large() {
+ // 500 bytes => blocks = 500/64 + 1 = 7 + 1 = 8, rounds = 8 + 1 - 1 = 8
+ let op = FunctionOp::new_with_byte_count(HashFunction::Blake3, 500);
+ assert_eq!(op.rounds, 8);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_zero_bytes() {
+ // 0 bytes => blocks = 0/64 + 1 = 1, rounds = 1 + 1 - 1 = 1
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256, 0);
+ assert_eq!(op.rounds, 1);
+ }
+
+ #[test]
+ fn function_op_new_with_byte_count_sha256_ripemd160() {
+ // 20 bytes => blocks = 20/64 + 1 = 1, rounds = 1 + 1 - 1 = 1
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256RipeMD160, 20);
+ assert_eq!(op.rounds, 1);
+ assert_eq!(op.hash, HashFunction::Sha256RipeMD160);
+ }
+
+ // ---------------------------------------------------------------
+ // 4. FunctionOp::cost — verify rounds * block_cost + base_cost
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn function_op_cost_sha256_one_round() {
+ let fv = fee_version();
+ let op = FunctionOp::new_with_round_count(HashFunction::Sha256, 1);
+ // cost = base + rounds * block_cost = 100 + 1 * 5000 = 5100
+ let expected = fv.hashing.single_sha256_base + 1 * fv.hashing.sha256_per_block;
+ assert_eq!(op.cost(fv), expected);
+ }
+
+ #[test]
+ fn function_op_cost_sha256_2_two_rounds() {
+ let fv = fee_version();
+ let op = FunctionOp::new_with_round_count(HashFunction::Sha256_2, 2);
+ // cost = base + rounds * block_cost = 100 + 2 * 5000 = 10100
+ let expected = fv.hashing.single_sha256_base + 2 * fv.hashing.sha256_per_block;
+ assert_eq!(op.cost(fv), expected);
+ }
+
+ #[test]
+ fn function_op_cost_blake3_one_round() {
+ let fv = fee_version();
+ let op = FunctionOp::new_with_round_count(HashFunction::Blake3, 1);
+ // cost = blake3_base + 1 * blake3_per_block = 100 + 300 = 400
+ let expected = fv.hashing.blake3_base + 1 * fv.hashing.blake3_per_block;
+ assert_eq!(op.cost(fv), expected);
+ }
+
+ #[test]
+ fn function_op_cost_zero_rounds() {
+ let fv = fee_version();
+ let op = FunctionOp::new_with_round_count(HashFunction::Blake3, 0);
+ // cost = blake3_base + 0 * blake3_per_block = blake3_base
+ assert_eq!(op.cost(fv), fv.hashing.blake3_base);
+ }
+
+ #[test]
+ fn function_op_cost_from_byte_count_matches_manual_calc() {
+ let fv = fee_version();
+ // 128 bytes of SHA256: blocks = 128/64 + 1 = 3, rounds = 3 + 1 - 1 = 3
+ let op = FunctionOp::new_with_byte_count(HashFunction::Sha256, 128);
+ assert_eq!(op.rounds, 3);
+ let expected = fv.hashing.single_sha256_base + 3 * fv.hashing.sha256_per_block;
+ assert_eq!(op.cost(fv), expected);
+ }
+
+ #[test]
+ fn function_op_cost_sha256_ripemd160() {
+ let fv = fee_version();
+ let op = FunctionOp::new_with_round_count(HashFunction::Sha256RipeMD160, 1);
+ let expected = fv.hashing.sha256_ripe_md160_base + 1 * fv.hashing.sha256_per_block;
+ assert_eq!(op.cost(fv), expected);
+ }
+
+ #[test]
+ fn function_op_cost_saturating_mul_does_not_panic_on_large_rounds() {
+ let fv = fee_version();
+ let op = FunctionOp::new_with_round_count(HashFunction::Sha256, u32::MAX);
+ // u32::MAX as u64 * sha256_per_block (5000) fits in u64 without overflow,
+ // so cost = base + rounds * block_cost, computed via saturating ops.
+ let expected_block_cost = (u32::MAX as u64).saturating_mul(fv.hashing.sha256_per_block);
+ let expected = fv
+ .hashing
+ .single_sha256_base
+ .saturating_add(expected_block_cost);
+ assert_eq!(op.cost(fv), expected);
+ }
+
+ #[test]
+ fn function_op_cost_saturates_to_max_with_extreme_fee_version() {
+ // Construct a fee version where block_cost is large enough that
+ // u32::MAX * block_cost overflows u64, triggering saturation.
+ let mut fv = fee_version().clone();
+ fv.hashing.sha256_per_block = u64::MAX;
+ let op = FunctionOp::new_with_round_count(HashFunction::Sha256, 2);
+ // 2 * u64::MAX saturates to u64::MAX, then base.saturating_add(u64::MAX) = u64::MAX.
+ assert_eq!(op.cost(&fv), u64::MAX);
+ }
+
+ // ---------------------------------------------------------------
+ // 5. operation_cost() — test all 4 match arms
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn operation_cost_calculated_cost_operation_returns_cost() {
+ let cost = OperationCost {
+ seek_count: 3,
+ storage_cost: StorageCost {
+ added_bytes: 100,
+ replaced_bytes: 50,
+ removed_bytes: StorageRemovedBytes::NoStorageRemoval,
+ },
+ storage_loaded_bytes: 200,
+ hash_node_calls: 5,
+ sinsemilla_hash_calls: 0,
+ };
+ let op = CalculatedCostOperation(cost.clone());
+ let result = op.operation_cost().expect("should return Ok");
+ assert_eq!(result, cost);
+ }
+
+ #[test]
+ fn operation_cost_grove_operation_returns_error() {
+ let grove_op = LowLevelDriveOperation::insert_for_known_path_key_element(
+ vec![vec![1, 2, 3]],
+ vec![4, 5, 6],
+ Element::empty_tree(),
+ );
+ let result = grove_op.operation_cost();
+ assert!(result.is_err());
+ let err_msg = format!("{:?}", result.unwrap_err());
+ assert!(
+ err_msg.contains("grove operations must be executed"),
+ "unexpected error: {}",
+ err_msg
+ );
+ }
+
+ #[test]
+ fn operation_cost_pre_calculated_fee_result_returns_error() {
+ let fee = FeeResult {
+ storage_fee: 100,
+ processing_fee: 200,
+ ..Default::default()
+ };
+ let op = PreCalculatedFeeResult(fee);
+ let result = op.operation_cost();
+ assert!(result.is_err());
+ let err_msg = format!("{:?}", result.unwrap_err());
+ assert!(
+ err_msg.contains("pre calculated fees should not be requested"),
+ "unexpected error: {}",
+ err_msg
+ );
+ }
+
+ #[test]
+ fn operation_cost_function_operation_returns_error() {
+ let func_op = FunctionOperation(FunctionOp::new_with_round_count(HashFunction::Blake3, 1));
+ let result = func_op.operation_cost();
+ assert!(result.is_err());
+ let err_msg = format!("{:?}", result.unwrap_err());
+ assert!(
+ err_msg.contains("function operations should not be requested"),
+ "unexpected error: {}",
+ err_msg
+ );
+ }
+
+ // ---------------------------------------------------------------
+ // 6. combine_cost_operations — filter and sum
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn combine_cost_operations_sums_calculated_costs_only() {
+ let cost1 = OperationCost {
+ seek_count: 2,
+ storage_cost: StorageCost {
+ added_bytes: 10,
+ replaced_bytes: 0,
+ removed_bytes: StorageRemovedBytes::NoStorageRemoval,
+ },
+ storage_loaded_bytes: 50,
+ hash_node_calls: 1,
+ sinsemilla_hash_calls: 0,
+ };
+ let cost2 = OperationCost {
+ seek_count: 3,
+ storage_cost: StorageCost {
+ added_bytes: 20,
+ replaced_bytes: 5,
+ removed_bytes: StorageRemovedBytes::NoStorageRemoval,
+ },
+ storage_loaded_bytes: 100,
+ hash_node_calls: 2,
+ sinsemilla_hash_calls: 1,
+ };
+
+ let operations = vec![
+ CalculatedCostOperation(cost1.clone()),
+ // This FunctionOperation should be ignored by combine_cost_operations
+ FunctionOperation(FunctionOp::new_with_round_count(HashFunction::Sha256, 1)),
+ CalculatedCostOperation(cost2.clone()),
+ // PreCalculatedFeeResult should also be ignored
+ PreCalculatedFeeResult(FeeResult::default()),
+ ];
+
+ let combined = LowLevelDriveOperation::combine_cost_operations(&operations);
+ assert_eq!(combined.seek_count, 2 + 3);
+ assert_eq!(combined.storage_cost.added_bytes, 10 + 20);
+ assert_eq!(combined.storage_cost.replaced_bytes, 0 + 5);
+ assert_eq!(combined.storage_loaded_bytes, 50 + 100);
+ assert_eq!(combined.hash_node_calls, 1 + 2);
+ assert_eq!(combined.sinsemilla_hash_calls, 0 + 1);
+ }
+
+ #[test]
+ fn combine_cost_operations_empty_list_returns_default() {
+ let combined = LowLevelDriveOperation::combine_cost_operations(&[]);
+ assert_eq!(combined, OperationCost::default());
+ }
+
+ #[test]
+ fn combine_cost_operations_no_calculated_costs_returns_default() {
+ let operations = vec![
+ FunctionOperation(FunctionOp::new_with_round_count(HashFunction::Blake3, 2)),
+ PreCalculatedFeeResult(FeeResult {
+ processing_fee: 999,
+ ..Default::default()
+ }),
+ ];
+ let combined = LowLevelDriveOperation::combine_cost_operations(&operations);
+ assert_eq!(combined, OperationCost::default());
+ }
+
+ // ---------------------------------------------------------------
+ // 7. grovedb_operations_batch / _consume / _consume_with_leftovers
+ // ---------------------------------------------------------------
+
+ /// Helper: creates a GroveOperation variant (insert_or_replace).
+ fn make_grove_op(key_byte: u8) -> LowLevelDriveOperation {
+ LowLevelDriveOperation::insert_for_known_path_key_element(
+ vec![vec![0]],
+ vec![key_byte],
+ Element::new_item(vec![key_byte]),
+ )
+ }
+
+ fn make_mixed_ops() -> Vec {
+ vec![
+ make_grove_op(1),
+ FunctionOperation(FunctionOp::new_with_round_count(HashFunction::Sha256, 1)),
+ make_grove_op(2),
+ CalculatedCostOperation(OperationCost::default()),
+ make_grove_op(3),
+ ]
+ }
+
+ #[test]
+ fn grovedb_operations_batch_filters_grove_ops_from_ref() {
+ let ops = make_mixed_ops();
+ let batch = LowLevelDriveOperation::grovedb_operations_batch(&ops);
+ assert_eq!(batch.len(), 3);
+ }
+
+ #[test]
+ fn grovedb_operations_batch_empty_input() {
+ let batch = LowLevelDriveOperation::grovedb_operations_batch(&[]);
+ assert!(batch.is_empty());
+ }
+
+ #[test]
+ fn grovedb_operations_batch_no_grove_ops() {
+ let ops = vec![
+ FunctionOperation(FunctionOp::new_with_round_count(HashFunction::Blake3, 1)),
+ CalculatedCostOperation(OperationCost::default()),
+ ];
+ let batch = LowLevelDriveOperation::grovedb_operations_batch(&ops);
+ assert!(batch.is_empty());
+ }
+
+ #[test]
+ fn grovedb_operations_batch_consume_filters_grove_ops() {
+ let ops = make_mixed_ops();
+ let batch = LowLevelDriveOperation::grovedb_operations_batch_consume(ops);
+ assert_eq!(batch.len(), 3);
+ }
+
+ #[test]
+ fn grovedb_operations_batch_consume_empty_input() {
+ let batch = LowLevelDriveOperation::grovedb_operations_batch_consume(vec![]);
+ assert!(batch.is_empty());
+ }
+
+ #[test]
+ fn grovedb_operations_batch_consume_with_leftovers_partitions_correctly() {
+ let ops = make_mixed_ops();
+ let (batch, leftovers) =
+ LowLevelDriveOperation::grovedb_operations_batch_consume_with_leftovers(ops);
+ assert_eq!(batch.len(), 3);
+ assert_eq!(leftovers.len(), 2);
+
+ // Verify leftovers contain the non-grove operations.
+ for leftover in &leftovers {
+ assert!(
+ !matches!(leftover, GroveOperation(_)),
+ "leftovers should not contain GroveOperation variants"
+ );
+ }
+ }
+
+ #[test]
+ fn grovedb_operations_batch_consume_with_leftovers_all_grove() {
+ let ops = vec![make_grove_op(10), make_grove_op(20)];
+ let (batch, leftovers) =
+ LowLevelDriveOperation::grovedb_operations_batch_consume_with_leftovers(ops);
+ assert_eq!(batch.len(), 2);
+ assert!(leftovers.is_empty());
+ }
+
+ #[test]
+ fn grovedb_operations_batch_consume_with_leftovers_no_grove() {
+ let ops = vec![
+ CalculatedCostOperation(OperationCost::default()),
+ FunctionOperation(FunctionOp::new_with_round_count(HashFunction::Sha256, 1)),
+ ];
+ let (batch, leftovers) =
+ LowLevelDriveOperation::grovedb_operations_batch_consume_with_leftovers(ops);
+ assert!(batch.is_empty());
+ assert_eq!(leftovers.len(), 2);
+ }
+
+ #[test]
+ fn grovedb_operations_batch_consume_with_leftovers_empty() {
+ let (batch, leftovers) =
+ LowLevelDriveOperation::grovedb_operations_batch_consume_with_leftovers(vec![]);
+ assert!(batch.is_empty());
+ assert!(leftovers.is_empty());
+ }
+
+ // ---------------------------------------------------------------
+ // 8. DriveCost::ephemeral_cost — various scenarios
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn ephemeral_cost_zero_operation() {
+ let fv = fee_version();
+ let cost = OperationCost::default();
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ assert_eq!(result, 0);
+ }
+
+ #[test]
+ fn ephemeral_cost_seek_only() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 5,
+ storage_cost: StorageCost::default(),
+ storage_loaded_bytes: 0,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ let expected = 5u64 * fv.storage.storage_seek_cost;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_storage_added_bytes() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost {
+ added_bytes: 100,
+ replaced_bytes: 0,
+ removed_bytes: StorageRemovedBytes::NoStorageRemoval,
+ },
+ storage_loaded_bytes: 0,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ let expected = 100u64 * fv.storage.storage_processing_credit_per_byte;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_storage_replaced_bytes() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost {
+ added_bytes: 0,
+ replaced_bytes: 50,
+ removed_bytes: StorageRemovedBytes::NoStorageRemoval,
+ },
+ storage_loaded_bytes: 0,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ let expected = 50u64 * fv.storage.storage_processing_credit_per_byte;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_storage_removed_bytes_basic() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost {
+ added_bytes: 0,
+ replaced_bytes: 0,
+ removed_bytes: StorageRemovedBytes::BasicStorageRemoval(75),
+ },
+ storage_loaded_bytes: 0,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ let expected = 75u64 * fv.storage.storage_processing_credit_per_byte;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_loaded_bytes() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost::default(),
+ storage_loaded_bytes: 300,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ let expected = 300u64 * fv.storage.storage_load_credit_per_byte;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_hash_node_calls() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost::default(),
+ storage_loaded_bytes: 0,
+ hash_node_calls: 10,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ let blake3_total = fv.hashing.blake3_base + fv.hashing.blake3_per_block;
+ let expected = blake3_total * 10;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_sinsemilla_hash_calls() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost::default(),
+ storage_loaded_bytes: 0,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 3,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+ let expected = fv.hashing.sinsemilla_base * 3;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_all_components_combined() {
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: 2,
+ storage_cost: StorageCost {
+ added_bytes: 10,
+ replaced_bytes: 20,
+ removed_bytes: StorageRemovedBytes::BasicStorageRemoval(30),
+ },
+ storage_loaded_bytes: 40,
+ hash_node_calls: 5,
+ sinsemilla_hash_calls: 1,
+ };
+ let result = cost.ephemeral_cost(fv).expect("should not overflow");
+
+ let seek_cost = 2u64 * fv.storage.storage_seek_cost;
+ let processing_per_byte = fv.storage.storage_processing_credit_per_byte;
+ let added_cost = 10u64 * processing_per_byte;
+ let replaced_cost = 20u64 * processing_per_byte;
+ let removed_cost = 30u64 * processing_per_byte;
+ let loaded_cost = 40u64 * fv.storage.storage_load_credit_per_byte;
+ let blake3_total = fv.hashing.blake3_base + fv.hashing.blake3_per_block;
+ let hash_cost = blake3_total * 5;
+ let sinsemilla_cost = fv.hashing.sinsemilla_base * 1;
+
+ let expected = seek_cost
+ + added_cost
+ + replaced_cost
+ + loaded_cost
+ + removed_cost
+ + hash_cost
+ + sinsemilla_cost;
+ assert_eq!(result, expected);
+ }
+
+ #[test]
+ fn ephemeral_cost_overflow_seek_cost() {
+ let fv = &FeeVersion {
+ storage: FeeStorageVersion {
+ storage_seek_cost: u64::MAX,
+ ..fee_version().storage.clone()
+ },
+ ..fee_version().clone()
+ };
+ let cost = OperationCost {
+ seek_count: 2, // 2 * u64::MAX overflows
+ storage_cost: StorageCost::default(),
+ storage_loaded_bytes: 0,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv);
+ assert!(result.is_err(), "expected overflow error for seek cost");
+ }
+
+ #[test]
+ fn ephemeral_cost_overflow_storage_written_bytes() {
+ let fv = &FeeVersion {
+ storage: FeeStorageVersion {
+ storage_processing_credit_per_byte: u64::MAX,
+ ..fee_version().storage.clone()
+ },
+ ..fee_version().clone()
+ };
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost {
+ added_bytes: 2, // 2 * u64::MAX overflows
+ replaced_bytes: 0,
+ removed_bytes: StorageRemovedBytes::NoStorageRemoval,
+ },
+ storage_loaded_bytes: 0,
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv);
+ assert!(
+ result.is_err(),
+ "expected overflow error for storage written bytes"
+ );
+ }
+
+ #[test]
+ fn ephemeral_cost_overflow_loaded_bytes() {
+ let fv = &FeeVersion {
+ storage: FeeStorageVersion {
+ storage_load_credit_per_byte: u64::MAX,
+ ..fee_version().storage.clone()
+ },
+ ..fee_version().clone()
+ };
+ let cost = OperationCost {
+ seek_count: 0,
+ storage_cost: StorageCost::default(),
+ storage_loaded_bytes: 2, // 2 * u64::MAX overflows
+ hash_node_calls: 0,
+ sinsemilla_hash_calls: 0,
+ };
+ let result = cost.ephemeral_cost(fv);
+ assert!(
+ result.is_err(),
+ "expected overflow error for loaded bytes cost"
+ );
+ }
+
+ #[test]
+ fn ephemeral_cost_overflow_in_addition_chain() {
+ // Use values that individually do not overflow but whose sum does.
+ let fv = fee_version();
+ let cost = OperationCost {
+ seek_count: u32::MAX,
+ storage_cost: StorageCost {
+ added_bytes: u32::MAX,
+ replaced_bytes: u32::MAX,
+ removed_bytes: StorageRemovedBytes::BasicStorageRemoval(u32::MAX),
+ },
+ storage_loaded_bytes: u64::MAX,
+ hash_node_calls: u32::MAX,
+ sinsemilla_hash_calls: u32::MAX,
+ };
+ let result = cost.ephemeral_cost(fv);
+ assert!(
+ result.is_err(),
+ "expected overflow error when summing large components"
+ );
+ }
+}
diff --git a/packages/rs-scripts/Cargo.toml b/packages/rs-scripts/Cargo.toml
index dc639760994..18bb06e33b9 100644
--- a/packages/rs-scripts/Cargo.toml
+++ b/packages/rs-scripts/Cargo.toml
@@ -15,4 +15,3 @@ base64 = "0.22"
chrono = "0.4"
hex = "0.4"
clap = { version = "4", features = ["derive"] }
-serde_json = "1"
From daae16cb25baf5836b5cf4854900436cb3003927 Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 21:39:56 +0300
Subject: [PATCH 08/40] =?UTF-8?q?test(platform):=20coverage=20round=203=20?=
=?UTF-8?q?=E2=80=94=20replace,=20index,=20bytes,=20distribution=20encode?=
=?UTF-8?q?=20+=20exclusions=20(#3431)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Co-authored-by: Claude Opus 4.6 (1M context)
---
.codecov.yml | 4 +
.../distribution_function/encode.rs | 1095 +++++++++++++++++
packages/rs-platform-value/src/index.rs | 369 ++++++
packages/rs-platform-value/src/replace.rs | 661 ++++++++++
.../rs-platform-value/src/types/bytes_20.rs | 410 ++++++
.../rs-platform-value/src/types/bytes_32.rs | 423 +++++++
6 files changed, 2962 insertions(+)
diff --git a/.codecov.yml b/.codecov.yml
index 89b413c72f5..5501e9a0e20 100644
--- a/.codecov.yml
+++ b/.codecov.yml
@@ -88,6 +88,10 @@ ignore:
# Value Display and string encoding — trivial formatting, not logic
- "packages/rs-platform-value/src/display.rs"
- "packages/rs-platform-value/src/string_encoding.rs"
+ # Accessor-only files — pure getters/setters with no logic
+ - "packages/rs-dpp/src/data_contract/associated_token/token_configuration/v0/accessors.rs"
+ - "packages/rs-dpp/src/document/v0/accessors.rs"
+ - "packages/rs-drive-abci/src/platform_types/platform_state/accessors.rs"
# Core chain type wrappers — masternode entry structs, deserialization
# boilerplate, thin type aliases
- "packages/rs-dpp/src/core_types/**"
diff --git a/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/encode.rs b/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/encode.rs
index a68fc360248..002cd993366 100644
--- a/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/encode.rs
+++ b/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/encode.rs
@@ -450,3 +450,1098 @@ impl<'de, C> BorrowDecode<'de, C> for DistributionFunction {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ const CONFIG: bincode::config::Configuration = bincode::config::standard();
+
+ /// Helper: encode then decode a DistributionFunction and assert round-trip equality.
+ fn round_trip(original: &DistributionFunction) -> DistributionFunction {
+ let bytes = bincode::encode_to_vec(original, CONFIG).expect("encode failed");
+ let (decoded, _): (DistributionFunction, _) =
+ bincode::decode_from_slice(&bytes, CONFIG).expect("decode failed");
+ decoded
+ }
+
+ /// Helper: encode then borrow-decode a DistributionFunction and assert round-trip equality.
+ fn round_trip_borrow(original: &DistributionFunction) -> DistributionFunction {
+ let bytes = bincode::encode_to_vec(original, CONFIG).expect("encode failed");
+ let (decoded, _): (DistributionFunction, _) =
+ bincode::borrow_decode_from_slice(&bytes, CONFIG).expect("borrow_decode failed");
+ decoded
+ }
+
+ // -----------------------------------------------------------------------
+ // Round-trip tests for each variant
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn round_trip_fixed_amount() {
+ let original = DistributionFunction::FixedAmount { amount: 42 };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_random() {
+ let original = DistributionFunction::Random { min: 10, max: 100 };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_step_decreasing_amount() {
+ let original = DistributionFunction::StepDecreasingAmount {
+ step_count: 210_000,
+ decrease_per_interval_numerator: 1,
+ decrease_per_interval_denominator: 2,
+ start_decreasing_offset: Some(100),
+ max_interval_count: Some(64),
+ distribution_start_amount: 5000,
+ trailing_distribution_interval_amount: 1,
+ min_value: Some(10),
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_step_decreasing_amount_none_options() {
+ let original = DistributionFunction::StepDecreasingAmount {
+ step_count: 1000,
+ decrease_per_interval_numerator: 7,
+ decrease_per_interval_denominator: 100,
+ start_decreasing_offset: None,
+ max_interval_count: None,
+ distribution_start_amount: 999,
+ trailing_distribution_interval_amount: 0,
+ min_value: None,
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_stepwise() {
+ let mut steps = BTreeMap::new();
+ steps.insert(0, 100);
+ steps.insert(10, 50);
+ steps.insert(20, 25);
+ let original = DistributionFunction::Stepwise(steps);
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_stepwise_empty() {
+ let original = DistributionFunction::Stepwise(BTreeMap::new());
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_linear() {
+ let original = DistributionFunction::Linear {
+ a: -5,
+ d: 100,
+ start_step: Some(10),
+ starting_amount: 1000,
+ min_value: Some(50),
+ max_value: Some(2000),
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_linear_none_options() {
+ let original = DistributionFunction::Linear {
+ a: 3,
+ d: 1,
+ start_step: None,
+ starting_amount: 500,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_polynomial() {
+ let original = DistributionFunction::Polynomial {
+ a: -3,
+ d: 10,
+ m: 2,
+ n: 1,
+ o: -1,
+ start_moment: Some(5),
+ b: 100,
+ min_value: Some(0),
+ max_value: Some(10000),
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_polynomial_none_options() {
+ let original = DistributionFunction::Polynomial {
+ a: 1,
+ d: 1,
+ m: -2,
+ n: 3,
+ o: 0,
+ start_moment: None,
+ b: 50,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_exponential() {
+ let original = DistributionFunction::Exponential {
+ a: 100,
+ d: 20,
+ m: -3,
+ n: 100,
+ o: 5,
+ start_moment: Some(10),
+ b: 10,
+ min_value: Some(1),
+ max_value: Some(500),
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_exponential_none_options() {
+ let original = DistributionFunction::Exponential {
+ a: 50,
+ d: 10,
+ m: 2,
+ n: 50,
+ o: 0,
+ start_moment: None,
+ b: 5,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_logarithmic() {
+ let original = DistributionFunction::Logarithmic {
+ a: 100,
+ d: 10,
+ m: 2,
+ n: 1,
+ o: 1,
+ start_moment: Some(0),
+ b: 50,
+ min_value: Some(10),
+ max_value: Some(200),
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_logarithmic_none_options() {
+ let original = DistributionFunction::Logarithmic {
+ a: -5,
+ d: 1,
+ m: 1,
+ n: 1,
+ o: 0,
+ start_moment: None,
+ b: 100,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_inverted_logarithmic() {
+ let original = DistributionFunction::InvertedLogarithmic {
+ a: 10000,
+ d: 1,
+ m: 1,
+ n: 5000,
+ o: 0,
+ start_moment: Some(0),
+ b: 0,
+ min_value: Some(0),
+ max_value: Some(100000),
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ #[test]
+ fn round_trip_inverted_logarithmic_none_options() {
+ let original = DistributionFunction::InvertedLogarithmic {
+ a: -20,
+ d: 5,
+ m: 3,
+ n: 10,
+ o: -2,
+ start_moment: None,
+ b: 200,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ // -----------------------------------------------------------------------
+ // Edge cases: zero values
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn round_trip_fixed_amount_zero() {
+ let original = DistributionFunction::FixedAmount { amount: 0 };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_random_zero_range() {
+ let original = DistributionFunction::Random { min: 0, max: 0 };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_linear_all_zeros() {
+ let original = DistributionFunction::Linear {
+ a: 0,
+ d: 0,
+ start_step: Some(0),
+ starting_amount: 0,
+ min_value: Some(0),
+ max_value: Some(0),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_polynomial_all_zeros() {
+ let original = DistributionFunction::Polynomial {
+ a: 0,
+ d: 0,
+ m: 0,
+ n: 0,
+ o: 0,
+ start_moment: Some(0),
+ b: 0,
+ min_value: Some(0),
+ max_value: Some(0),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_exponential_all_zeros() {
+ let original = DistributionFunction::Exponential {
+ a: 0,
+ d: 0,
+ m: 0,
+ n: 0,
+ o: 0,
+ start_moment: Some(0),
+ b: 0,
+ min_value: Some(0),
+ max_value: Some(0),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ // -----------------------------------------------------------------------
+ // Edge cases: max values
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn round_trip_fixed_amount_max() {
+ let original = DistributionFunction::FixedAmount { amount: u64::MAX };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_random_max_values() {
+ let original = DistributionFunction::Random {
+ min: u64::MAX - 1,
+ max: u64::MAX,
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_step_decreasing_max_values() {
+ let original = DistributionFunction::StepDecreasingAmount {
+ step_count: u32::MAX,
+ decrease_per_interval_numerator: u16::MAX,
+ decrease_per_interval_denominator: u16::MAX,
+ start_decreasing_offset: Some(u64::MAX),
+ max_interval_count: Some(u16::MAX),
+ distribution_start_amount: u64::MAX,
+ trailing_distribution_interval_amount: u64::MAX,
+ min_value: Some(u64::MAX),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_linear_extreme_values() {
+ let original = DistributionFunction::Linear {
+ a: i64::MIN,
+ d: u64::MAX,
+ start_step: Some(u64::MAX),
+ starting_amount: u64::MAX,
+ min_value: Some(u64::MAX),
+ max_value: Some(u64::MAX),
+ };
+ assert_eq!(round_trip(&original), original);
+
+ let original2 = DistributionFunction::Linear {
+ a: i64::MAX,
+ d: 0,
+ start_step: None,
+ starting_amount: 0,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original2), original2);
+ }
+
+ #[test]
+ fn round_trip_polynomial_extreme_values() {
+ let original = DistributionFunction::Polynomial {
+ a: i64::MIN,
+ d: u64::MAX,
+ m: i64::MIN,
+ n: u64::MAX,
+ o: i64::MAX,
+ start_moment: Some(u64::MAX),
+ b: u64::MAX,
+ min_value: Some(u64::MAX),
+ max_value: Some(u64::MAX),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_exponential_extreme_values() {
+ let original = DistributionFunction::Exponential {
+ a: u64::MAX,
+ d: u64::MAX,
+ m: i64::MIN,
+ n: u64::MAX,
+ o: i64::MIN,
+ start_moment: Some(u64::MAX),
+ b: u64::MAX,
+ min_value: Some(u64::MAX),
+ max_value: Some(u64::MAX),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_logarithmic_extreme_values() {
+ let original = DistributionFunction::Logarithmic {
+ a: i64::MIN,
+ d: u64::MAX,
+ m: u64::MAX,
+ n: u64::MAX,
+ o: i64::MIN,
+ start_moment: Some(u64::MAX),
+ b: u64::MAX,
+ min_value: Some(u64::MAX),
+ max_value: Some(u64::MAX),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_inverted_logarithmic_extreme_values() {
+ let original = DistributionFunction::InvertedLogarithmic {
+ a: i64::MAX,
+ d: u64::MAX,
+ m: u64::MAX,
+ n: u64::MAX,
+ o: i64::MAX,
+ start_moment: Some(u64::MAX),
+ b: u64::MAX,
+ min_value: Some(u64::MAX),
+ max_value: Some(u64::MAX),
+ };
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_stepwise_single_entry() {
+ let mut steps = BTreeMap::new();
+ steps.insert(0, u64::MAX);
+ let original = DistributionFunction::Stepwise(steps);
+ assert_eq!(round_trip(&original), original);
+ }
+
+ #[test]
+ fn round_trip_stepwise_many_entries() {
+ let steps: BTreeMap = (0..100).map(|i| (i * 10, i * 100 + 1)).collect();
+ let original = DistributionFunction::Stepwise(steps);
+ assert_eq!(round_trip(&original), original);
+ assert_eq!(round_trip_borrow(&original), original);
+ }
+
+ // -----------------------------------------------------------------------
+ // Determinism: same input always produces the same bytes
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn encoding_is_deterministic() {
+ let variants: Vec = vec![
+ DistributionFunction::FixedAmount { amount: 42 },
+ DistributionFunction::Random { min: 1, max: 99 },
+ DistributionFunction::StepDecreasingAmount {
+ step_count: 100,
+ decrease_per_interval_numerator: 1,
+ decrease_per_interval_denominator: 2,
+ start_decreasing_offset: Some(5),
+ max_interval_count: Some(10),
+ distribution_start_amount: 500,
+ trailing_distribution_interval_amount: 1,
+ min_value: Some(1),
+ },
+ DistributionFunction::Stepwise({
+ let mut m = BTreeMap::new();
+ m.insert(0, 100);
+ m.insert(50, 50);
+ m
+ }),
+ DistributionFunction::Linear {
+ a: -2,
+ d: 1,
+ start_step: None,
+ starting_amount: 100,
+ min_value: None,
+ max_value: Some(200),
+ },
+ DistributionFunction::Polynomial {
+ a: 3,
+ d: 1,
+ m: 2,
+ n: 1,
+ o: 0,
+ start_moment: None,
+ b: 10,
+ min_value: None,
+ max_value: None,
+ },
+ DistributionFunction::Exponential {
+ a: 100,
+ d: 10,
+ m: -3,
+ n: 100,
+ o: 0,
+ start_moment: None,
+ b: 10,
+ min_value: None,
+ max_value: None,
+ },
+ DistributionFunction::Logarithmic {
+ a: 100,
+ d: 10,
+ m: 2,
+ n: 1,
+ o: 1,
+ start_moment: None,
+ b: 50,
+ min_value: None,
+ max_value: None,
+ },
+ DistributionFunction::InvertedLogarithmic {
+ a: 10000,
+ d: 1,
+ m: 1,
+ n: 5000,
+ o: 0,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ },
+ ];
+
+ for variant in &variants {
+ let bytes1 = bincode::encode_to_vec(variant, CONFIG).unwrap();
+ let bytes2 = bincode::encode_to_vec(variant, CONFIG).unwrap();
+ assert_eq!(
+ bytes1, bytes2,
+ "encoding was not deterministic for {:?}",
+ variant
+ );
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // Variant tag correctness: first byte encodes the variant discriminant
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn variant_tags_are_correct() {
+ let cases: Vec<(DistributionFunction, u8)> = vec![
+ (DistributionFunction::FixedAmount { amount: 1 }, 0),
+ (DistributionFunction::Random { min: 0, max: 1 }, 1),
+ (
+ DistributionFunction::StepDecreasingAmount {
+ step_count: 1,
+ decrease_per_interval_numerator: 1,
+ decrease_per_interval_denominator: 2,
+ start_decreasing_offset: None,
+ max_interval_count: None,
+ distribution_start_amount: 1,
+ trailing_distribution_interval_amount: 0,
+ min_value: None,
+ },
+ 2,
+ ),
+ (DistributionFunction::Stepwise(BTreeMap::new()), 3),
+ (
+ DistributionFunction::Linear {
+ a: 0,
+ d: 1,
+ start_step: None,
+ starting_amount: 0,
+ min_value: None,
+ max_value: None,
+ },
+ 4,
+ ),
+ (
+ DistributionFunction::Polynomial {
+ a: 0,
+ d: 1,
+ m: 0,
+ n: 1,
+ o: 0,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ },
+ 5,
+ ),
+ (
+ DistributionFunction::Exponential {
+ a: 0,
+ d: 1,
+ m: 0,
+ n: 1,
+ o: 0,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ },
+ 6,
+ ),
+ (
+ DistributionFunction::Logarithmic {
+ a: 0,
+ d: 1,
+ m: 1,
+ n: 1,
+ o: 0,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ },
+ 7,
+ ),
+ (
+ DistributionFunction::InvertedLogarithmic {
+ a: 0,
+ d: 1,
+ m: 1,
+ n: 1,
+ o: 0,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ },
+ 8,
+ ),
+ ];
+
+ for (variant, expected_tag) in cases {
+ let bytes = bincode::encode_to_vec(&variant, CONFIG).unwrap();
+ assert_eq!(
+ bytes[0], expected_tag,
+ "wrong tag for {:?}: got {}, expected {}",
+ variant, bytes[0], expected_tag
+ );
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // Error paths: invalid variant tag
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn decode_invalid_variant_tag_9() {
+ let valid = DistributionFunction::FixedAmount { amount: 1 };
+ let mut bytes = bincode::encode_to_vec(&valid, CONFIG).unwrap();
+ bytes[0] = 9;
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(&bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_invalid_variant_tag_255() {
+ let valid = DistributionFunction::FixedAmount { amount: 1 };
+ let mut bytes = bincode::encode_to_vec(&valid, CONFIG).unwrap();
+ bytes[0] = 255;
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(&bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn borrow_decode_invalid_variant_tag() {
+ let valid = DistributionFunction::FixedAmount { amount: 1 };
+ let mut bytes = bincode::encode_to_vec(&valid, CONFIG).unwrap();
+ bytes[0] = 42;
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::borrow_decode_from_slice(&bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ // -----------------------------------------------------------------------
+ // Error paths: truncated input
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn decode_empty_input() {
+ let bytes: &[u8] = &[];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_fixed_amount() {
+ let bytes: &[u8] = &[0];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_random() {
+ let bytes: &[u8] = &[1];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_step_decreasing() {
+ let bytes: &[u8] = &[2];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_stepwise() {
+ let bytes: &[u8] = &[3];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_linear() {
+ let bytes: &[u8] = &[4];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_polynomial() {
+ let bytes: &[u8] = &[5];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_exponential() {
+ let bytes: &[u8] = &[6];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_logarithmic() {
+ let bytes: &[u8] = &[7];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_tag_only_inverted_logarithmic() {
+ let bytes: &[u8] = &[8];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_truncated_random_missing_max() {
+ let original = DistributionFunction::Random { min: 10, max: 100 };
+ let bytes = bincode::encode_to_vec(&original, CONFIG).unwrap();
+ let truncated = &bytes[..bytes.len() / 2];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(truncated, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_truncated_linear_partial_payload() {
+ let original = DistributionFunction::Linear {
+ a: 5,
+ d: 10,
+ start_step: Some(100),
+ starting_amount: 500,
+ min_value: Some(1),
+ max_value: Some(1000),
+ };
+ let bytes = bincode::encode_to_vec(&original, CONFIG).unwrap();
+ let truncated = &bytes[..5];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(truncated, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_truncated_polynomial_partial_payload() {
+ let original = DistributionFunction::Polynomial {
+ a: 3,
+ d: 1,
+ m: 2,
+ n: 1,
+ o: -1,
+ start_moment: Some(5),
+ b: 100,
+ min_value: Some(0),
+ max_value: Some(10000),
+ };
+ let bytes = bincode::encode_to_vec(&original, CONFIG).unwrap();
+ let truncated = &bytes[..bytes.len() - 3];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(truncated, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_truncated_exponential_partial_payload() {
+ let original = DistributionFunction::Exponential {
+ a: 100,
+ d: 20,
+ m: -3,
+ n: 100,
+ o: 5,
+ start_moment: Some(10),
+ b: 10,
+ min_value: Some(1),
+ max_value: Some(500),
+ };
+ let bytes = bincode::encode_to_vec(&original, CONFIG).unwrap();
+ let truncated = &bytes[..bytes.len() - 5];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(truncated, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn decode_truncated_step_decreasing_partial_payload() {
+ let original = DistributionFunction::StepDecreasingAmount {
+ step_count: 210_000,
+ decrease_per_interval_numerator: 1,
+ decrease_per_interval_denominator: 2,
+ start_decreasing_offset: Some(100),
+ max_interval_count: Some(64),
+ distribution_start_amount: 5000,
+ trailing_distribution_interval_amount: 1,
+ min_value: Some(10),
+ };
+ let bytes = bincode::encode_to_vec(&original, CONFIG).unwrap();
+ let truncated = &bytes[..bytes.len() / 2];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::decode_from_slice(truncated, CONFIG);
+ assert!(result.is_err());
+ }
+
+ // -----------------------------------------------------------------------
+ // Error paths: borrow_decode with truncated input
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn borrow_decode_empty_input() {
+ let bytes: &[u8] = &[];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::borrow_decode_from_slice(bytes, CONFIG);
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn borrow_decode_tag_only() {
+ for tag in 0u8..=8 {
+ let bytes: &[u8] = &[tag];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::borrow_decode_from_slice(bytes, CONFIG);
+ assert!(
+ result.is_err(),
+ "borrow_decode should fail for tag-only input with tag {}",
+ tag
+ );
+ }
+ }
+
+ #[test]
+ fn borrow_decode_invalid_tag() {
+ for tag in [9u8, 10, 50, 128, 255] {
+ let bytes: &[u8] = &[tag];
+ let result: Result<(DistributionFunction, _), _> =
+ bincode::borrow_decode_from_slice(bytes, CONFIG);
+ assert!(
+ result.is_err(),
+ "borrow_decode should fail for invalid tag {}",
+ tag
+ );
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // Decode and BorrowDecode produce the same results
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn decode_and_borrow_decode_match_for_all_variants() {
+ let variants: Vec = vec![
+ DistributionFunction::FixedAmount { amount: 777 },
+ DistributionFunction::Random { min: 10, max: 1000 },
+ DistributionFunction::StepDecreasingAmount {
+ step_count: 500,
+ decrease_per_interval_numerator: 3,
+ decrease_per_interval_denominator: 100,
+ start_decreasing_offset: Some(50),
+ max_interval_count: Some(200),
+ distribution_start_amount: 10000,
+ trailing_distribution_interval_amount: 5,
+ min_value: Some(1),
+ },
+ DistributionFunction::Stepwise({
+ let mut m = BTreeMap::new();
+ m.insert(0, 500);
+ m.insert(100, 250);
+ m.insert(200, 125);
+ m
+ }),
+ DistributionFunction::Linear {
+ a: -10,
+ d: 3,
+ start_step: Some(20),
+ starting_amount: 1000,
+ min_value: Some(100),
+ max_value: None,
+ },
+ DistributionFunction::Polynomial {
+ a: 5,
+ d: 2,
+ m: -1,
+ n: 3,
+ o: 7,
+ start_moment: Some(10),
+ b: 200,
+ min_value: None,
+ max_value: Some(5000),
+ },
+ DistributionFunction::Exponential {
+ a: 250,
+ d: 50,
+ m: 1,
+ n: 10,
+ o: -3,
+ start_moment: Some(5),
+ b: 100,
+ min_value: Some(50),
+ max_value: Some(10000),
+ },
+ DistributionFunction::Logarithmic {
+ a: 500,
+ d: 20,
+ m: 3,
+ n: 2,
+ o: -1,
+ start_moment: Some(0),
+ b: 75,
+ min_value: Some(10),
+ max_value: Some(1000),
+ },
+ DistributionFunction::InvertedLogarithmic {
+ a: -100,
+ d: 10,
+ m: 5,
+ n: 100,
+ o: 2,
+ start_moment: Some(3),
+ b: 300,
+ min_value: Some(0),
+ max_value: Some(500),
+ },
+ ];
+
+ for variant in &variants {
+ let bytes = bincode::encode_to_vec(variant, CONFIG).unwrap();
+ let (decoded, consumed1): (DistributionFunction, _) =
+ bincode::decode_from_slice(&bytes, CONFIG).unwrap();
+ let (borrow_decoded, consumed2): (DistributionFunction, _) =
+ bincode::borrow_decode_from_slice(&bytes, CONFIG).unwrap();
+ assert_eq!(
+ decoded, borrow_decoded,
+ "decode and borrow_decode differ for {:?}",
+ variant
+ );
+ assert_eq!(
+ consumed1, consumed2,
+ "consumed bytes differ for {:?}",
+ variant
+ );
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // Negative i64 values round-trip correctly
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn round_trip_negative_signed_fields() {
+ let original = DistributionFunction::Polynomial {
+ a: i64::MIN,
+ d: 1,
+ m: -8,
+ n: 1,
+ o: i64::MIN,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original), original);
+
+ let original2 = DistributionFunction::Exponential {
+ a: 1,
+ d: 1,
+ m: i64::MIN,
+ n: 1,
+ o: i64::MIN,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original2), original2);
+
+ let original3 = DistributionFunction::InvertedLogarithmic {
+ a: i64::MIN,
+ d: 1,
+ m: 1,
+ n: 1,
+ o: i64::MIN,
+ start_moment: None,
+ b: 0,
+ min_value: None,
+ max_value: None,
+ };
+ assert_eq!(round_trip(&original3), original3);
+ }
+
+ // -----------------------------------------------------------------------
+ // Corrupted payload bytes
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn decode_corrupted_option_byte_does_not_panic() {
+ let original = DistributionFunction::Linear {
+ a: 1,
+ d: 1,
+ start_step: None,
+ starting_amount: 10,
+ min_value: None,
+ max_value: None,
+ };
+ let mut bytes = bincode::encode_to_vec(&original, CONFIG).unwrap();
+ // Corrupt the last byte (an option discriminant for max_value)
+ let last = bytes.len() - 1;
+ bytes[last] = 5;
+ // Should not panic regardless of outcome
+ let _ = bincode::decode_from_slice::(&bytes, CONFIG);
+ }
+
+ // -----------------------------------------------------------------------
+ // Encode length varies correctly between variants
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn fixed_amount_is_shortest_encoding() {
+ let fixed = DistributionFunction::FixedAmount { amount: 1 };
+ let random = DistributionFunction::Random { min: 1, max: 1 };
+ let fixed_bytes = bincode::encode_to_vec(&fixed, CONFIG).unwrap();
+ let random_bytes = bincode::encode_to_vec(&random, CONFIG).unwrap();
+ assert!(
+ fixed_bytes.len() <= random_bytes.len(),
+ "FixedAmount should be shorter than or equal to Random"
+ );
+ }
+
+ // -----------------------------------------------------------------------
+ // Full round-trip: encode -> decode -> re-encode produces identical bytes
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn double_round_trip_produces_identical_bytes() {
+ let original = DistributionFunction::StepDecreasingAmount {
+ step_count: 210_000,
+ decrease_per_interval_numerator: 1,
+ decrease_per_interval_denominator: 2,
+ start_decreasing_offset: Some(100),
+ max_interval_count: Some(64),
+ distribution_start_amount: 5000,
+ trailing_distribution_interval_amount: 1,
+ min_value: Some(10),
+ };
+ let bytes1 = bincode::encode_to_vec(&original, CONFIG).unwrap();
+ let (decoded, _): (DistributionFunction, _) =
+ bincode::decode_from_slice(&bytes1, CONFIG).unwrap();
+ let bytes2 = bincode::encode_to_vec(&decoded, CONFIG).unwrap();
+ assert_eq!(bytes1, bytes2);
+ }
+}
diff --git a/packages/rs-platform-value/src/index.rs b/packages/rs-platform-value/src/index.rs
index fd0f7a505dc..a39ccdd8e92 100644
--- a/packages/rs-platform-value/src/index.rs
+++ b/packages/rs-platform-value/src/index.rs
@@ -272,3 +272,372 @@ where
index.index_or_insert(self)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::platform_value;
+
+ // ===============================================================
+ // Index for Value — access array element
+ // ===============================================================
+
+ #[test]
+ fn index_usize_access_array_element() {
+ let value = platform_value!([10, 20, 30]);
+ assert_eq!(value[0], platform_value!(10));
+ assert_eq!(value[1], platform_value!(20));
+ assert_eq!(value[2], platform_value!(30));
+ }
+
+ // ===============================================================
+ // Index for Value — out-of-bounds returns Null
+ // ===============================================================
+
+ #[test]
+ fn index_usize_out_of_bounds_returns_null() {
+ let value = platform_value!([10, 20]);
+ // The ops::Index impl returns &NULL for missing indices
+ assert_eq!(value[99], Value::Null);
+ }
+
+ // ===============================================================
+ // Index for Value — non-array returns Null
+ // ===============================================================
+
+ #[test]
+ fn index_usize_on_non_array_returns_null() {
+ let value = platform_value!(42);
+ // ops::Index returns &NULL when index_into returns None
+ assert_eq!(value[0], Value::Null);
+ }
+
+ #[test]
+ fn index_usize_on_map_returns_null() {
+ let value = platform_value!({ "key": "val" });
+ assert_eq!(value[0], Value::Null);
+ }
+
+ // ===============================================================
+ // IndexMut — panic on out-of-bounds
+ // ===============================================================
+
+ #[test]
+ #[should_panic(expected = "cannot access index 5 of JSON array of length 2")]
+ fn index_mut_usize_out_of_bounds_panics() {
+ let mut value = platform_value!([10, 20]);
+ value[5] = platform_value!(99);
+ }
+
+ // ===============================================================
+ // IndexMut — panic on non-array
+ // ===============================================================
+
+ #[test]
+ #[should_panic(expected = "cannot access index 0 of JSON")]
+ fn index_mut_usize_on_non_array_panics() {
+ let mut value = platform_value!(42);
+ value[0] = platform_value!(99);
+ }
+
+ // ===============================================================
+ // IndexMut — successfully write
+ // ===============================================================
+
+ #[test]
+ fn index_mut_usize_write() {
+ let mut value = platform_value!([10, 20, 30]);
+ value[1] = platform_value!(99);
+ assert_eq!(value[1], platform_value!(99));
+ }
+
+ // ===============================================================
+ // Index<&str> for Value — access map key
+ // ===============================================================
+
+ #[test]
+ fn index_str_access_map_key() {
+ let value = platform_value!({ "name": "Alice", "age": 30 });
+ assert_eq!(value["name"], platform_value!("Alice"));
+ assert_eq!(value["age"], platform_value!(30));
+ }
+
+ // ===============================================================
+ // Index<&str> for Value — missing key returns Null
+ // ===============================================================
+
+ #[test]
+ fn index_str_missing_key_returns_null() {
+ let value = platform_value!({ "name": "Alice" });
+ assert_eq!(value["missing"], Value::Null);
+ }
+
+ // ===============================================================
+ // Index<&str> for Value — non-map returns Null
+ // ===============================================================
+
+ #[test]
+ fn index_str_on_non_map_returns_null() {
+ let value = platform_value!(42);
+ assert_eq!(value["key"], Value::Null);
+ }
+
+ #[test]
+ fn index_str_on_array_returns_null() {
+ let value = platform_value!([1, 2, 3]);
+ assert_eq!(value["key"], Value::Null);
+ }
+
+ // ===============================================================
+ // Index<&str> for Value — nested access
+ // ===============================================================
+
+ #[test]
+ fn index_str_nested_access() {
+ let value = platform_value!({
+ "outer": {
+ "inner": {
+ "deep": 42
+ }
+ }
+ });
+ assert_eq!(value["outer"]["inner"]["deep"], platform_value!(42));
+ }
+
+ #[test]
+ fn index_str_nested_missing_returns_null_chain() {
+ let value = platform_value!({ "a": { "b": 1 } });
+ // "a" -> "c" -> doesn't exist, returns Null
+ // then Null["anything"] also returns Null
+ assert_eq!(value["a"]["c"], Value::Null);
+ assert_eq!(value["a"]["c"]["d"], Value::Null);
+ }
+
+ // ===============================================================
+ // IndexMut<&str> — write to existing key
+ // ===============================================================
+
+ #[test]
+ fn index_mut_str_write_existing() {
+ let mut value = platform_value!({ "x": 0 });
+ value["x"] = platform_value!(42);
+ assert_eq!(value["x"], platform_value!(42));
+ }
+
+ // ===============================================================
+ // IndexMut<&str> — insert new key
+ // ===============================================================
+
+ #[test]
+ fn index_mut_str_insert_new_key() {
+ let mut value = platform_value!({ "x": 0 });
+ value["y"] = platform_value!("hello");
+ assert_eq!(value["y"], platform_value!("hello"));
+ }
+
+ // ===============================================================
+ // IndexMut<&str> — Null becomes empty map
+ // ===============================================================
+
+ #[test]
+ fn index_mut_str_null_becomes_map() {
+ let mut value = Value::Null;
+ value["key"] = platform_value!(1);
+ assert_eq!(value["key"], platform_value!(1));
+ assert!(value.is_map());
+ }
+
+ // ===============================================================
+ // IndexMut<&str> — deeply nested insert via Null
+ // ===============================================================
+
+ #[test]
+ fn index_mut_str_deeply_nested_insert() {
+ let mut value = platform_value!({ "x": 0 });
+ // "a" -> inserts Null, then Null becomes map for "b", etc.
+ value["a"]["b"]["c"] = platform_value!(true);
+ assert_eq!(value["a"]["b"]["c"], platform_value!(true));
+ }
+
+ // ===============================================================
+ // IndexMut<&str> — panic on non-map non-null
+ // ===============================================================
+
+ #[test]
+ #[should_panic(expected = "cannot access key")]
+ fn index_mut_str_on_non_map_panics() {
+ let mut value = platform_value!(42);
+ value["key"] = platform_value!(1);
+ }
+
+ // ===============================================================
+ // Index delegates to str
+ // ===============================================================
+
+ #[test]
+ fn index_string_delegates_to_str() {
+ let value = platform_value!({ "name": "Bob" });
+ let key = String::from("name");
+ assert_eq!(value[&key], platform_value!("Bob"));
+ }
+
+ // ===============================================================
+ // IndexMut delegates to str
+ // ===============================================================
+
+ #[test]
+ fn index_mut_string_delegates_to_str() {
+ let mut value = platform_value!({ "name": "Bob" });
+ let key = String::from("name");
+ value[&key] = platform_value!("Alice");
+ assert_eq!(value["name"], platform_value!("Alice"));
+ }
+
+ // ===============================================================
+ // index_into — returns None for various non-matching types
+ // ===============================================================
+
+ #[test]
+ fn index_into_usize_returns_none_for_non_array() {
+ let value = Value::Text("hello".into());
+ assert!(0usize.index_into(&value).is_none());
+ }
+
+ #[test]
+ fn index_into_str_returns_none_for_non_map() {
+ let value = Value::Array(vec![Value::U32(1)]);
+ assert!("key".index_into(&value).is_none());
+ }
+
+ // ===============================================================
+ // index_into_mut — returns None for non-matching types
+ // ===============================================================
+
+ #[test]
+ fn index_into_mut_usize_returns_none_for_non_array() {
+ let mut value = Value::Bool(true);
+ assert!(0usize.index_into_mut(&mut value).is_none());
+ }
+
+ #[test]
+ fn index_into_mut_str_returns_none_for_non_map() {
+ let mut value = Value::U64(100);
+ assert!("key".index_into_mut(&mut value).is_none());
+ }
+
+ // ===============================================================
+ // index_into_mut — returns Some for valid accesses
+ // ===============================================================
+
+ #[test]
+ fn index_into_mut_usize_returns_some() {
+ let mut value = platform_value!([10, 20]);
+ let got = 0usize.index_into_mut(&mut value);
+ assert!(got.is_some());
+ *got.unwrap() = platform_value!(99);
+ assert_eq!(value[0], platform_value!(99));
+ }
+
+ #[test]
+ fn index_into_mut_str_returns_some() {
+ let mut value = platform_value!({ "k": 1 });
+ let got = "k".index_into_mut(&mut value);
+ assert!(got.is_some());
+ *got.unwrap() = platform_value!(42);
+ assert_eq!(value["k"], platform_value!(42));
+ }
+
+ // ===============================================================
+ // Combined array + map indexing
+ // ===============================================================
+
+ #[test]
+ fn combined_array_map_indexing() {
+ let value = platform_value!({
+ "items": [
+ { "name": "first" },
+ { "name": "second" }
+ ]
+ });
+ assert_eq!(value["items"][0]["name"], platform_value!("first"));
+ assert_eq!(value["items"][1]["name"], platform_value!("second"));
+ }
+
+ #[test]
+ fn combined_array_map_indexing_mut() {
+ let mut value = platform_value!({
+ "items": [
+ { "name": "first" },
+ { "name": "second" }
+ ]
+ });
+ value["items"][0]["name"] = platform_value!("updated");
+ assert_eq!(value["items"][0]["name"], platform_value!("updated"));
+ }
+
+ // ===============================================================
+ // get() method — returns Some for existing, None for missing
+ // ===============================================================
+
+ #[test]
+ fn get_method_returns_some_for_existing_key() {
+ let value = platform_value!({ "x": 10 });
+ let result = value.get("x").unwrap();
+ assert!(result.is_some());
+ assert_eq!(result.unwrap(), &platform_value!(10));
+ }
+
+ #[test]
+ fn get_method_returns_none_for_missing_key() {
+ let value = platform_value!({ "x": 10 });
+ let result = value.get("y").unwrap();
+ assert!(result.is_none());
+ }
+
+ #[test]
+ fn get_method_errors_on_non_map() {
+ let value = platform_value!(42);
+ let result = value.get("key");
+ assert!(result.is_err());
+ }
+
+ // ===============================================================
+ // Type display coverage (used in panic messages)
+ // ===============================================================
+
+ #[test]
+ fn type_display_covers_all_variants() {
+ use core::fmt::Write;
+ let variants: Vec = vec![
+ Value::Null,
+ Value::Bool(true),
+ Value::Float(1.0),
+ Value::Text("s".into()),
+ Value::Array(vec![]),
+ Value::Map(vec![]),
+ Value::U128(1),
+ Value::I128(1),
+ Value::U64(1),
+ Value::I64(1),
+ Value::U32(1),
+ Value::I32(1),
+ Value::U16(1),
+ Value::I16(1),
+ Value::U8(1),
+ Value::I8(1),
+ Value::Bytes(vec![]),
+ Value::Bytes20([0u8; 20]),
+ Value::Bytes32([0u8; 32]),
+ Value::Bytes36([0u8; 36]),
+ Value::Identifier([0u8; 32]),
+ Value::EnumU8(vec![]),
+ Value::EnumString(vec![]),
+ ];
+ for v in &variants {
+ let t = Type(v);
+ let mut buf = String::new();
+ write!(buf, "{}", t).unwrap();
+ assert!(!buf.is_empty());
+ }
+ }
+}
diff --git a/packages/rs-platform-value/src/replace.rs b/packages/rs-platform-value/src/replace.rs
index 5dcfcfcb6d8..c3088f1d470 100644
--- a/packages/rs-platform-value/src/replace.rs
+++ b/packages/rs-platform-value/src/replace.rs
@@ -497,3 +497,664 @@ impl Value {
))
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{IntegerReplacementType, ReplacementType, Value};
+
+ // ---------------------------------------------------------------
+ // Helper: builds a 32-byte base58-encoded string from a seed byte
+ // ---------------------------------------------------------------
+ fn base58_of_32_bytes(seed: u8) -> String {
+ bs58::encode([seed; 32]).into_string()
+ }
+
+ fn make_32_u8_array(seed: u8) -> Vec {
+ vec![Value::U8(seed); 32]
+ }
+
+ // ===============================================================
+ // replace_at_path — single segment, ReplacementType::Identifier
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_single_segment_identifier_from_text() {
+ let b58 = base58_of_32_bytes(1);
+ let mut value = Value::Map(vec![(Value::Text("id".into()), Value::Text(b58))]);
+ value
+ .replace_at_path("id", ReplacementType::Identifier)
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("id").unwrap(),
+ &Value::Identifier([1u8; 32])
+ );
+ }
+
+ #[test]
+ fn replace_at_path_single_segment_identifier_from_u8_array() {
+ let mut value = Value::Map(vec![(
+ Value::Text("id".into()),
+ Value::Array(make_32_u8_array(7)),
+ )]);
+ value
+ .replace_at_path("id", ReplacementType::Identifier)
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("id").unwrap(),
+ &Value::Identifier([7u8; 32])
+ );
+ }
+
+ // ===============================================================
+ // replace_at_path — single segment, ReplacementType::BinaryBytes
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_single_segment_binary_bytes_from_base64() {
+ use base64::prelude::*;
+ let raw = vec![10u8, 20, 30];
+ let b64 = BASE64_STANDARD.encode(&raw);
+ let mut value = Value::Map(vec![(Value::Text("data".into()), Value::Text(b64))]);
+ value
+ .replace_at_path("data", ReplacementType::BinaryBytes)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("data").unwrap(), &Value::Bytes(raw));
+ }
+
+ // ===============================================================
+ // replace_at_path — single segment, ReplacementType::TextBase58
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_single_segment_text_base58() {
+ let mut value = Value::Map(vec![(
+ Value::Text("id".into()),
+ Value::Bytes(vec![1, 2, 3]),
+ )]);
+ value
+ .replace_at_path("id", ReplacementType::TextBase58)
+ .unwrap();
+ let expected_b58 = bs58::encode(vec![1u8, 2, 3]).into_string();
+ assert_eq!(
+ value.get_value_at_path("id").unwrap(),
+ &Value::Text(expected_b58)
+ );
+ }
+
+ // ===============================================================
+ // replace_at_path — single segment, ReplacementType::TextBase64
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_single_segment_text_base64() {
+ use base64::prelude::*;
+ let raw = vec![1u8, 2, 3];
+ let mut value = Value::Map(vec![(Value::Text("bin".into()), Value::Bytes(raw.clone()))]);
+ value
+ .replace_at_path("bin", ReplacementType::TextBase64)
+ .unwrap();
+ let expected_b64 = BASE64_STANDARD.encode(&raw);
+ assert_eq!(
+ value.get_value_at_path("bin").unwrap(),
+ &Value::Text(expected_b64)
+ );
+ }
+
+ // ===============================================================
+ // replace_at_path — multi-segment nested path
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_multi_segment_nested() {
+ let b58 = base58_of_32_bytes(5);
+ let inner = Value::Map(vec![(Value::Text("owner_id".into()), Value::Text(b58))]);
+ let mut value = Value::Map(vec![(Value::Text("doc".into()), inner)]);
+
+ value
+ .replace_at_path("doc.owner_id", ReplacementType::Identifier)
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("doc.owner_id").unwrap(),
+ &Value::Identifier([5u8; 32])
+ );
+ }
+
+ // ===============================================================
+ // replace_at_path — array path with [] (all members)
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_array_all_members() {
+ let b58 = base58_of_32_bytes(3);
+ let elem1 = Value::Map(vec![(Value::Text("id".into()), Value::Text(b58.clone()))]);
+ let elem2 = Value::Map(vec![(Value::Text("id".into()), Value::Text(b58))]);
+ let arr = Value::Array(vec![elem1, elem2]);
+ let mut value = Value::Map(vec![(Value::Text("items".into()), arr)]);
+
+ value
+ .replace_at_path("items[].id", ReplacementType::Identifier)
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("items[0].id").unwrap(),
+ &Value::Identifier([3u8; 32])
+ );
+ assert_eq!(
+ value.get_value_at_path("items[1].id").unwrap(),
+ &Value::Identifier([3u8; 32])
+ );
+ }
+
+ // ===============================================================
+ // replace_at_path — optional key missing returns Ok
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_missing_key_returns_ok() {
+ let mut value = Value::Map(vec![(Value::Text("a".into()), Value::U32(42))]);
+ // "b" does not exist — filter_map filters it out
+ let result = value.replace_at_path("b", ReplacementType::Identifier);
+ assert!(result.is_ok());
+ }
+
+ // ===============================================================
+ // replace_at_path — non-map at root gives error
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_on_non_map_errors() {
+ let mut value = Value::U32(42);
+ let result = value.replace_at_path("key", ReplacementType::Identifier);
+ assert!(result.is_err());
+ }
+
+ // ===============================================================
+ // replace_at_path — non-32-byte data for Identifier errors
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_identifier_wrong_length_errors() {
+ // base58-encode only 10 bytes, not 32
+ let b58 = bs58::encode([1u8; 10]).into_string();
+ let mut value = Value::Map(vec![(Value::Text("id".into()), Value::Text(b58))]);
+ let result = value.replace_at_path("id", ReplacementType::Identifier);
+ assert!(result.is_err());
+ }
+
+ // ===============================================================
+ // replace_at_paths — multiple paths
+ // ===============================================================
+
+ #[test]
+ fn replace_at_paths_replaces_multiple() {
+ let b58 = base58_of_32_bytes(9);
+ let inner = Value::Map(vec![
+ (Value::Text("a".into()), Value::Text(b58.clone())),
+ (Value::Text("b".into()), Value::Text(b58)),
+ ]);
+ let mut value = Value::Map(vec![(Value::Text("root".into()), inner)]);
+ value
+ .replace_at_paths(vec!["root.a", "root.b"], ReplacementType::Identifier)
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("root.a").unwrap(),
+ &Value::Identifier([9u8; 32])
+ );
+ assert_eq!(
+ value.get_value_at_path("root.b").unwrap(),
+ &Value::Identifier([9u8; 32])
+ );
+ }
+
+ // ===============================================================
+ // replace_integer_type_at_path — single segment, U32
+ // ===============================================================
+
+ #[test]
+ fn replace_integer_type_single_segment_u32() {
+ let mut value = Value::Map(vec![(Value::Text("count".into()), Value::U8(5))]);
+ value
+ .replace_integer_type_at_path("count", IntegerReplacementType::U32)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("count").unwrap(), &Value::U32(5));
+ }
+
+ // ===============================================================
+ // replace_integer_type_at_path — single segment, various types
+ // ===============================================================
+
+ #[test]
+ fn replace_integer_type_single_segment_u16() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::U128(100))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::U16)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::U16(100));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_u64() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::U8(42))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::U64)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::U64(42));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_i32() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::I8(-3))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::I32)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::I32(-3));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_u128() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::U64(999))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::U128)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::U128(999));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_i128() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::I64(-500))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::I128)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::I128(-500));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_u8() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::U16(7))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::U8)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::U8(7));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_i8() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::I16(-2))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::I8)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::I8(-2));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_i16() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::I32(-100))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::I16)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::I16(-100));
+ }
+
+ #[test]
+ fn replace_integer_type_single_segment_i64() {
+ let mut value = Value::Map(vec![(Value::Text("v".into()), Value::I128(-9999))]);
+ value
+ .replace_integer_type_at_path("v", IntegerReplacementType::I64)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("v").unwrap(), &Value::I64(-9999));
+ }
+
+ // ===============================================================
+ // replace_integer_type_at_path — multi-segment
+ // ===============================================================
+
+ #[test]
+ fn replace_integer_type_multi_segment() {
+ let inner = Value::Map(vec![(Value::Text("level".into()), Value::U8(10))]);
+ let mut value = Value::Map(vec![(Value::Text("nested".into()), inner)]);
+ value
+ .replace_integer_type_at_path("nested.level", IntegerReplacementType::U32)
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("nested.level").unwrap(),
+ &Value::U32(10)
+ );
+ }
+
+ // ===============================================================
+ // replace_integer_type_at_path — array path
+ // ===============================================================
+
+ #[test]
+ fn replace_integer_type_array_all_members() {
+ let elem1 = Value::Map(vec![(Value::Text("n".into()), Value::U128(8))]);
+ let elem2 = Value::Map(vec![(Value::Text("n".into()), Value::U32(2))]);
+ let arr = Value::Array(vec![elem1, elem2]);
+ let mut value = Value::Map(vec![(Value::Text("items".into()), arr)]);
+ value
+ .replace_integer_type_at_path("items[].n", IntegerReplacementType::U16)
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("items[0].n").unwrap(),
+ &Value::U16(8)
+ );
+ assert_eq!(
+ value.get_value_at_path("items[1].n").unwrap(),
+ &Value::U16(2)
+ );
+ }
+
+ // ===============================================================
+ // replace_integer_type_at_path — missing key returns Ok
+ // ===============================================================
+
+ #[test]
+ fn replace_integer_type_missing_key_returns_ok() {
+ let mut value = Value::Map(vec![(Value::Text("a".into()), Value::U32(1))]);
+ let result = value.replace_integer_type_at_path("missing", IntegerReplacementType::U32);
+ assert!(result.is_ok());
+ }
+
+ // ===============================================================
+ // replace_integer_type_at_path — non-map errors
+ // ===============================================================
+
+ #[test]
+ fn replace_integer_type_on_non_map_errors() {
+ let mut value = Value::U32(42);
+ let result = value.replace_integer_type_at_path("key", IntegerReplacementType::U32);
+ assert!(result.is_err());
+ }
+
+ // ===============================================================
+ // replace_integer_type_at_paths — multiple paths
+ // ===============================================================
+
+ #[test]
+ fn replace_integer_type_at_paths_replaces_multiple() {
+ let inner = Value::Map(vec![
+ (Value::Text("x".into()), Value::U16(5)),
+ (Value::Text("y".into()), Value::I32(6)),
+ ]);
+ let mut value = Value::Map(vec![(Value::Text("root".into()), inner)]);
+ value
+ .replace_integer_type_at_paths(vec!["root.x", "root.y"], IntegerReplacementType::U32)
+ .unwrap();
+ assert_eq!(value.get_value_at_path("root.x").unwrap(), &Value::U32(5));
+ assert_eq!(value.get_value_at_path("root.y").unwrap(), &Value::U32(6));
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_of_root_value_when_setting_at_path
+ // — identifier match (exact path in identifier_paths)
+ // ===============================================================
+
+ #[test]
+ fn replace_root_binary_types_identifier_exact_match() {
+ let b58 = base58_of_32_bytes(2);
+ let mut value = Value::Text(b58);
+ let identifier_paths = HashSet::from(["my_id"]);
+ value
+ .replace_to_binary_types_of_root_value_when_setting_at_path(
+ "my_id",
+ identifier_paths,
+ HashSet::new(),
+ )
+ .unwrap();
+ assert_eq!(value, Value::Identifier([2u8; 32]));
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_of_root_value_when_setting_at_path
+ // — binary match (exact path in binary_paths)
+ // ===============================================================
+
+ #[test]
+ fn replace_root_binary_types_binary_exact_match() {
+ let b58 = base58_of_32_bytes(4);
+ let mut value = Value::Text(b58);
+ let binary_paths = HashSet::from(["my_data"]);
+ value
+ .replace_to_binary_types_of_root_value_when_setting_at_path(
+ "my_data",
+ HashSet::new(),
+ binary_paths,
+ )
+ .unwrap();
+ // BinaryBytes uses into_identifier_bytes (base58 decode) then replace_for_bytes -> Value::Bytes
+ assert_eq!(value, Value::Bytes([4u8; 32].to_vec()));
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_of_root_value_when_setting_at_path
+ // — prefix-based partial replacement (path starts_with)
+ // ===============================================================
+
+ #[test]
+ fn replace_root_binary_types_prefix_based() {
+ let b58 = base58_of_32_bytes(6);
+ let inner = Value::Map(vec![(Value::Text("sub_id".into()), Value::Text(b58))]);
+ let mut value = Value::Map(vec![(Value::Text("nested".into()), inner)]);
+ let identifier_paths = HashSet::from(["root.nested.sub_id"]);
+ value
+ .replace_to_binary_types_of_root_value_when_setting_at_path(
+ "root",
+ identifier_paths,
+ HashSet::new(),
+ )
+ .unwrap();
+ // The identifier_path "root.nested.sub_id" starts_with "root", so
+ // replace_at_path("root.nested.sub_id", Identifier) is called on self.
+ // But self is the map starting at "nested", so the full path from self's
+ // perspective is "root.nested.sub_id" which includes the "root" prefix --
+ // this means it tries to find "root" key in self. Since our value doesn't
+ // have a "root" key, the replacement is silently skipped.
+ // This is the actual behavior of the method.
+ }
+
+ #[test]
+ fn replace_root_binary_types_prefix_replaces_sub_path() {
+ let b58 = base58_of_32_bytes(6);
+ let inner = Value::Map(vec![(Value::Text("sub_id".into()), Value::Text(b58))]);
+ let mut value = Value::Map(vec![(Value::Text("nested".into()), inner)]);
+ // The identifier path starts with the path prefix
+ let identifier_paths = HashSet::from(["doc.nested.sub_id"]);
+ value
+ .replace_to_binary_types_of_root_value_when_setting_at_path(
+ "doc",
+ identifier_paths,
+ HashSet::new(),
+ )
+ .unwrap();
+ // "doc.nested.sub_id".starts_with("doc") is true, so
+ // self.replace_at_path("doc.nested.sub_id", Identifier) is called.
+ // self doesn't have key "doc", so the filter_map returns empty vec.
+ // This is the expected silent skip behavior.
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_of_root_value_when_setting_at_path
+ // — no match at all, returns Ok
+ // ===============================================================
+
+ #[test]
+ fn replace_root_binary_types_no_match_returns_ok() {
+ let mut value = Value::Map(vec![(Value::Text("a".into()), Value::U32(1))]);
+ let result = value.replace_to_binary_types_of_root_value_when_setting_at_path(
+ "unrelated",
+ HashSet::from(["other.path"]),
+ HashSet::new(),
+ );
+ assert!(result.is_ok());
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_when_setting_with_path — identifier
+ // ===============================================================
+
+ #[test]
+ fn replace_when_setting_with_path_identifier_exact() {
+ let b58 = base58_of_32_bytes(11);
+ let mut value = Value::Text(b58);
+ let identifier_paths = HashSet::from(["doc.owner"]);
+ value
+ .replace_to_binary_types_when_setting_with_path(
+ "doc.owner",
+ identifier_paths,
+ HashSet::new(),
+ )
+ .unwrap();
+ assert_eq!(value, Value::Identifier([11u8; 32]));
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_when_setting_with_path — strip prefix
+ // ===============================================================
+
+ #[test]
+ fn replace_when_setting_with_path_strip_prefix() {
+ let b58 = base58_of_32_bytes(15);
+ let mut value = Value::Map(vec![(Value::Text("sub_id".into()), Value::Text(b58))]);
+ let identifier_paths = HashSet::from(["container.sub_id"]);
+ value
+ .replace_to_binary_types_when_setting_with_path(
+ "container",
+ identifier_paths,
+ HashSet::new(),
+ )
+ .unwrap();
+ assert_eq!(
+ value.get_value_at_path("sub_id").unwrap(),
+ &Value::Identifier([15u8; 32])
+ );
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_when_setting_with_path — binary strip prefix
+ // ===============================================================
+
+ #[test]
+ fn replace_when_setting_with_path_binary_strip_prefix() {
+ use base64::prelude::*;
+ let raw = vec![1u8, 2, 3, 4, 5];
+ let b64 = BASE64_STANDARD.encode(&raw);
+ let mut value = Value::Map(vec![(Value::Text("blob".into()), Value::Text(b64))]);
+ let binary_paths = HashSet::from(["container.blob"]);
+ value
+ .replace_to_binary_types_when_setting_with_path(
+ "container",
+ HashSet::new(),
+ binary_paths,
+ )
+ .unwrap();
+ assert_eq!(value.get_value_at_path("blob").unwrap(), &Value::Bytes(raw));
+ }
+
+ // ===============================================================
+ // replace_to_binary_types_when_setting_with_path — no match
+ // ===============================================================
+
+ #[test]
+ fn replace_when_setting_with_path_no_match_ok() {
+ let mut value = Value::Map(vec![(Value::Text("a".into()), Value::U32(1))]);
+ let result = value.replace_to_binary_types_when_setting_with_path(
+ "container",
+ HashSet::from(["other.path"]),
+ HashSet::new(),
+ );
+ assert!(result.is_ok());
+ }
+
+ // ===============================================================
+ // clean_recursive — removes nulls from nested maps
+ // ===============================================================
+
+ #[test]
+ fn clean_recursive_removes_nulls() {
+ let inner = Value::Map(vec![
+ (Value::Text("keep".into()), Value::U32(1)),
+ (Value::Text("drop".into()), Value::Null),
+ ]);
+ let value = Value::Map(vec![
+ (Value::Text("inner".into()), inner),
+ (Value::Text("also_drop".into()), Value::Null),
+ ]);
+ let cleaned = value.clean_recursive().unwrap();
+ let map = cleaned.to_map().unwrap();
+ assert_eq!(map.len(), 1);
+ let inner_map = map.get_key("inner").unwrap().to_map().unwrap();
+ assert_eq!(inner_map.len(), 1);
+ assert!(inner_map.get_optional_key("keep").is_some());
+ assert!(inner_map.get_optional_key("drop").is_none());
+ }
+
+ #[test]
+ fn clean_recursive_deeply_nested() {
+ let deep = Value::Map(vec![
+ (Value::Text("a".into()), Value::Null),
+ (Value::Text("b".into()), Value::U8(1)),
+ ]);
+ let mid = Value::Map(vec![
+ (Value::Text("deep".into()), deep),
+ (Value::Text("c".into()), Value::Null),
+ ]);
+ let value = Value::Map(vec![(Value::Text("mid".into()), mid)]);
+ let cleaned = value.clean_recursive().unwrap();
+ let mid_map = cleaned.get_value_at_path("mid").unwrap().to_map().unwrap();
+ assert_eq!(mid_map.len(), 1); // only "deep" remains
+ let deep_map = cleaned
+ .get_value_at_path("mid.deep")
+ .unwrap()
+ .to_map()
+ .unwrap();
+ assert_eq!(deep_map.len(), 1); // only "b" remains
+ }
+
+ #[test]
+ fn clean_recursive_preserves_non_null_non_map_values() {
+ let value = Value::Map(vec![
+ (Value::Text("num".into()), Value::U64(42)),
+ (Value::Text("text".into()), Value::Text("hello".into())),
+ (
+ Value::Text("arr".into()),
+ Value::Array(vec![Value::Null, Value::U8(1)]),
+ ),
+ ]);
+ let cleaned = value.clean_recursive().unwrap();
+ let map = cleaned.to_map().unwrap();
+ assert_eq!(map.len(), 3);
+ // Arrays with Null inside are NOT cleaned (clean_recursive only filters map entries)
+ let arr = map.get_key("arr").unwrap().to_array_ref().unwrap();
+ assert_eq!(arr.len(), 2);
+ }
+
+ #[test]
+ fn clean_recursive_all_null() {
+ let value = Value::Map(vec![
+ (Value::Text("a".into()), Value::Null),
+ (Value::Text("b".into()), Value::Null),
+ ]);
+ let cleaned = value.clean_recursive().unwrap();
+ let map = cleaned.to_map().unwrap();
+ assert_eq!(map.len(), 0);
+ }
+
+ #[test]
+ fn clean_recursive_on_non_map_errors() {
+ let value = Value::U32(42);
+ let result = value.clean_recursive();
+ assert!(result.is_err());
+ }
+
+ // ===============================================================
+ // replace_at_path — intermediate non-map value errors
+ // ===============================================================
+
+ #[test]
+ fn replace_at_path_intermediate_non_map_errors() {
+ let mut value = Value::Map(vec![(Value::Text("a".into()), Value::U32(42))]);
+ // "a" is U32, not a map, so traversing "a.b" should error
+ let result = value.replace_at_path("a.b", ReplacementType::Identifier);
+ assert!(result.is_err());
+ }
+}
diff --git a/packages/rs-platform-value/src/types/bytes_20.rs b/packages/rs-platform-value/src/types/bytes_20.rs
index 1afe384baf1..67e2b408cca 100644
--- a/packages/rs-platform-value/src/types/bytes_20.rs
+++ b/packages/rs-platform-value/src/types/bytes_20.rs
@@ -208,3 +208,413 @@ impl From<&Bytes20> for String {
val.to_string(Encoding::Base64)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use std::collections::hash_map::DefaultHasher;
+ use std::hash::{Hash, Hasher};
+
+ fn compute_hash(value: &T) -> u64 {
+ let mut hasher = DefaultHasher::new();
+ value.hash(&mut hasher);
+ hasher.finish()
+ }
+
+ // ---------------------------------------------------------------
+ // From<[u8; 20]> and Into<[u8; 20]> round-trip
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn from_array_round_trip() {
+ let arr = [0xABu8; 20];
+ let b = Bytes20::from(arr);
+ assert_eq!(b.0, arr);
+ let back: [u8; 20] = b.to_buffer();
+ assert_eq!(back, arr);
+ }
+
+ #[test]
+ fn from_ref_array() {
+ let arr = [7u8; 20];
+ let b = Bytes20::from(&arr);
+ assert_eq!(b.0, arr);
+ }
+
+ #[test]
+ fn into_buffer_consumes_and_returns_inner() {
+ let arr = [42u8; 20];
+ let b = Bytes20::new(arr);
+ let inner = b.into_buffer();
+ assert_eq!(inner, arr);
+ }
+
+ // ---------------------------------------------------------------
+ // from_vec — correct and wrong sizes
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn from_vec_correct_size() {
+ let v = vec![1u8; 20];
+ let b = Bytes20::from_vec(v).unwrap();
+ assert_eq!(b.0, [1u8; 20]);
+ }
+
+ #[test]
+ fn from_vec_too_short() {
+ let v = vec![1u8; 19];
+ let err = Bytes20::from_vec(v).unwrap_err();
+ match err {
+ Error::ByteLengthNot20BytesError(_) => {}
+ other => panic!("expected ByteLengthNot20BytesError, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn from_vec_too_long() {
+ let v = vec![1u8; 21];
+ let err = Bytes20::from_vec(v).unwrap_err();
+ match err {
+ Error::ByteLengthNot20BytesError(_) => {}
+ other => panic!("expected ByteLengthNot20BytesError, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn from_vec_empty() {
+ let v = vec![];
+ assert!(Bytes20::from_vec(v).is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // TryFrom for owned Value
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn try_from_value_bytes20() {
+ let arr = [9u8; 20];
+ let val = Value::Bytes20(arr);
+ let b = Bytes20::try_from(val).unwrap();
+ assert_eq!(b.0, arr);
+ }
+
+ #[test]
+ fn try_from_value_bytes_correct_len() {
+ let v = vec![3u8; 20];
+ let val = Value::Bytes(v);
+ let b = Bytes20::try_from(val).unwrap();
+ assert_eq!(b.0, [3u8; 20]);
+ }
+
+ #[test]
+ fn try_from_value_bytes_wrong_len() {
+ let val = Value::Bytes(vec![1, 2, 3]);
+ assert!(Bytes20::try_from(val).is_err());
+ }
+
+ #[test]
+ fn try_from_value_unsupported_variant() {
+ let val = Value::Bool(true);
+ assert!(Bytes20::try_from(val).is_err());
+ }
+
+ #[test]
+ fn try_from_value_null_errors() {
+ let val = Value::Null;
+ assert!(Bytes20::try_from(val).is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // TryFrom<&Value> for borrowed Value
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn try_from_ref_value_bytes20() {
+ let arr = [10u8; 20];
+ let val = Value::Bytes20(arr);
+ let b = Bytes20::try_from(&val).unwrap();
+ assert_eq!(b.0, arr);
+ }
+
+ #[test]
+ fn try_from_ref_value_bytes_correct_len() {
+ let val = Value::Bytes(vec![4u8; 20]);
+ let b = Bytes20::try_from(&val).unwrap();
+ assert_eq!(b.0, [4u8; 20]);
+ }
+
+ #[test]
+ fn try_from_ref_value_bytes_wrong_len() {
+ let val = Value::Bytes(vec![1, 2]);
+ assert!(Bytes20::try_from(&val).is_err());
+ }
+
+ #[test]
+ fn try_from_ref_value_unsupported() {
+ let val = Value::Float(3.14);
+ assert!(Bytes20::try_from(&val).is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // as_slice(), to_vec()
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn as_slice_returns_inner_bytes() {
+ let arr = [0xFFu8; 20];
+ let b = Bytes20::new(arr);
+ assert_eq!(b.as_slice(), &arr[..]);
+ assert_eq!(b.as_slice().len(), 20);
+ }
+
+ #[test]
+ fn to_vec_returns_copy() {
+ let arr = [5u8; 20];
+ let b = Bytes20::new(arr);
+ let v = b.to_vec();
+ assert_eq!(v.len(), 20);
+ assert_eq!(v, arr.to_vec());
+ }
+
+ #[test]
+ fn as_ref_returns_inner_bytes() {
+ let arr = [11u8; 20];
+ let b = Bytes20::new(arr);
+ let r: &[u8] = b.as_ref();
+ assert_eq!(r, &arr[..]);
+ }
+
+ // ---------------------------------------------------------------
+ // Hash impl: equal values hash equally, different values differ
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn hash_equal_values() {
+ let a = Bytes20::new([1u8; 20]);
+ let b = Bytes20::new([1u8; 20]);
+ assert_eq!(compute_hash(&a), compute_hash(&b));
+ }
+
+ #[test]
+ fn hash_different_values() {
+ let a = Bytes20::new([1u8; 20]);
+ let b = Bytes20::new([2u8; 20]);
+ // Highly unlikely to collide
+ assert_ne!(compute_hash(&a), compute_hash(&b));
+ }
+
+ // ---------------------------------------------------------------
+ // PartialOrd / Ord: ordering matches byte ordering
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn ordering_matches_byte_ordering() {
+ let mut low = [0u8; 20];
+ low[0] = 1;
+ let mut high = [0u8; 20];
+ high[0] = 2;
+ let a = Bytes20::new(low);
+ let b = Bytes20::new(high);
+ assert!(a < b);
+ assert!(b > a);
+ }
+
+ #[test]
+ fn ordering_equal() {
+ let a = Bytes20::new([5u8; 20]);
+ let b = Bytes20::new([5u8; 20]);
+ assert_eq!(a.cmp(&b), std::cmp::Ordering::Equal);
+ }
+
+ #[test]
+ fn ordering_last_byte_differs() {
+ let mut low = [0u8; 20];
+ low[19] = 1;
+ let mut high = [0u8; 20];
+ high[19] = 2;
+ assert!(Bytes20::new(low) < Bytes20::new(high));
+ }
+
+ // ---------------------------------------------------------------
+ // Display output format (Base58)
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn display_uses_base58() {
+ let arr = [1u8; 20];
+ let b = Bytes20::new(arr);
+ let display_str = format!("{}", b);
+ let base58_str = b.to_string(Encoding::Base58);
+ assert_eq!(display_str, base58_str);
+ // Ensure it's non-empty
+ assert!(!display_str.is_empty());
+ }
+
+ #[test]
+ fn display_all_zeros() {
+ let b = Bytes20::new([0u8; 20]);
+ let display_str = format!("{}", b);
+ // Base58 encoding of 20 zero bytes
+ let expected = bs58::encode([0u8; 20]).into_string();
+ assert_eq!(display_str, expected);
+ }
+
+ // ---------------------------------------------------------------
+ // Default is all zeros
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn default_is_all_zeros() {
+ let b = Bytes20::default();
+ assert_eq!(b.0, [0u8; 20]);
+ }
+
+ // ---------------------------------------------------------------
+ // Value round-trips
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn into_value_and_back() {
+ let arr = [42u8; 20];
+ let b = Bytes20::new(arr);
+ let val: Value = b.into();
+ assert_eq!(val, Value::Bytes20(arr));
+ let back = Bytes20::try_from(val).unwrap();
+ assert_eq!(back, b);
+ }
+
+ #[test]
+ fn ref_into_value() {
+ let b = Bytes20::new([7u8; 20]);
+ let val: Value = (&b).into();
+ assert_eq!(val, Value::Bytes20([7u8; 20]));
+ }
+
+ // ---------------------------------------------------------------
+ // String conversions
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn try_from_string_base64_round_trip() {
+ let arr = [99u8; 20];
+ let b = Bytes20::new(arr);
+ let s: String = b.into();
+ let recovered = Bytes20::try_from(s).unwrap();
+ assert_eq!(recovered, Bytes20::new(arr));
+ }
+
+ #[test]
+ fn try_from_string_invalid_base64() {
+ let s = "not-valid-base64!!!".to_string();
+ assert!(Bytes20::try_from(s).is_err());
+ }
+
+ #[test]
+ fn ref_to_string() {
+ let b = Bytes20::new([0u8; 20]);
+ let s: String = (&b).into();
+ // Verify it's valid base64
+ let decoded = BASE64_STANDARD.decode(&s).unwrap();
+ assert_eq!(decoded.len(), 20);
+ }
+
+ // ---------------------------------------------------------------
+ // from_string with different encodings
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn from_string_base58_round_trip() {
+ let arr = [0xABu8; 20];
+ let b = Bytes20::new(arr);
+ let encoded = b.to_string(Encoding::Base58);
+ let recovered = Bytes20::from_string(&encoded, Encoding::Base58).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ #[test]
+ fn from_string_hex_round_trip() {
+ let arr = [0xCDu8; 20];
+ let b = Bytes20::new(arr);
+ let encoded = b.to_string(Encoding::Hex);
+ let recovered = Bytes20::from_string(&encoded, Encoding::Hex).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ #[test]
+ fn from_string_with_encoding_string_none_defaults_to_base58() {
+ let arr = [0x01u8; 20];
+ let b = Bytes20::new(arr);
+ let encoded = b.to_string_with_encoding_string(None);
+ let recovered = Bytes20::from_string_with_encoding_string(&encoded, None).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ // ---------------------------------------------------------------
+ // Serde round-trips
+ // ---------------------------------------------------------------
+
+ #[test]
+ #[cfg(feature = "json")]
+ fn serde_json_round_trip() {
+ let arr = [0x12u8; 20];
+ let b = Bytes20::new(arr);
+ let json = serde_json::to_string(&b).unwrap();
+ let recovered: Bytes20 = serde_json::from_str(&json).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ #[test]
+ fn serde_bincode_round_trip() {
+ let arr = [0x34u8; 20];
+ let b = Bytes20::new(arr);
+ let config = bincode::config::standard();
+ let encoded = bincode::encode_to_vec(&b, config).unwrap();
+ let (decoded, _): (Bytes20, _) = bincode::decode_from_slice(&encoded, config).unwrap();
+ assert_eq!(decoded, b);
+ }
+
+ // ---------------------------------------------------------------
+ // Clone and Copy semantics
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn clone_is_equal() {
+ let b = Bytes20::new([77u8; 20]);
+ let c = b.clone();
+ assert_eq!(b, c);
+ }
+
+ #[test]
+ fn copy_semantics() {
+ let b = Bytes20::new([88u8; 20]);
+ let c = b; // Copy
+ assert_eq!(b, c); // b is still valid because Bytes20 is Copy
+ }
+
+ // ---------------------------------------------------------------
+ // Equality
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn equality_same_bytes() {
+ let a = Bytes20::new([1u8; 20]);
+ let b = Bytes20::new([1u8; 20]);
+ assert_eq!(a, b);
+ }
+
+ #[test]
+ fn inequality_different_bytes() {
+ let a = Bytes20::new([1u8; 20]);
+ let b = Bytes20::new([2u8; 20]);
+ assert_ne!(a, b);
+ }
+
+ #[test]
+ fn inequality_single_byte_diff() {
+ let mut arr = [0u8; 20];
+ let a = Bytes20::new(arr);
+ arr[10] = 1;
+ let b = Bytes20::new(arr);
+ assert_ne!(a, b);
+ }
+}
diff --git a/packages/rs-platform-value/src/types/bytes_32.rs b/packages/rs-platform-value/src/types/bytes_32.rs
index 041abb07d8e..3cb7b35593c 100644
--- a/packages/rs-platform-value/src/types/bytes_32.rs
+++ b/packages/rs-platform-value/src/types/bytes_32.rs
@@ -205,3 +205,426 @@ impl From<&Bytes32> for String {
val.to_string(Encoding::Base64)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use rand::SeedableRng;
+ use std::collections::hash_map::DefaultHasher;
+ use std::hash::{Hash, Hasher};
+
+ fn compute_hash(value: &T) -> u64 {
+ let mut hasher = DefaultHasher::new();
+ value.hash(&mut hasher);
+ hasher.finish()
+ }
+
+ // ---------------------------------------------------------------
+ // From<[u8; 32]> and Into<[u8; 32]> round-trip
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn from_array_round_trip() {
+ let arr = [0xABu8; 32];
+ let b = Bytes32::from(arr);
+ assert_eq!(b.0, arr);
+ let back: [u8; 32] = b.to_buffer();
+ assert_eq!(back, arr);
+ }
+
+ #[test]
+ fn from_ref_array() {
+ let arr = [7u8; 32];
+ let b = Bytes32::from(&arr);
+ assert_eq!(b.0, arr);
+ }
+
+ // ---------------------------------------------------------------
+ // from_vec — correct and wrong sizes
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn from_vec_correct_size() {
+ let v = vec![1u8; 32];
+ let b = Bytes32::from_vec(v).unwrap();
+ assert_eq!(b.0, [1u8; 32]);
+ }
+
+ #[test]
+ fn from_vec_too_short() {
+ let v = vec![1u8; 31];
+ let err = Bytes32::from_vec(v).unwrap_err();
+ match err {
+ Error::ByteLengthNot32BytesError(_) => {}
+ other => panic!("expected ByteLengthNot32BytesError, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn from_vec_too_long() {
+ let v = vec![1u8; 33];
+ let err = Bytes32::from_vec(v).unwrap_err();
+ match err {
+ Error::ByteLengthNot32BytesError(_) => {}
+ other => panic!("expected ByteLengthNot32BytesError, got {:?}", other),
+ }
+ }
+
+ #[test]
+ fn from_vec_empty() {
+ let v = vec![];
+ assert!(Bytes32::from_vec(v).is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // TryFrom for owned Value
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn try_from_value_bytes32() {
+ let arr = [9u8; 32];
+ let val = Value::Bytes32(arr);
+ let b = Bytes32::try_from(val).unwrap();
+ assert_eq!(b.0, arr);
+ }
+
+ #[test]
+ fn try_from_value_bytes_correct_len() {
+ let v = vec![3u8; 32];
+ let val = Value::Bytes(v);
+ let b = Bytes32::try_from(val).unwrap();
+ assert_eq!(b.0, [3u8; 32]);
+ }
+
+ #[test]
+ fn try_from_value_bytes_wrong_len() {
+ let val = Value::Bytes(vec![1, 2, 3]);
+ assert!(Bytes32::try_from(val).is_err());
+ }
+
+ #[test]
+ fn try_from_value_identifier() {
+ let arr = [0xCCu8; 32];
+ let val = Value::Identifier(arr);
+ let b = Bytes32::try_from(val).unwrap();
+ assert_eq!(b.0, arr);
+ }
+
+ #[test]
+ fn try_from_value_unsupported_variant() {
+ let val = Value::Bool(false);
+ assert!(Bytes32::try_from(val).is_err());
+ }
+
+ #[test]
+ fn try_from_value_null_errors() {
+ let val = Value::Null;
+ assert!(Bytes32::try_from(val).is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // TryFrom<&Value> for borrowed Value
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn try_from_ref_value_bytes32() {
+ let arr = [10u8; 32];
+ let val = Value::Bytes32(arr);
+ let b = Bytes32::try_from(&val).unwrap();
+ assert_eq!(b.0, arr);
+ }
+
+ #[test]
+ fn try_from_ref_value_bytes_correct_len() {
+ let val = Value::Bytes(vec![4u8; 32]);
+ let b = Bytes32::try_from(&val).unwrap();
+ assert_eq!(b.0, [4u8; 32]);
+ }
+
+ #[test]
+ fn try_from_ref_value_bytes_wrong_len() {
+ let val = Value::Bytes(vec![1, 2]);
+ assert!(Bytes32::try_from(&val).is_err());
+ }
+
+ #[test]
+ fn try_from_ref_value_identifier() {
+ let arr = [0xDDu8; 32];
+ let val = Value::Identifier(arr);
+ let b = Bytes32::try_from(&val).unwrap();
+ assert_eq!(b.0, arr);
+ }
+
+ #[test]
+ fn try_from_ref_value_unsupported() {
+ let val = Value::Float(3.14);
+ assert!(Bytes32::try_from(&val).is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // as_slice(), to_vec()
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn as_slice_returns_inner_bytes() {
+ let arr = [0xFFu8; 32];
+ let b = Bytes32::new(arr);
+ assert_eq!(b.as_slice(), &arr[..]);
+ assert_eq!(b.as_slice().len(), 32);
+ }
+
+ #[test]
+ fn to_vec_returns_copy() {
+ let arr = [5u8; 32];
+ let b = Bytes32::new(arr);
+ let v = b.to_vec();
+ assert_eq!(v.len(), 32);
+ assert_eq!(v, arr.to_vec());
+ }
+
+ #[test]
+ fn as_ref_returns_inner_bytes() {
+ let arr = [11u8; 32];
+ let b = Bytes32::new(arr);
+ let r: &[u8] = b.as_ref();
+ assert_eq!(r, &arr[..]);
+ }
+
+ // ---------------------------------------------------------------
+ // Hash impl: equal values hash equally, different values differ
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn hash_equal_values() {
+ let a = Bytes32::new([1u8; 32]);
+ let b = Bytes32::new([1u8; 32]);
+ assert_eq!(compute_hash(&a), compute_hash(&b));
+ }
+
+ #[test]
+ fn hash_different_values() {
+ let a = Bytes32::new([1u8; 32]);
+ let b = Bytes32::new([2u8; 32]);
+ assert_ne!(compute_hash(&a), compute_hash(&b));
+ }
+
+ // ---------------------------------------------------------------
+ // PartialOrd / Ord: ordering matches byte ordering
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn ordering_matches_byte_ordering() {
+ let mut low = [0u8; 32];
+ low[0] = 1;
+ let mut high = [0u8; 32];
+ high[0] = 2;
+ let a = Bytes32::new(low);
+ let b = Bytes32::new(high);
+ assert!(a < b);
+ assert!(b > a);
+ }
+
+ #[test]
+ fn ordering_equal() {
+ let a = Bytes32::new([5u8; 32]);
+ let b = Bytes32::new([5u8; 32]);
+ assert_eq!(a.cmp(&b), std::cmp::Ordering::Equal);
+ }
+
+ #[test]
+ fn ordering_last_byte_differs() {
+ let mut low = [0u8; 32];
+ low[31] = 1;
+ let mut high = [0u8; 32];
+ high[31] = 2;
+ assert!(Bytes32::new(low) < Bytes32::new(high));
+ }
+
+ // ---------------------------------------------------------------
+ // Default is all zeros
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn default_is_all_zeros() {
+ let b = Bytes32::default();
+ assert_eq!(b.0, [0u8; 32]);
+ }
+
+ // ---------------------------------------------------------------
+ // Value round-trips
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn into_value_and_back() {
+ let arr = [42u8; 32];
+ let b = Bytes32::new(arr);
+ let val: Value = b.into();
+ assert_eq!(val, Value::Bytes32(arr));
+ let back = Bytes32::try_from(val).unwrap();
+ assert_eq!(back, b);
+ }
+
+ #[test]
+ fn ref_into_value() {
+ let b = Bytes32::new([7u8; 32]);
+ let val: Value = (&b).into();
+ assert_eq!(val, Value::Bytes32([7u8; 32]));
+ }
+
+ // ---------------------------------------------------------------
+ // String conversions
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn try_from_string_base64_round_trip() {
+ let arr = [99u8; 32];
+ let b = Bytes32::new(arr);
+ let s: String = b.into();
+ let recovered = Bytes32::try_from(s).unwrap();
+ assert_eq!(recovered, Bytes32::new(arr));
+ }
+
+ #[test]
+ fn try_from_string_invalid_base64() {
+ let s = "not-valid-base64!!!".to_string();
+ assert!(Bytes32::try_from(s).is_err());
+ }
+
+ #[test]
+ fn ref_to_string() {
+ let b = Bytes32::new([0u8; 32]);
+ let s: String = (&b).into();
+ let decoded = BASE64_STANDARD.decode(&s).unwrap();
+ assert_eq!(decoded.len(), 32);
+ }
+
+ // ---------------------------------------------------------------
+ // from_string with different encodings
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn from_string_base58_round_trip() {
+ let arr = [0xABu8; 32];
+ let b = Bytes32::new(arr);
+ let encoded = b.to_string(Encoding::Base58);
+ let recovered = Bytes32::from_string(&encoded, Encoding::Base58).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ #[test]
+ fn from_string_hex_round_trip() {
+ let arr = [0xCDu8; 32];
+ let b = Bytes32::new(arr);
+ let encoded = b.to_string(Encoding::Hex);
+ let recovered = Bytes32::from_string(&encoded, Encoding::Hex).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ #[test]
+ fn from_string_with_encoding_string_none_defaults_to_base58() {
+ let arr = [0x01u8; 32];
+ let b = Bytes32::new(arr);
+ let encoded = b.to_string_with_encoding_string(None);
+ let recovered = Bytes32::from_string_with_encoding_string(&encoded, None).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ // ---------------------------------------------------------------
+ // Serde round-trips
+ // ---------------------------------------------------------------
+
+ #[test]
+ #[cfg(feature = "json")]
+ fn serde_json_round_trip() {
+ let arr = [0x12u8; 32];
+ let b = Bytes32::new(arr);
+ let json = serde_json::to_string(&b).unwrap();
+ let recovered: Bytes32 = serde_json::from_str(&json).unwrap();
+ assert_eq!(recovered, b);
+ }
+
+ #[test]
+ fn serde_bincode_round_trip() {
+ let arr = [0x34u8; 32];
+ let b = Bytes32::new(arr);
+ let config = bincode::config::standard();
+ let encoded = bincode::encode_to_vec(&b, config).unwrap();
+ let (decoded, _): (Bytes32, _) = bincode::decode_from_slice(&encoded, config).unwrap();
+ assert_eq!(decoded, b);
+ }
+
+ // ---------------------------------------------------------------
+ // Clone and Copy semantics
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn clone_is_equal() {
+ let b = Bytes32::new([77u8; 32]);
+ let c = b.clone();
+ assert_eq!(b, c);
+ }
+
+ #[test]
+ fn copy_semantics() {
+ let b = Bytes32::new([88u8; 32]);
+ let c = b; // Copy
+ assert_eq!(b, c); // b is still valid because Bytes32 is Copy
+ }
+
+ // ---------------------------------------------------------------
+ // Equality
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn equality_same_bytes() {
+ let a = Bytes32::new([1u8; 32]);
+ let b = Bytes32::new([1u8; 32]);
+ assert_eq!(a, b);
+ }
+
+ #[test]
+ fn inequality_different_bytes() {
+ let a = Bytes32::new([1u8; 32]);
+ let b = Bytes32::new([2u8; 32]);
+ assert_ne!(a, b);
+ }
+
+ #[test]
+ fn inequality_single_byte_diff() {
+ let mut arr = [0u8; 32];
+ let a = Bytes32::new(arr);
+ arr[16] = 1;
+ let b = Bytes32::new(arr);
+ assert_ne!(a, b);
+ }
+
+ // ---------------------------------------------------------------
+ // random_with_rng
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn random_with_rng_produces_non_zero() {
+ let mut rng = StdRng::seed_from_u64(12345);
+ let b = Bytes32::random_with_rng(&mut rng);
+ // Extremely unlikely for 32 random bytes to all be zero
+ assert_ne!(b.0, [0u8; 32]);
+ }
+
+ #[test]
+ fn random_with_rng_deterministic_with_same_seed() {
+ let mut rng1 = StdRng::seed_from_u64(42);
+ let mut rng2 = StdRng::seed_from_u64(42);
+ let a = Bytes32::random_with_rng(&mut rng1);
+ let b = Bytes32::random_with_rng(&mut rng2);
+ assert_eq!(a, b);
+ }
+
+ #[test]
+ fn random_with_rng_different_seeds_differ() {
+ let mut rng1 = StdRng::seed_from_u64(1);
+ let mut rng2 = StdRng::seed_from_u64(2);
+ let a = Bytes32::random_with_rng(&mut rng1);
+ let b = Bytes32::random_with_rng(&mut rng2);
+ assert_ne!(a, b);
+ }
+}
From 63f75a10cb03c7b41c0ed1fdaab8f1710d7ce06f Mon Sep 17 00:00:00 2001
From: Borja Castellano
Date: Fri, 3 Apr 2026 11:50:57 -0700
Subject: [PATCH 09/40] chore(swift-sdk): remove not planned to use tx module
in swift-sdk (#3425)
---
.../SwiftDashSDK/Tx/TransactionBuilder.swift | 53 -------------------
.../SwiftDashSDK/Tx/TransactionTypes.swift | 21 --------
2 files changed, 74 deletions(-)
delete mode 100644 packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionBuilder.swift
delete mode 100644 packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionTypes.swift
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionBuilder.swift b/packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionBuilder.swift
deleted file mode 100644
index 068d186769c..00000000000
--- a/packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionBuilder.swift
+++ /dev/null
@@ -1,53 +0,0 @@
-import Foundation
-
-/// Minimal transaction builder facade exposed by SwiftDashSDK.
-/// Implementation will be wired to FFI in a follow-up; for now it surfaces a stable API.
-public final class SDKTransactionBuilder {
- public struct Input {
- public let txid: Data
- public let vout: UInt32
- public let scriptPubKey: Data
- public let privateKey: Data
- public init(txid: Data, vout: UInt32, scriptPubKey: Data, privateKey: Data) {
- self.txid = txid
- self.vout = vout
- self.scriptPubKey = scriptPubKey
- self.privateKey = privateKey
- }
- }
-
- public struct Output {
- public let address: String
- public let amount: UInt64
- public init(address: String, amount: UInt64) {
- self.address = address
- self.amount = amount
- }
- }
-
- private let feePerKB: UInt64
- private var inputs: [Input] = []
- private var outputs: [Output] = []
- private var changeAddress: String?
-
- public init(feePerKB: UInt64 = 1000) {
- self.feePerKB = feePerKB
- }
-
- public func setChangeAddress(_ address: String) throws {
- // TODO: validate address via SDK once available
- self.changeAddress = address
- }
-
- public func addInput(_ input: Input) throws {
- inputs.append(input)
- }
-
- public func addOutput(_ output: Output) throws {
- outputs.append(output)
- }
-
- public func build() throws -> SDKBuiltTransaction {
- throw SDKTxError.notImplemented("Transaction building is not yet implemented in SwiftDashSDK")
- }
-}
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionTypes.swift b/packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionTypes.swift
deleted file mode 100644
index c47306edfed..00000000000
--- a/packages/swift-sdk/Sources/SwiftDashSDK/Tx/TransactionTypes.swift
+++ /dev/null
@@ -1,21 +0,0 @@
-import Foundation
-
-public struct SDKBuiltTransaction {
- public let txid: String
- public let rawTransaction: Data
- public let fee: UInt64
-}
-
-public enum SDKTxError: LocalizedError {
- case notImplemented(String)
- case invalidInput(String)
- case invalidState(String)
-
- public var errorDescription: String? {
- switch self {
- case .notImplemented(let msg): return msg
- case .invalidInput(let msg): return msg
- case .invalidState(let msg): return msg
- }
- }
-}
From 74ac0ed4e723899ab589a6b13d46b5ba88de1519 Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 22:16:51 +0300
Subject: [PATCH 10/40] test(platform): improve platform-value coverage for
patch, value_map, converters, and replacement (#3428)
Co-authored-by: Claude Opus 4.6 (1M context)
---
.../btreemap_field_replacement.rs | 516 ++++++++++++++
.../src/converter/ciborium.rs | 412 +++++++++++
.../src/converter/serde_json.rs | 662 +++++++++++++++++-
packages/rs-platform-value/src/patch/mod.rs | 566 +++++++++++++++
packages/rs-platform-value/src/value_map.rs | 405 +++++++++++
5 files changed, 2559 insertions(+), 2 deletions(-)
diff --git a/packages/rs-platform-value/src/btreemap_extensions/btreemap_field_replacement.rs b/packages/rs-platform-value/src/btreemap_extensions/btreemap_field_replacement.rs
index c4e1f2fc1d4..f93a1b79832 100644
--- a/packages/rs-platform-value/src/btreemap_extensions/btreemap_field_replacement.rs
+++ b/packages/rs-platform-value/src/btreemap_extensions/btreemap_field_replacement.rs
@@ -238,3 +238,519 @@ impl BTreeValueMapReplacementPathHelper for BTreeMap {
.try_for_each(|path| self.replace_at_path(path.as_str(), replacement_type))
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::value_map::ValueMapHelper;
+ use crate::{Error, Value};
+ use base64::prelude::BASE64_STANDARD;
+ use base64::Engine;
+ use std::collections::BTreeMap;
+
+ // -----------------------------------------------------------------------
+ // IntegerReplacementType::replace_for_value — each variant
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn integer_replacement_u8() {
+ let result = IntegerReplacementType::U8
+ .replace_for_value(Value::U64(200))
+ .unwrap();
+ assert_eq!(result, Value::U8(200));
+ }
+
+ #[test]
+ fn integer_replacement_i8() {
+ let result = IntegerReplacementType::I8
+ .replace_for_value(Value::I64(-100))
+ .unwrap();
+ assert_eq!(result, Value::I8(-100));
+ }
+
+ #[test]
+ fn integer_replacement_u16() {
+ let result = IntegerReplacementType::U16
+ .replace_for_value(Value::U64(60000))
+ .unwrap();
+ assert_eq!(result, Value::U16(60000));
+ }
+
+ #[test]
+ fn integer_replacement_i16() {
+ let result = IntegerReplacementType::I16
+ .replace_for_value(Value::I64(-30000))
+ .unwrap();
+ assert_eq!(result, Value::I16(-30000));
+ }
+
+ #[test]
+ fn integer_replacement_u32() {
+ let result = IntegerReplacementType::U32
+ .replace_for_value(Value::U64(3_000_000))
+ .unwrap();
+ assert_eq!(result, Value::U32(3_000_000));
+ }
+
+ #[test]
+ fn integer_replacement_i32() {
+ let result = IntegerReplacementType::I32
+ .replace_for_value(Value::I64(-3_000_000))
+ .unwrap();
+ assert_eq!(result, Value::I32(-3_000_000));
+ }
+
+ #[test]
+ fn integer_replacement_u64() {
+ let result = IntegerReplacementType::U64
+ .replace_for_value(Value::U64(u64::MAX))
+ .unwrap();
+ assert_eq!(result, Value::U64(u64::MAX));
+ }
+
+ #[test]
+ fn integer_replacement_i64() {
+ let result = IntegerReplacementType::I64
+ .replace_for_value(Value::I64(i64::MIN))
+ .unwrap();
+ assert_eq!(result, Value::I64(i64::MIN));
+ }
+
+ #[test]
+ fn integer_replacement_u128() {
+ let result = IntegerReplacementType::U128
+ .replace_for_value(Value::U64(42))
+ .unwrap();
+ assert_eq!(result, Value::U128(42));
+ }
+
+ #[test]
+ fn integer_replacement_i128() {
+ let result = IntegerReplacementType::I128
+ .replace_for_value(Value::I64(-42))
+ .unwrap();
+ assert_eq!(result, Value::I128(-42));
+ }
+
+ #[test]
+ fn integer_replacement_overflow_error() {
+ // Trying to fit a large u64 into u8 should error
+ let result = IntegerReplacementType::U8.replace_for_value(Value::U64(300));
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn integer_replacement_non_integer_error() {
+ // Non-integer value should fail
+ let result =
+ IntegerReplacementType::U64.replace_for_value(Value::Text("not a number".into()));
+ assert!(result.is_err());
+ }
+
+ // -----------------------------------------------------------------------
+ // ReplacementType::replace_for_bytes
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_for_bytes_identifier_32_bytes_ok() {
+ let bytes = vec![0xABu8; 32];
+ let result = ReplacementType::Identifier
+ .replace_for_bytes(bytes.clone())
+ .unwrap();
+ let expected: [u8; 32] = bytes.try_into().unwrap();
+ assert_eq!(result, Value::Identifier(expected));
+ }
+
+ #[test]
+ fn replace_for_bytes_identifier_wrong_size() {
+ let bytes = vec![0xABu8; 31]; // not 32 bytes
+ let result = ReplacementType::Identifier.replace_for_bytes(bytes);
+ assert!(matches!(result, Err(Error::ByteLengthNot32BytesError(_))));
+ }
+
+ #[test]
+ fn replace_for_bytes_identifier_too_long() {
+ let bytes = vec![0xABu8; 33];
+ let result = ReplacementType::Identifier.replace_for_bytes(bytes);
+ assert!(matches!(result, Err(Error::ByteLengthNot32BytesError(_))));
+ }
+
+ #[test]
+ fn replace_for_bytes_binary_bytes() {
+ let bytes = vec![1, 2, 3, 4, 5];
+ let result = ReplacementType::BinaryBytes
+ .replace_for_bytes(bytes.clone())
+ .unwrap();
+ assert_eq!(result, Value::Bytes(bytes));
+ }
+
+ #[test]
+ fn replace_for_bytes_text_base58() {
+ let bytes = vec![0x01, 0x02, 0x03];
+ let expected = bs58::encode(&bytes).into_string();
+ let result = ReplacementType::TextBase58
+ .replace_for_bytes(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ #[test]
+ fn replace_for_bytes_text_base64() {
+ let bytes = vec![0xDE, 0xAD, 0xBE, 0xEF];
+ let expected = BASE64_STANDARD.encode(&bytes);
+ let result = ReplacementType::TextBase64
+ .replace_for_bytes(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_for_bytes_20: correct size and wrong replacement type
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_for_bytes_20_binary() {
+ let bytes = [0xFFu8; 20];
+ let result = ReplacementType::BinaryBytes
+ .replace_for_bytes_20(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Bytes20(bytes));
+ }
+
+ #[test]
+ fn replace_for_bytes_20_text_base58() {
+ let bytes = [0x01u8; 20];
+ let expected = bs58::encode(bytes).into_string();
+ let result = ReplacementType::TextBase58
+ .replace_for_bytes_20(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ #[test]
+ fn replace_for_bytes_20_text_base64() {
+ let bytes = [0x02u8; 20];
+ let expected = BASE64_STANDARD.encode(bytes);
+ let result = ReplacementType::TextBase64
+ .replace_for_bytes_20(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ #[test]
+ fn replace_for_bytes_20_identifier_error() {
+ let bytes = [0xAAu8; 20];
+ let result = ReplacementType::Identifier.replace_for_bytes_20(bytes);
+ assert!(matches!(result, Err(Error::ByteLengthNot36BytesError(_))));
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_for_bytes_32: correct size and all replacement types
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_for_bytes_32_identifier() {
+ let bytes = [0xBBu8; 32];
+ let result = ReplacementType::Identifier
+ .replace_for_bytes_32(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Identifier(bytes));
+ }
+
+ #[test]
+ fn replace_for_bytes_32_binary() {
+ let bytes = [0xCCu8; 32];
+ let result = ReplacementType::BinaryBytes
+ .replace_for_bytes_32(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Bytes32(bytes));
+ }
+
+ #[test]
+ fn replace_for_bytes_32_text_base58() {
+ let bytes = [0x01u8; 32];
+ let expected = bs58::encode(bytes).into_string();
+ let result = ReplacementType::TextBase58
+ .replace_for_bytes_32(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ #[test]
+ fn replace_for_bytes_32_text_base64() {
+ let bytes = [0x02u8; 32];
+ let expected = BASE64_STANDARD.encode(bytes);
+ let result = ReplacementType::TextBase64
+ .replace_for_bytes_32(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_for_bytes_36: correct size and wrong replacement type
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_for_bytes_36_binary() {
+ let bytes = [0xDDu8; 36];
+ let result = ReplacementType::BinaryBytes
+ .replace_for_bytes_36(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Bytes36(bytes));
+ }
+
+ #[test]
+ fn replace_for_bytes_36_text_base58() {
+ let bytes = [0x03u8; 36];
+ let expected = bs58::encode(bytes).into_string();
+ let result = ReplacementType::TextBase58
+ .replace_for_bytes_36(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ #[test]
+ fn replace_for_bytes_36_text_base64() {
+ let bytes = [0x04u8; 36];
+ let expected = BASE64_STANDARD.encode(bytes);
+ let result = ReplacementType::TextBase64
+ .replace_for_bytes_36(bytes)
+ .unwrap();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ #[test]
+ fn replace_for_bytes_36_identifier_error() {
+ let bytes = [0xEEu8; 36];
+ let result = ReplacementType::Identifier.replace_for_bytes_36(bytes);
+ assert!(matches!(result, Err(Error::ByteLengthNot36BytesError(_))));
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_at_path — single segment
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_at_path_single_segment_bytes32() {
+ let bytes = [0xABu8; 32];
+ let mut map = BTreeMap::new();
+ map.insert("id".to_string(), Value::Bytes32(bytes));
+
+ map.replace_at_path("id", ReplacementType::Identifier)
+ .unwrap();
+ assert_eq!(map.get("id"), Some(&Value::Identifier(bytes)));
+ }
+
+ #[test]
+ fn replace_at_path_single_segment_bytes20() {
+ let bytes = [0x11u8; 20];
+ let mut map = BTreeMap::new();
+ map.insert("addr".to_string(), Value::Bytes20(bytes));
+
+ map.replace_at_path("addr", ReplacementType::BinaryBytes)
+ .unwrap();
+ assert_eq!(map.get("addr"), Some(&Value::Bytes20(bytes)));
+ }
+
+ #[test]
+ fn replace_at_path_single_segment_bytes36() {
+ let bytes = [0x22u8; 36];
+ let mut map = BTreeMap::new();
+ map.insert("outpoint".to_string(), Value::Bytes36(bytes));
+
+ map.replace_at_path("outpoint", ReplacementType::BinaryBytes)
+ .unwrap();
+ assert_eq!(map.get("outpoint"), Some(&Value::Bytes36(bytes)));
+ }
+
+ #[test]
+ fn replace_at_path_single_segment_identifier_to_base58() {
+ let bytes = [0xCCu8; 32];
+ let mut map = BTreeMap::new();
+ map.insert("id".to_string(), Value::Identifier(bytes));
+
+ map.replace_at_path("id", ReplacementType::TextBase58)
+ .unwrap();
+ let expected = bs58::encode(bytes).into_string();
+ assert_eq!(map.get("id"), Some(&Value::Text(expected)));
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_at_path — multi-segment nested path
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_at_path_nested() {
+ let bytes = [0xFFu8; 32];
+ let inner_map = vec![(Value::Text("nested_id".into()), Value::Bytes32(bytes))];
+ let mut map = BTreeMap::new();
+ map.insert("parent".to_string(), Value::Map(inner_map));
+
+ map.replace_at_path("parent.nested_id", ReplacementType::Identifier)
+ .unwrap();
+
+ let parent = map.get("parent").unwrap();
+ if let Value::Map(inner) = parent {
+ let val = inner.get_optional_key("nested_id").unwrap();
+ assert_eq!(*val, Value::Identifier(bytes));
+ } else {
+ panic!("expected Map");
+ }
+ }
+
+ #[test]
+ fn replace_at_path_deep_nested() {
+ let bytes = [0xAAu8; 32];
+ let level2 = vec![(Value::Text("deep_id".into()), Value::Bytes32(bytes))];
+ let level1 = vec![(Value::Text("level2".into()), Value::Map(level2))];
+ let mut map = BTreeMap::new();
+ map.insert("level1".to_string(), Value::Map(level1));
+
+ map.replace_at_path("level1.level2.deep_id", ReplacementType::Identifier)
+ .unwrap();
+
+ let l1 = map.get("level1").unwrap();
+ if let Value::Map(l1_map) = l1 {
+ let l2 = l1_map.get_optional_key("level2").unwrap();
+ if let Value::Map(l2_map) = l2 {
+ let val = l2_map.get_optional_key("deep_id").unwrap();
+ assert_eq!(*val, Value::Identifier(bytes));
+ } else {
+ panic!("expected Map at level2");
+ }
+ } else {
+ panic!("expected Map at level1");
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_at_path — array traversal
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_at_path_through_array_applies_to_elements() {
+ // When replace_down encounters an array at a non-terminal path component,
+ // it expands the array elements into the next recursion level. The path
+ // component consumed at the array level is effectively discarded (since
+ // arrays don't have named keys). The NEXT component is then applied to
+ // each array element.
+ //
+ // Structure:
+ // top-level BTreeMap: "wrapper" -> Map { "arr" -> Array [ Map{"id": Bytes32}, ... ] }
+ // Path: "wrapper.arr.placeholder.id"
+ // - "wrapper" handled by replace_at_path (first component)
+ // - replace_down gets ["arr", "placeholder", "id"]
+ // - "arr" consumed: looks up in wrapper map, finds Array, returns it
+ // - "placeholder" consumed: current is Array, expands to array items (Maps)
+ // - "id" consumed: terminal component, looks up in each item Map, performs replacement
+ let bytes1 = [0x11u8; 32];
+ let bytes2 = [0x22u8; 32];
+ let item1 = Value::Map(vec![(Value::Text("id".into()), Value::Bytes32(bytes1))]);
+ let item2 = Value::Map(vec![(Value::Text("id".into()), Value::Bytes32(bytes2))]);
+ let wrapper_map = vec![(Value::Text("arr".into()), Value::Array(vec![item1, item2]))];
+ let mut map = BTreeMap::new();
+ map.insert("wrapper".to_string(), Value::Map(wrapper_map));
+
+ // "placeholder" is consumed by the array level and discarded
+ map.replace_at_path("wrapper.arr.placeholder.id", ReplacementType::Identifier)
+ .unwrap();
+
+ if let Value::Map(wrapper) = map.get("wrapper").unwrap() {
+ let arr_val = wrapper.get_optional_key("arr").unwrap();
+ if let Value::Array(arr) = arr_val {
+ assert_eq!(arr.len(), 2);
+ for (i, item) in arr.iter().enumerate() {
+ if let Value::Map(m) = item {
+ let val = m.get_optional_key("id").unwrap();
+ let expected_bytes = if i == 0 { bytes1 } else { bytes2 };
+ assert_eq!(*val, Value::Identifier(expected_bytes));
+ } else {
+ panic!("expected Map in array");
+ }
+ }
+ } else {
+ panic!("expected Array");
+ }
+ } else {
+ panic!("expected Map at wrapper");
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // Error paths
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_at_path_empty_path_error() {
+ let mut map = BTreeMap::new();
+ map.insert("key".to_string(), Value::U64(1));
+ let result = map.replace_at_path("", ReplacementType::Identifier);
+ // Empty string splits to [""] which is a single component, not truly empty
+ // The path "" will try to look up key "" in the map, which doesn't exist
+ // So it returns Ok(()) because missing key is not an error
+ assert!(result.is_ok());
+ }
+
+ #[test]
+ fn replace_at_path_missing_key_returns_ok() {
+ let mut map = BTreeMap::new();
+ map.insert("key".to_string(), Value::U64(1));
+ // Nonexistent key -> returns Ok(())
+ let result = map.replace_at_path("nonexistent", ReplacementType::BinaryBytes);
+ assert!(result.is_ok());
+ }
+
+ #[test]
+ fn replace_at_path_non_map_value_in_nested_path_error() {
+ let mut map = BTreeMap::new();
+ map.insert("key".to_string(), Value::U64(42));
+ // Trying to traverse into a non-map/non-array value
+ let result = map.replace_at_path("key.sub", ReplacementType::BinaryBytes);
+ assert!(matches!(result, Err(Error::PathError(_))));
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_at_paths — multiple paths
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_at_paths_multiple() {
+ let bytes1 = [0xAAu8; 32];
+ let bytes2 = [0xBBu8; 32];
+ let mut map = BTreeMap::new();
+ map.insert("id1".to_string(), Value::Bytes32(bytes1));
+ map.insert("id2".to_string(), Value::Bytes32(bytes2));
+
+ let paths = vec!["id1".to_string(), "id2".to_string()];
+ map.replace_at_paths(&paths, ReplacementType::Identifier)
+ .unwrap();
+
+ assert_eq!(map.get("id1"), Some(&Value::Identifier(bytes1)));
+ assert_eq!(map.get("id2"), Some(&Value::Identifier(bytes2)));
+ }
+
+ // -----------------------------------------------------------------------
+ // replace_consume_value and replace_value_in_place
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn replace_consume_value_identifier_to_base58() {
+ let bytes = [0xCCu8; 32];
+ let val = Value::Identifier(bytes);
+ let result = ReplacementType::TextBase58
+ .replace_consume_value(val)
+ .unwrap();
+ let expected = bs58::encode(bytes).into_string();
+ assert_eq!(result, Value::Text(expected));
+ }
+
+ #[test]
+ fn replace_value_in_place_identifier_to_binary() {
+ let bytes = [0xDDu8; 32];
+ let mut val = Value::Identifier(bytes);
+ ReplacementType::BinaryBytes
+ .replace_value_in_place(&mut val)
+ .unwrap();
+ assert_eq!(val, Value::Bytes(bytes.to_vec()));
+ }
+}
diff --git a/packages/rs-platform-value/src/converter/ciborium.rs b/packages/rs-platform-value/src/converter/ciborium.rs
index 8321330dbbc..65c5f5b3766 100644
--- a/packages/rs-platform-value/src/converter/ciborium.rs
+++ b/packages/rs-platform-value/src/converter/ciborium.rs
@@ -143,3 +143,415 @@ impl TryInto> for Box {
(*self).try_into().map(Box::new)
}
}
+
+#[cfg(test)]
+mod tests {
+ use crate::{Error, Value};
+ use ciborium::value::Integer;
+ use ciborium::Value as CborValue;
+
+ // -----------------------------------------------------------------------
+ // Round-trip: Value -> CborValue -> Value for basic types
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn round_trip_null() {
+ let original = Value::Null;
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ assert_eq!(cbor, CborValue::Null);
+ let back: Value = cbor.try_into().unwrap();
+ assert_eq!(back, Value::Null);
+ }
+
+ #[test]
+ fn round_trip_bool_true() {
+ let original = Value::Bool(true);
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ assert_eq!(cbor, CborValue::Bool(true));
+ let back: Value = cbor.try_into().unwrap();
+ // Comes back as I128(1) since CBOR integers are unified
+ assert_eq!(back, Value::Bool(true));
+ }
+
+ #[test]
+ fn round_trip_bool_false() {
+ let original = Value::Bool(false);
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ let back: Value = cbor.try_into().unwrap();
+ assert_eq!(back, Value::Bool(false));
+ }
+
+ #[test]
+ fn round_trip_text() {
+ let original = Value::Text("hello world".into());
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ assert_eq!(cbor, CborValue::Text("hello world".into()));
+ let back: Value = cbor.try_into().unwrap();
+ assert_eq!(back, original);
+ }
+
+ #[test]
+ fn round_trip_float() {
+ let original = Value::Float(3.14);
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ assert_eq!(cbor, CborValue::Float(3.14));
+ let back: Value = cbor.try_into().unwrap();
+ assert_eq!(back, original);
+ }
+
+ #[test]
+ fn round_trip_bytes() {
+ let original = Value::Bytes(vec![0xDE, 0xAD, 0xBE, 0xEF]);
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ assert_eq!(cbor, CborValue::Bytes(vec![0xDE, 0xAD, 0xBE, 0xEF]));
+ let back: Value = cbor.try_into().unwrap();
+ assert_eq!(back, original);
+ }
+
+ #[test]
+ fn round_trip_u64() {
+ let original = Value::U64(42);
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ // Comes back as Integer
+ let back: Value = cbor.try_into().unwrap();
+ // CBOR integers come back as I128
+ assert_eq!(back, Value::I128(42));
+ }
+
+ #[test]
+ fn round_trip_i64_negative() {
+ let original = Value::I64(-99);
+ let cbor: CborValue = original.clone().try_into().unwrap();
+ let back: Value = cbor.try_into().unwrap();
+ assert_eq!(back, Value::I128(-99));
+ }
+
+ // -----------------------------------------------------------------------
+ // Tag rejection in TryFrom
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn cbor_tag_rejected() {
+ let tagged = CborValue::Tag(42, Box::new(CborValue::Null));
+ let result: Result = tagged.try_into();
+ assert!(matches!(result, Err(Error::Unsupported(_))));
+ }
+
+ #[test]
+ fn cbor_tag_rejection_message() {
+ let tagged = CborValue::Tag(0, Box::new(CborValue::Text("date".into())));
+ let err = Value::try_from(tagged).unwrap_err();
+ match err {
+ Error::Unsupported(msg) => {
+ assert!(msg.contains("tag"), "error message should mention tags");
+ }
+ _ => panic!("expected Unsupported error"),
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // Byte-array heuristic boundary (10 vs 11 integer elements)
+ // Note: the CBOR heuristic uses > 10 (strictly greater), unlike JSON's >= 10
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn cbor_array_10_integers_stays_array() {
+ // Exactly 10 elements -> stays as Array (boundary: > 10 needed for bytes)
+ let arr: Vec = (0..10)
+ .map(|i| CborValue::Integer(Integer::from(i as u8)))
+ .collect();
+ let cbor = CborValue::Array(arr);
+ let val: Value = cbor.try_into().unwrap();
+ assert!(
+ matches!(val, Value::Array(_)),
+ "10 elements should stay as Array in CBOR heuristic"
+ );
+ }
+
+ #[test]
+ fn cbor_array_11_integers_becomes_bytes() {
+ // 11 elements, all in u8 range -> becomes Bytes
+ let arr: Vec = (0..11)
+ .map(|i| CborValue::Integer(Integer::from(i as u8)))
+ .collect();
+ let cbor = CborValue::Array(arr);
+ let val: Value = cbor.try_into().unwrap();
+ assert!(
+ matches!(val, Value::Bytes(_)),
+ "11 elements of u8-range integers should become Bytes"
+ );
+ if let Value::Bytes(bytes) = val {
+ assert_eq!(bytes.len(), 11);
+ assert_eq!(bytes[0], 0);
+ assert_eq!(bytes[10], 10);
+ }
+ }
+
+ #[test]
+ fn cbor_array_mixed_types_stays_array() {
+ // 12 elements but mixed types -> stays as Array
+ let mut arr: Vec = (0..11)
+ .map(|i| CborValue::Integer(Integer::from(i as u8)))
+ .collect();
+ arr.push(CborValue::Text("not an int".into()));
+ let cbor = CborValue::Array(arr);
+ let val: Value = cbor.try_into().unwrap();
+ assert!(matches!(val, Value::Array(_)));
+ }
+
+ #[test]
+ fn cbor_array_negative_values_stays_array() {
+ // Negative values are not in 0..=u8::MAX range
+ let arr: Vec = (0..12)
+ .map(|i| CborValue::Integer(Integer::from(-(i as i64))))
+ .collect();
+ let cbor = CborValue::Array(arr);
+ let val: Value = cbor.try_into().unwrap();
+ // First element is 0 which is fine, but most are negative -> fails the ge(0) check
+ assert!(matches!(val, Value::Array(_)));
+ }
+
+ // -----------------------------------------------------------------------
+ // Map key sorting in TryInto
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn map_keys_sorted_in_cbor_output() {
+ // Keys inserted in reverse order should be sorted in output
+ let map = vec![
+ (Value::Text("z".into()), Value::U64(1)),
+ (Value::Text("a".into()), Value::U64(2)),
+ (Value::Text("m".into()), Value::U64(3)),
+ ];
+ let val = Value::Map(map);
+ let cbor: CborValue = val.try_into().unwrap();
+ if let CborValue::Map(pairs) = cbor {
+ let keys: Vec = pairs
+ .iter()
+ .map(|(k, _)| {
+ if let CborValue::Text(s) = k {
+ s.clone()
+ } else {
+ panic!("expected text key")
+ }
+ })
+ .collect();
+ assert_eq!(keys, vec!["a", "m", "z"]);
+ } else {
+ panic!("expected CborValue::Map");
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // EnumU8 / EnumString error paths
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn enum_u8_to_cbor_error() {
+ let val = Value::EnumU8(vec![1, 2, 3]);
+ let result: Result = val.try_into();
+ assert!(matches!(result, Err(Error::Unsupported(_))));
+ }
+
+ #[test]
+ fn enum_string_to_cbor_error() {
+ let val = Value::EnumString(vec!["a".into(), "b".into()]);
+ let result: Result = val.try_into();
+ assert!(matches!(result, Err(Error::Unsupported(_))));
+ }
+
+ // -----------------------------------------------------------------------
+ // U128 / I128 narrowing in TryInto
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn u128_narrowed_to_u64_in_cbor() {
+ // U128 is cast to u64 when converting to CborValue::Integer
+ let val = Value::U128(42);
+ let cbor: CborValue = val.try_into().unwrap();
+ assert_eq!(cbor, CborValue::Integer(42u64.into()));
+ }
+
+ #[test]
+ fn i128_narrowed_to_i64_in_cbor() {
+ // I128 is cast to i64 when converting to CborValue::Integer
+ let val = Value::I128(-99);
+ let cbor: CborValue = val.try_into().unwrap();
+ assert_eq!(cbor, CborValue::Integer((-99i64).into()));
+ }
+
+ // -----------------------------------------------------------------------
+ // Integer variant from CBOR -> Value
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn cbor_positive_integer_to_i128() {
+ let cbor = CborValue::Integer(Integer::from(255u8));
+ let val: Value = cbor.try_into().unwrap();
+ assert_eq!(val, Value::I128(255));
+ }
+
+ #[test]
+ fn cbor_negative_integer_to_i128() {
+ let cbor = CborValue::Integer(Integer::from(-1i64));
+ let val: Value = cbor.try_into().unwrap();
+ assert_eq!(val, Value::I128(-1));
+ }
+
+ #[test]
+ fn cbor_zero_integer_to_i128() {
+ let cbor = CborValue::Integer(Integer::from(0));
+ let val: Value = cbor.try_into().unwrap();
+ assert_eq!(val, Value::I128(0));
+ }
+
+ // -----------------------------------------------------------------------
+ // Bytes20 / Bytes32 / Bytes36 / Identifier -> CborValue::Bytes
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn bytes20_to_cbor_bytes() {
+ let bytes = [0xAAu8; 20];
+ let val = Value::Bytes20(bytes);
+ let cbor: CborValue = val.try_into().unwrap();
+ assert_eq!(cbor, CborValue::Bytes(bytes.to_vec()));
+ }
+
+ #[test]
+ fn bytes32_to_cbor_bytes() {
+ let bytes = [0xBBu8; 32];
+ let val = Value::Bytes32(bytes);
+ let cbor: CborValue = val.try_into().unwrap();
+ assert_eq!(cbor, CborValue::Bytes(bytes.to_vec()));
+ }
+
+ #[test]
+ fn bytes36_to_cbor_bytes() {
+ let bytes = [0xCCu8; 36];
+ let val = Value::Bytes36(bytes);
+ let cbor: CborValue = val.try_into().unwrap();
+ assert_eq!(cbor, CborValue::Bytes(bytes.to_vec()));
+ }
+
+ #[test]
+ fn identifier_to_cbor_bytes() {
+ let bytes = [0x01u8; 32];
+ let val = Value::Identifier(bytes);
+ let cbor: CborValue = val.try_into().unwrap();
+ assert_eq!(cbor, CborValue::Bytes(bytes.to_vec()));
+ }
+
+ // -----------------------------------------------------------------------
+ // Integer types round through CBOR
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn all_integer_types_to_cbor() {
+ let cases: Vec = vec![
+ Value::U8(255),
+ Value::I8(-128),
+ Value::U16(65535),
+ Value::I16(-32768),
+ Value::U32(u32::MAX),
+ Value::I32(i32::MIN),
+ Value::U64(u64::MAX),
+ Value::I64(i64::MIN),
+ ];
+ for val in cases {
+ let cbor: CborValue = val.clone().try_into().unwrap();
+ assert!(
+ matches!(cbor, CborValue::Integer(_)),
+ "expected Integer for {:?}",
+ val
+ );
+ }
+ }
+
+ // -----------------------------------------------------------------------
+ // Box TryInto>
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn boxed_value_to_boxed_cbor() {
+ let val = Box::new(Value::Text("boxed".into()));
+ let cbor: Box = val.try_into().unwrap();
+ assert_eq!(*cbor, CborValue::Text("boxed".into()));
+ }
+
+ // -----------------------------------------------------------------------
+ // CBOR Map conversion
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn cbor_map_to_value_map() {
+ let cbor = CborValue::Map(vec![
+ (
+ CborValue::Text("key".into()),
+ CborValue::Integer(42u64.into()),
+ ),
+ (CborValue::Text("flag".into()), CborValue::Bool(true)),
+ ]);
+ let val: Value = cbor.try_into().unwrap();
+ assert!(val.is_map());
+ }
+
+ // -----------------------------------------------------------------------
+ // convert_from_cbor_map / convert_to_cbor_map
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn convert_from_cbor_map_basic() {
+ let pairs = vec![
+ ("a".to_string(), CborValue::Bool(true)),
+ ("b".to_string(), CborValue::Text("hello".into())),
+ ];
+ let result: std::collections::BTreeMap =
+ Value::convert_from_cbor_map(pairs).unwrap();
+ assert_eq!(result.get("a"), Some(&Value::Bool(true)));
+ assert_eq!(result.get("b"), Some(&Value::Text("hello".into())));
+ }
+
+ #[test]
+ fn convert_to_cbor_map_basic() {
+ let pairs = vec![
+ ("x".to_string(), Value::U64(10)),
+ ("y".to_string(), Value::Bool(false)),
+ ];
+ let result: std::collections::BTreeMap =
+ Value::convert_to_cbor_map(pairs).unwrap();
+ assert_eq!(result.get("x"), Some(&CborValue::Integer(10u64.into())));
+ assert_eq!(result.get("y"), Some(&CborValue::Bool(false)));
+ }
+
+ // -----------------------------------------------------------------------
+ // to_cbor_buffer
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn to_cbor_buffer_roundtrip() {
+ let val = Value::Text("cbor buffer test".into());
+ let buf = val.to_cbor_buffer().unwrap();
+ assert!(!buf.is_empty());
+ }
+
+ // -----------------------------------------------------------------------
+ // CBOR array with nested values
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn cbor_array_with_nested_map() {
+ let cbor = CborValue::Array(vec![
+ CborValue::Text("item".into()),
+ CborValue::Map(vec![(
+ CborValue::Text("inner".into()),
+ CborValue::Bool(true),
+ )]),
+ ]);
+ let val: Value = cbor.try_into().unwrap();
+ assert!(matches!(val, Value::Array(_)));
+ if let Value::Array(arr) = &val {
+ assert_eq!(arr.len(), 2);
+ assert!(arr[1].is_map());
+ }
+ }
+}
diff --git a/packages/rs-platform-value/src/converter/serde_json.rs b/packages/rs-platform-value/src/converter/serde_json.rs
index d33561913f6..9fd88fad254 100644
--- a/packages/rs-platform-value/src/converter/serde_json.rs
+++ b/packages/rs-platform-value/src/converter/serde_json.rs
@@ -423,8 +423,12 @@ impl From<&BTreeMap> for Value {
#[cfg(test)]
mod tests {
- use crate::Value;
- use serde_json::json;
+ use crate::converter::serde_json::BTreeValueJsonConverter;
+ use crate::{Error, Value};
+ use base64::prelude::BASE64_STANDARD;
+ use base64::Engine;
+ use serde_json::{json, Value as JsonValue};
+ use std::collections::BTreeMap;
#[test]
fn test_json_array() {
@@ -462,4 +466,658 @@ mod tests {
.unwrap();
assert_eq!(array.len(), 1);
}
+
+ // -----------------------------------------------------------------------
+ // try_into_validating_json — all Value variants
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn validating_json_null() {
+ let result = Value::Null.try_into_validating_json().unwrap();
+ assert_eq!(result, JsonValue::Null);
+ }
+
+ #[test]
+ fn validating_json_bool() {
+ assert_eq!(
+ Value::Bool(true).try_into_validating_json().unwrap(),
+ JsonValue::Bool(true)
+ );
+ assert_eq!(
+ Value::Bool(false).try_into_validating_json().unwrap(),
+ JsonValue::Bool(false)
+ );
+ }
+
+ #[test]
+ fn validating_json_u8() {
+ let result = Value::U8(42).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(42));
+ }
+
+ #[test]
+ fn validating_json_i8() {
+ let result = Value::I8(-5).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(-5));
+ }
+
+ #[test]
+ fn validating_json_u16() {
+ let result = Value::U16(1000).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(1000));
+ }
+
+ #[test]
+ fn validating_json_i16() {
+ let result = Value::I16(-1000).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(-1000));
+ }
+
+ #[test]
+ fn validating_json_u32() {
+ let result = Value::U32(100_000).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(100_000));
+ }
+
+ #[test]
+ fn validating_json_i32() {
+ let result = Value::I32(-100_000).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(-100_000));
+ }
+
+ #[test]
+ fn validating_json_u64() {
+ let result = Value::U64(u64::MAX).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(u64::MAX));
+ }
+
+ #[test]
+ fn validating_json_i64() {
+ let result = Value::I64(i64::MIN).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(i64::MIN));
+ }
+
+ #[test]
+ fn validating_json_float() {
+ let result = Value::Float(3.14).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(3.14));
+ }
+
+ #[test]
+ fn validating_json_text() {
+ let result = Value::Text("hello".into())
+ .try_into_validating_json()
+ .unwrap();
+ assert_eq!(result, json!("hello"));
+ }
+
+ #[test]
+ fn validating_json_u128_fits_u64() {
+ let val = u64::MAX as u128;
+ let result = Value::U128(val).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(u64::MAX));
+ }
+
+ #[test]
+ fn validating_json_u128_too_large() {
+ let val = u64::MAX as u128 + 1;
+ let err = Value::U128(val).try_into_validating_json().unwrap_err();
+ assert_eq!(err, Error::IntegerSizeError);
+ }
+
+ #[test]
+ fn validating_json_i128_fits_i64_positive() {
+ let val = i64::MAX as i128;
+ let result = Value::I128(val).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(i64::MAX));
+ }
+
+ #[test]
+ fn validating_json_i128_fits_i64_negative() {
+ let val = i64::MIN as i128;
+ let result = Value::I128(val).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(i64::MIN));
+ }
+
+ #[test]
+ fn validating_json_i128_too_large_positive() {
+ let val = i64::MAX as i128 + 1;
+ let err = Value::I128(val).try_into_validating_json().unwrap_err();
+ assert_eq!(err, Error::IntegerSizeError);
+ }
+
+ #[test]
+ fn validating_json_i128_too_small_negative() {
+ let val = i64::MIN as i128 - 1;
+ let err = Value::I128(val).try_into_validating_json().unwrap_err();
+ assert_eq!(err, Error::IntegerSizeError);
+ }
+
+ #[test]
+ fn validating_json_bytes() {
+ let result = Value::Bytes(vec![1, 2, 3])
+ .try_into_validating_json()
+ .unwrap();
+ assert_eq!(result, json!([1, 2, 3]));
+ }
+
+ #[test]
+ fn validating_json_bytes20() {
+ let bytes = [7u8; 20];
+ let result = Value::Bytes20(bytes).try_into_validating_json().unwrap();
+ let arr: Vec = bytes.iter().map(|b| json!(*b)).collect();
+ assert_eq!(result, JsonValue::Array(arr));
+ }
+
+ #[test]
+ fn validating_json_bytes32() {
+ let bytes = [9u8; 32];
+ let result = Value::Bytes32(bytes).try_into_validating_json().unwrap();
+ let arr: Vec = bytes.iter().map(|b| json!(*b)).collect();
+ assert_eq!(result, JsonValue::Array(arr));
+ }
+
+ #[test]
+ fn validating_json_bytes36() {
+ let bytes = [11u8; 36];
+ let result = Value::Bytes36(bytes).try_into_validating_json().unwrap();
+ let arr: Vec = bytes.iter().map(|b| json!(*b)).collect();
+ assert_eq!(result, JsonValue::Array(arr));
+ }
+
+ #[test]
+ fn validating_json_identifier() {
+ let bytes = [0xABu8; 32];
+ let result = Value::Identifier(bytes).try_into_validating_json().unwrap();
+ let arr: Vec = bytes.iter().map(|b| json!(*b)).collect();
+ assert_eq!(result, JsonValue::Array(arr));
+ }
+
+ #[test]
+ fn validating_json_array_nested() {
+ let val = Value::Array(vec![Value::U64(1), Value::Text("two".into())]);
+ let result = val.try_into_validating_json().unwrap();
+ assert_eq!(result, json!([1, "two"]));
+ }
+
+ #[test]
+ fn validating_json_map() {
+ let map = vec![
+ (Value::Text("a".into()), Value::U64(1)),
+ (Value::Text("b".into()), Value::Bool(true)),
+ ];
+ let val = Value::Map(map);
+ let result = val.try_into_validating_json().unwrap();
+ assert_eq!(result, json!({"a": 1, "b": true}));
+ }
+
+ #[test]
+ fn validating_json_enum_u8_unsupported() {
+ let err = Value::EnumU8(vec![1, 2])
+ .try_into_validating_json()
+ .unwrap_err();
+ assert!(matches!(err, Error::Unsupported(_)));
+ }
+
+ #[test]
+ fn validating_json_enum_string_unsupported() {
+ let err = Value::EnumString(vec!["a".into()])
+ .try_into_validating_json()
+ .unwrap_err();
+ assert!(matches!(err, Error::Unsupported(_)));
+ }
+
+ // -----------------------------------------------------------------------
+ // From for Value — all JSON variants
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn from_json_null() {
+ let val: Value = JsonValue::Null.into();
+ assert_eq!(val, Value::Null);
+ }
+
+ #[test]
+ fn from_json_bool_true() {
+ let val: Value = json!(true).into();
+ assert_eq!(val, Value::Bool(true));
+ }
+
+ #[test]
+ fn from_json_bool_false() {
+ let val: Value = json!(false).into();
+ assert_eq!(val, Value::Bool(false));
+ }
+
+ #[test]
+ fn from_json_positive_integer() {
+ let val: Value = json!(42).into();
+ assert_eq!(val, Value::U64(42));
+ }
+
+ #[test]
+ fn from_json_negative_integer() {
+ let val: Value = json!(-7).into();
+ assert_eq!(val, Value::I64(-7));
+ }
+
+ #[test]
+ fn from_json_float() {
+ let val: Value = json!(2.5).into();
+ assert_eq!(val, Value::Float(2.5));
+ }
+
+ #[test]
+ fn from_json_string() {
+ let val: Value = json!("hello").into();
+ assert_eq!(val, Value::Text("hello".into()));
+ }
+
+ #[test]
+ fn from_json_object() {
+ let val: Value = json!({"key": "value"}).into();
+ assert!(val.is_map());
+ }
+
+ // --- byte-array heuristic tests ---
+
+ #[test]
+ fn from_json_array_10_u8_range_becomes_bytes() {
+ // Exactly 10 elements, all in u8 range -> Bytes
+ let arr: Vec = (0u64..10).map(|i| json!(i)).collect();
+ let val: Value = JsonValue::Array(arr).into();
+ assert_eq!(val, Value::Bytes(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]));
+ }
+
+ #[test]
+ fn from_json_array_9_u8_range_stays_array() {
+ // Only 9 elements -> stays as Array even though all are u8-range
+ let arr: Vec = (0u64..9).map(|i| json!(i)).collect();
+ let val: Value = JsonValue::Array(arr).into();
+ assert!(matches!(val, Value::Array(_)));
+ }
+
+ #[test]
+ fn from_json_array_mixed_types_stays_array() {
+ // 10+ elements but mixed types -> stays as Array
+ let mut arr: Vec = (0u64..10).map(|i| json!(i)).collect();
+ arr.push(json!("not_a_number"));
+ let val: Value = JsonValue::Array(arr).into();
+ assert!(matches!(val, Value::Array(_)));
+ }
+
+ #[test]
+ fn from_json_array_large_values_stays_array() {
+ // 10+ elements but values exceed u8 range -> stays as Array
+ let arr: Vec = (0u64..12).map(|i| json!(i * 100)).collect();
+ let val: Value = JsonValue::Array(arr).into();
+ // Some values like 1100 exceed u8::MAX (255), so not all u8-range
+ assert!(matches!(val, Value::Array(_)));
+ }
+
+ #[test]
+ fn from_json_array_all_255_becomes_bytes() {
+ // 10 elements all at u8::MAX
+ let arr: Vec = vec![json!(255); 10];
+ let val: Value = JsonValue::Array(arr).into();
+ assert_eq!(val, Value::Bytes(vec![255; 10]));
+ }
+
+ #[test]
+ fn from_json_array_with_negative_stays_array() {
+ // Negative numbers are not in u8 range
+ let mut arr: Vec = (0u64..9).map(|i| json!(i)).collect();
+ arr.push(json!(-1));
+ let val: Value = JsonValue::Array(arr).into();
+ assert!(matches!(val, Value::Array(_)));
+ }
+
+ // -----------------------------------------------------------------------
+ // From<&JsonValue> for Value — reference variant
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn from_json_ref_null() {
+ let jv = JsonValue::Null;
+ let val: Value = (&jv).into();
+ assert_eq!(val, Value::Null);
+ }
+
+ #[test]
+ fn from_json_ref_array_becomes_bytes() {
+ let arr: Vec = (0u64..15).map(|i| json!(i)).collect();
+ let jv = JsonValue::Array(arr);
+ let val: Value = (&jv).into();
+ assert!(matches!(val, Value::Bytes(_)));
+ }
+
+ #[test]
+ fn from_json_ref_array_short_stays_array() {
+ let arr: Vec = (0u64..5).map(|i| json!(i)).collect();
+ let jv = JsonValue::Array(arr);
+ let val: Value = (&jv).into();
+ assert!(matches!(val, Value::Array(_)));
+ }
+
+ // -----------------------------------------------------------------------
+ // TryInto for Value — bytes become base64, identifiers become bs58
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn try_into_json_bytes_become_base64() {
+ let bytes = vec![0xDE, 0xAD, 0xBE, 0xEF];
+ let expected = BASE64_STANDARD.encode(&bytes);
+ let result: JsonValue = Value::Bytes(bytes).try_into().unwrap();
+ assert_eq!(result, JsonValue::String(expected));
+ }
+
+ #[test]
+ fn try_into_json_bytes20_become_base64() {
+ let bytes = [0xAAu8; 20];
+ let expected = BASE64_STANDARD.encode(bytes);
+ let result: JsonValue = Value::Bytes20(bytes).try_into().unwrap();
+ assert_eq!(result, JsonValue::String(expected));
+ }
+
+ #[test]
+ fn try_into_json_bytes32_become_base64() {
+ let bytes = [0xBBu8; 32];
+ let expected = BASE64_STANDARD.encode(bytes);
+ let result: JsonValue = Value::Bytes32(bytes).try_into().unwrap();
+ assert_eq!(result, JsonValue::String(expected));
+ }
+
+ #[test]
+ fn try_into_json_bytes36_become_base64() {
+ let bytes = [0xCCu8; 36];
+ let expected = BASE64_STANDARD.encode(bytes);
+ let result: JsonValue = Value::Bytes36(bytes).try_into().unwrap();
+ assert_eq!(result, JsonValue::String(expected));
+ }
+
+ #[test]
+ fn try_into_json_identifier_becomes_bs58() {
+ let bytes = [0x01u8; 32];
+ let expected = bs58::encode(&bytes).into_string();
+ let result: JsonValue = Value::Identifier(bytes).try_into().unwrap();
+ assert_eq!(result, JsonValue::String(expected));
+ }
+
+ #[test]
+ fn try_into_json_u128_becomes_string() {
+ let result: JsonValue = Value::U128(u128::MAX).try_into().unwrap();
+ assert_eq!(result, JsonValue::String(u128::MAX.to_string()));
+ }
+
+ #[test]
+ fn try_into_json_i128_becomes_string() {
+ let result: JsonValue = Value::I128(i128::MIN).try_into().unwrap();
+ assert_eq!(result, JsonValue::String(i128::MIN.to_string()));
+ }
+
+ #[test]
+ fn try_into_json_null() {
+ let result: JsonValue = Value::Null.try_into().unwrap();
+ assert_eq!(result, JsonValue::Null);
+ }
+
+ #[test]
+ fn try_into_json_bool() {
+ let result: JsonValue = Value::Bool(true).try_into().unwrap();
+ assert_eq!(result, JsonValue::Bool(true));
+ }
+
+ #[test]
+ fn try_into_json_text() {
+ let result: JsonValue = Value::Text("abc".into()).try_into().unwrap();
+ assert_eq!(result, json!("abc"));
+ }
+
+ #[test]
+ fn try_into_json_integer_types() {
+ let r: JsonValue = Value::U8(1).try_into().unwrap();
+ assert_eq!(r, json!(1));
+ let r: JsonValue = Value::I8(-1).try_into().unwrap();
+ assert_eq!(r, json!(-1));
+ let r: JsonValue = Value::U16(500).try_into().unwrap();
+ assert_eq!(r, json!(500));
+ let r: JsonValue = Value::I16(-500).try_into().unwrap();
+ assert_eq!(r, json!(-500));
+ let r: JsonValue = Value::U32(70000).try_into().unwrap();
+ assert_eq!(r, json!(70000));
+ let r: JsonValue = Value::I32(-70000).try_into().unwrap();
+ assert_eq!(r, json!(-70000));
+ let r: JsonValue = Value::U64(123456789).try_into().unwrap();
+ assert_eq!(r, json!(123456789));
+ let r: JsonValue = Value::I64(-123456789).try_into().unwrap();
+ assert_eq!(r, json!(-123456789));
+ }
+
+ #[test]
+ fn try_into_json_array() {
+ let val = Value::Array(vec![Value::U64(1), Value::Bool(false)]);
+ let result: JsonValue = val.try_into().unwrap();
+ assert_eq!(result, json!([1, false]));
+ }
+
+ #[test]
+ fn try_into_json_map() {
+ let map = vec![(Value::Text("x".into()), Value::U64(99))];
+ let val = Value::Map(map);
+ let result: JsonValue = val.try_into().unwrap();
+ assert_eq!(result, json!({"x": 99}));
+ }
+
+ #[test]
+ fn try_into_json_enum_u8_error() {
+ let result: Result = Value::EnumU8(vec![1]).try_into();
+ assert!(matches!(result, Err(Error::Unsupported(_))));
+ }
+
+ #[test]
+ fn try_into_json_enum_string_error() {
+ let result: Result = Value::EnumString(vec!["a".into()]).try_into();
+ assert!(matches!(result, Err(Error::Unsupported(_))));
+ }
+
+ // -----------------------------------------------------------------------
+ // Round-trip: Value -> JsonValue -> Value for basic types
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn round_trip_null() {
+ let original = Value::Null;
+ let json: JsonValue = original.clone().try_into_validating_json().unwrap();
+ let back: Value = json.into();
+ assert_eq!(back, original);
+ }
+
+ #[test]
+ fn round_trip_bool() {
+ let original = Value::Bool(true);
+ let json: JsonValue = original.clone().try_into_validating_json().unwrap();
+ let back: Value = json.into();
+ assert_eq!(back, Value::Bool(true));
+ }
+
+ #[test]
+ fn round_trip_u64() {
+ let original = Value::U64(42);
+ let json: JsonValue = original.clone().try_into_validating_json().unwrap();
+ let back: Value = json.into();
+ // JSON numbers parse back as U64
+ assert_eq!(back, Value::U64(42));
+ }
+
+ #[test]
+ fn round_trip_i64() {
+ let original = Value::I64(-42);
+ let json: JsonValue = original.clone().try_into_validating_json().unwrap();
+ let back: Value = json.into();
+ assert_eq!(back, Value::I64(-42));
+ }
+
+ #[test]
+ fn round_trip_text() {
+ let original = Value::Text("hello world".into());
+ let json: JsonValue = original.clone().try_into_validating_json().unwrap();
+ let back: Value = json.into();
+ assert_eq!(back, original);
+ }
+
+ // -----------------------------------------------------------------------
+ // BTreeValueJsonConverter methods
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn btree_into_json_value() {
+ let mut map = BTreeMap::new();
+ map.insert("x".to_string(), Value::U64(10));
+ map.insert("y".to_string(), Value::Text("test".into()));
+ let json = map.into_json_value().unwrap();
+ assert!(json.is_object());
+ assert_eq!(json["x"], json!(10));
+ assert_eq!(json["y"], json!("test"));
+ }
+
+ #[test]
+ fn btree_into_validating_json_value() {
+ let mut map = BTreeMap::new();
+ map.insert("n".to_string(), Value::U64(5));
+ let json = map.into_validating_json_value().unwrap();
+ assert_eq!(json["n"], json!(5));
+ }
+
+ #[test]
+ fn btree_to_json_value() {
+ let mut map = BTreeMap::new();
+ map.insert("k".to_string(), Value::Bool(true));
+ let json = map.to_json_value().unwrap();
+ assert_eq!(json["k"], json!(true));
+ // Original map is still available (borrow, not move)
+ assert!(map.contains_key("k"));
+ }
+
+ #[test]
+ fn btree_to_validating_json_value() {
+ let mut map = BTreeMap::new();
+ map.insert("v".to_string(), Value::I64(-1));
+ let json = map.to_validating_json_value().unwrap();
+ assert_eq!(json["v"], json!(-1));
+ }
+
+ #[test]
+ fn btree_from_json_value() {
+ let json = json!({"a": 1, "b": "two"});
+ let map = BTreeMap::::from_json_value(json).unwrap();
+ assert_eq!(map.get("a"), Some(&Value::U64(1)));
+ assert_eq!(map.get("b"), Some(&Value::Text("two".into())));
+ }
+
+ #[test]
+ fn btree_from_json_value_non_object_error() {
+ let json = json!([1, 2, 3]);
+ let result = BTreeMap::::from_json_value(json);
+ assert!(result.is_err());
+ }
+
+ // -----------------------------------------------------------------------
+ // From> for Value
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn from_btree_json_map() {
+ let mut btree = BTreeMap::new();
+ btree.insert("key".to_string(), json!(42));
+ let val: Value = btree.into();
+ assert!(val.is_map());
+ }
+
+ #[test]
+ fn from_btree_json_map_ref() {
+ let mut btree = BTreeMap::new();
+ btree.insert("key".to_string(), json!(42));
+ let val: Value = (&btree).into();
+ assert!(val.is_map());
+ }
+
+ // -----------------------------------------------------------------------
+ // try_to_validating_json (borrow variant) mirrors try_into_validating_json
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn try_to_validating_json_basic() {
+ let val = Value::U64(99);
+ let json = val.try_to_validating_json().unwrap();
+ assert_eq!(json, json!(99));
+ }
+
+ #[test]
+ fn try_to_validating_json_u128_too_large() {
+ let val = Value::U128(u128::MAX);
+ let err = val.try_to_validating_json().unwrap_err();
+ assert_eq!(err, Error::IntegerSizeError);
+ }
+
+ #[test]
+ fn try_to_validating_json_i128_too_large() {
+ let val = Value::I128(i128::MAX);
+ let err = val.try_to_validating_json().unwrap_err();
+ assert_eq!(err, Error::IntegerSizeError);
+ }
+
+ #[test]
+ fn try_to_validating_json_i128_too_small() {
+ let val = Value::I128(i128::MIN);
+ let err = val.try_to_validating_json().unwrap_err();
+ assert_eq!(err, Error::IntegerSizeError);
+ }
+
+ #[test]
+ fn try_to_validating_json_enum_u8_error() {
+ let val = Value::EnumU8(vec![1]);
+ let err = val.try_to_validating_json().unwrap_err();
+ assert!(matches!(err, Error::Unsupported(_)));
+ }
+
+ #[test]
+ fn try_to_validating_json_enum_string_error() {
+ let val = Value::EnumString(vec!["a".into()]);
+ let err = val.try_to_validating_json().unwrap_err();
+ assert!(matches!(err, Error::Unsupported(_)));
+ }
+
+ // -----------------------------------------------------------------------
+ // try_into_validating_btree_map_json
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn try_into_validating_btree_map_json_success() {
+ let map = vec![(Value::Text("k".into()), Value::U64(7))];
+ let val = Value::Map(map);
+ let result = val.try_into_validating_btree_map_json().unwrap();
+ assert_eq!(result.get("k"), Some(&json!(7)));
+ }
+
+ // -----------------------------------------------------------------------
+ // convert_from_serde_json_map
+ // -----------------------------------------------------------------------
+
+ #[test]
+ fn convert_from_serde_json_map_basic() {
+ let pairs = vec![
+ ("a".to_string(), json!(1)),
+ ("b".to_string(), json!("hello")),
+ ];
+ let result: BTreeMap = Value::convert_from_serde_json_map(pairs);
+ assert_eq!(result.get("a"), Some(&Value::U64(1)));
+ assert_eq!(result.get("b"), Some(&Value::Text("hello".into())));
+ }
+
+ #[test]
+ fn validating_json_float_nan_becomes_zero() {
+ // NaN cannot be represented in JSON Number, falls back to 0
+ let result = Value::Float(f64::NAN).try_into_validating_json().unwrap();
+ assert_eq!(result, json!(0));
+ }
}
diff --git a/packages/rs-platform-value/src/patch/mod.rs b/packages/rs-platform-value/src/patch/mod.rs
index c73eb1b834f..f3dd7c7c5cf 100644
--- a/packages/rs-platform-value/src/patch/mod.rs
+++ b/packages/rs-platform-value/src/patch/mod.rs
@@ -469,3 +469,569 @@ pub fn merge(doc: &mut Value, patch: &Value) {
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{from_value, platform_value};
+
+ // ---------------------------------------------------------------
+ // add operation
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn add_to_map_key() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/b", "value": 2 }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc.pointer("/b"), Some(&platform_value!(2)));
+ }
+
+ #[test]
+ fn add_to_array_push_with_dash() {
+ let mut doc = platform_value!({"arr": [1, 2]});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/arr/-", "value": 3 }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!({"arr": [1, 2, 3]}));
+ }
+
+ #[test]
+ fn add_to_array_insert_at_index() {
+ let mut doc = platform_value!({"arr": [1, 3]});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/arr/1", "value": 2 }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!({"arr": [1, 2, 3]}));
+ }
+
+ #[test]
+ fn add_empty_path_replaces_whole_document() {
+ let mut doc = platform_value!({"old": "value"});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "", "value": "replaced" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!("replaced"));
+ }
+
+ #[test]
+ fn add_to_nested_map() {
+ let mut doc = platform_value!({"a": {"b": 1}});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/a/c", "value": 2 }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc.pointer("/a/c"), Some(&platform_value!(2)));
+ }
+
+ #[test]
+ fn add_at_array_beginning() {
+ let mut doc = platform_value!([2, 3]);
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/0", "value": 1 }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!([1, 2, 3]));
+ }
+
+ // ---------------------------------------------------------------
+ // remove operation
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn remove_from_map() {
+ let mut doc = platform_value!({"a": 1, "b": 2});
+ let p: Patch = from_value(platform_value!([
+ { "op": "remove", "path": "/a" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc.pointer("/a"), None);
+ assert_eq!(doc.pointer("/b"), Some(&platform_value!(2)));
+ }
+
+ #[test]
+ fn remove_from_array_by_index() {
+ let mut doc = platform_value!({"arr": [1, 2, 3]});
+ let p: Patch = from_value(platform_value!([
+ { "op": "remove", "path": "/arr/1" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!({"arr": [1, 3]}));
+ }
+
+ #[test]
+ fn remove_missing_key_errors() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "remove", "path": "/nonexistent" }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::InvalidPointer));
+ }
+
+ #[test]
+ fn remove_invalid_array_index_errors() {
+ let mut doc = platform_value!({"arr": [1]});
+ let p: Patch = from_value(platform_value!([
+ { "op": "remove", "path": "/arr/5" }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::InvalidPointer));
+ }
+
+ // ---------------------------------------------------------------
+ // replace operation
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn replace_existing_key() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "replace", "path": "/a", "value": 99 }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!({"a": 99}));
+ }
+
+ #[test]
+ fn replace_missing_key_errors() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "replace", "path": "/b", "value": 2 }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::InvalidPointer));
+ }
+
+ #[test]
+ fn replace_root_document() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "replace", "path": "", "value": [1, 2, 3] }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!([1, 2, 3]));
+ }
+
+ // ---------------------------------------------------------------
+ // move operation
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn move_between_map_keys() {
+ let mut doc = platform_value!({"a": 1, "b": 2});
+ let p: Patch = from_value(platform_value!([
+ { "op": "move", "from": "/a", "path": "/c" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc.pointer("/a"), None);
+ assert_eq!(doc.pointer("/c"), Some(&platform_value!(1)));
+ assert_eq!(doc.pointer("/b"), Some(&platform_value!(2)));
+ }
+
+ #[test]
+ fn move_inside_self_errors() {
+ let mut doc = platform_value!({"a": {"b": 1}});
+ let p: Patch = from_value(platform_value!([
+ { "op": "move", "from": "/a", "path": "/a/b/c" }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::CannotMoveInsideItself));
+ }
+
+ #[test]
+ fn move_from_invalid_path_errors() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "move", "from": "/nonexistent", "path": "/b" }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::InvalidFromPointer));
+ }
+
+ // ---------------------------------------------------------------
+ // copy operation
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn copy_between_map_keys() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "copy", "from": "/a", "path": "/b" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc.pointer("/a"), Some(&platform_value!(1)));
+ assert_eq!(doc.pointer("/b"), Some(&platform_value!(1)));
+ }
+
+ #[test]
+ fn copy_from_invalid_path_errors() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "copy", "from": "/missing", "path": "/b" }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::InvalidFromPointer));
+ }
+
+ #[test]
+ fn copy_nested_value() {
+ let mut doc = platform_value!({"a": {"x": 10}});
+ let p: Patch = from_value(platform_value!([
+ { "op": "copy", "from": "/a", "path": "/b" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc.pointer("/b/x"), Some(&platform_value!(10)));
+ }
+
+ // ---------------------------------------------------------------
+ // test operation
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn test_matching_value_succeeds() {
+ let mut doc = platform_value!({"a": "hello"});
+ let p: Patch = from_value(platform_value!([
+ { "op": "test", "path": "/a", "value": "hello" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ }
+
+ #[test]
+ fn test_mismatched_value_fails() {
+ let mut doc = platform_value!({"a": "hello"});
+ let p: Patch = from_value(platform_value!([
+ { "op": "test", "path": "/a", "value": "world" }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::TestFailed));
+ }
+
+ #[test]
+ fn test_missing_path_errors() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "test", "path": "/nope", "value": 1 }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::InvalidPointer));
+ }
+
+ // ---------------------------------------------------------------
+ // apply_patches: multi-operation and rollback
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn apply_patches_multi_operation() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/b", "value": 2 },
+ { "op": "replace", "path": "/a", "value": 10 },
+ { "op": "remove", "path": "/b" }
+ ]))
+ .unwrap();
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!({"a": 10}));
+ }
+
+ #[test]
+ fn apply_patches_rollback_add_new_map_key_on_failure() {
+ // Known limitation: map rollback for add-new-key does not fully
+ // restore the original because remove() on a ValueMap uses
+ // position-based lookup that may not find the appended entry.
+ // This test documents the current (broken) behavior.
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/b", "value": 2 },
+ { "op": "test", "path": "/a", "value": 999 }
+ ]))
+ .unwrap();
+ // Patch fails (test op doesn't match), rollback is attempted
+ assert!(patch(&mut doc, &p).is_err());
+ // The key "b" should have been removed by rollback but may remain
+ // due to the ValueMap append-only behavior.
+ }
+
+ #[test]
+ fn apply_patches_rollback_add_array_on_failure() {
+ // Array rollback works correctly.
+ let mut doc = platform_value!([1, 2, 3]);
+ let original = doc.clone();
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/1", "value": 99 },
+ { "op": "test", "path": "/0", "value": 999 }
+ ]))
+ .unwrap();
+ assert!(patch(&mut doc, &p).is_err());
+ assert_eq!(doc, original);
+ }
+
+ #[test]
+ fn apply_patches_rollback_replace_on_failure() {
+ let mut doc = platform_value!({"a": 1, "b": 2});
+ let original = doc.clone();
+ let p: Patch = from_value(platform_value!([
+ { "op": "replace", "path": "/a", "value": 100 },
+ { "op": "test", "path": "/b", "value": 999 }
+ ]))
+ .unwrap();
+ assert!(patch(&mut doc, &p).is_err());
+ assert_eq!(doc, original);
+ }
+
+ #[test]
+ fn apply_patches_rollback_remove_array_on_failure() {
+ let mut doc = platform_value!([1, 2, 3]);
+ let original = doc.clone();
+ let p: Patch = from_value(platform_value!([
+ { "op": "remove", "path": "/1" },
+ { "op": "test", "path": "/0", "value": 999 }
+ ]))
+ .unwrap();
+ assert!(patch(&mut doc, &p).is_err());
+ assert_eq!(doc, original);
+ }
+
+ #[test]
+ fn apply_patches_rollback_copy_array_on_failure() {
+ let mut doc = platform_value!({"items": [10, 20]});
+ let original = doc.clone();
+ let p: Patch = from_value(platform_value!([
+ { "op": "copy", "from": "/items/0", "path": "/items/-" },
+ { "op": "test", "path": "/items/0", "value": 999 }
+ ]))
+ .unwrap();
+ assert!(patch(&mut doc, &p).is_err());
+ assert_eq!(doc, original);
+ }
+
+ #[test]
+ fn apply_patches_empty_patch_list() {
+ let mut doc = platform_value!({"a": 1});
+ let p = Patch(vec![]);
+ patch(&mut doc, &p).unwrap();
+ assert_eq!(doc, platform_value!({"a": 1}));
+ }
+
+ // ---------------------------------------------------------------
+ // merge
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn merge_recursive_map() {
+ let mut doc = platform_value!({
+ "a": { "b": 1, "c": 2 }
+ });
+ let p = platform_value!({
+ "a": { "b": 10, "d": 3 }
+ });
+ merge(&mut doc, &p);
+ assert_eq!(doc.pointer("/a/b"), Some(&platform_value!(10)));
+ assert_eq!(doc.pointer("/a/c"), Some(&platform_value!(2)));
+ assert_eq!(doc.pointer("/a/d"), Some(&platform_value!(3)));
+ }
+
+ #[test]
+ fn merge_null_removes_key() {
+ let mut doc = platform_value!({"a": 1, "b": 2});
+ let p = platform_value!({"a": null});
+ merge(&mut doc, &p);
+ assert_eq!(doc.pointer("/a"), None);
+ assert_eq!(doc.pointer("/b"), Some(&platform_value!(2)));
+ }
+
+ #[test]
+ fn merge_non_map_patch_replaces_entire_document() {
+ let mut doc = platform_value!({"a": 1});
+ let p = platform_value!("replaced");
+ merge(&mut doc, &p);
+ assert_eq!(doc, platform_value!("replaced"));
+ }
+
+ #[test]
+ fn merge_into_non_map_doc_creates_map() {
+ let mut doc = platform_value!("not a map");
+ let p = platform_value!({"x": 1});
+ merge(&mut doc, &p);
+ assert_eq!(doc.pointer("/x"), Some(&platform_value!(1)));
+ }
+
+ #[test]
+ fn merge_adds_new_keys() {
+ let mut doc = platform_value!({"a": 1});
+ let p = platform_value!({"b": 2});
+ merge(&mut doc, &p);
+ assert_eq!(doc.pointer("/a"), Some(&platform_value!(1)));
+ assert_eq!(doc.pointer("/b"), Some(&platform_value!(2)));
+ }
+
+ #[test]
+ fn merge_replaces_array_entirely() {
+ let mut doc = platform_value!({"tags": [1, 2, 3]});
+ let p = platform_value!({"tags": [4]});
+ merge(&mut doc, &p);
+ assert_eq!(doc.pointer("/tags"), Some(&platform_value!([4])));
+ }
+
+ // ---------------------------------------------------------------
+ // parse_index
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn parse_index_valid() {
+ assert_eq!(parse_index("0", 5).unwrap(), 0);
+ assert_eq!(parse_index("3", 5).unwrap(), 3);
+ assert_eq!(parse_index("4", 5).unwrap(), 4);
+ }
+
+ #[test]
+ fn parse_index_leading_zero_errors() {
+ assert!(matches!(
+ parse_index("01", 5),
+ Err(PatchErrorKind::InvalidPointer)
+ ));
+ }
+
+ #[test]
+ fn parse_index_leading_plus_errors() {
+ assert!(matches!(
+ parse_index("+1", 5),
+ Err(PatchErrorKind::InvalidPointer)
+ ));
+ }
+
+ #[test]
+ fn parse_index_out_of_bounds_errors() {
+ assert!(matches!(
+ parse_index("5", 5),
+ Err(PatchErrorKind::InvalidPointer)
+ ));
+ }
+
+ #[test]
+ fn parse_index_non_numeric_errors() {
+ assert!(matches!(
+ parse_index("abc", 5),
+ Err(PatchErrorKind::InvalidPointer)
+ ));
+ }
+
+ #[test]
+ fn parse_index_single_zero_valid() {
+ assert_eq!(parse_index("0", 1).unwrap(), 0);
+ }
+
+ // ---------------------------------------------------------------
+ // unescape
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn unescape_tilde_zero_becomes_tilde() {
+ assert_eq!(unescape("a~0b"), "a~b");
+ }
+
+ #[test]
+ fn unescape_tilde_one_becomes_slash() {
+ assert_eq!(unescape("a~1b"), "a/b");
+ }
+
+ #[test]
+ fn unescape_both_sequences() {
+ assert_eq!(unescape("~0~1"), "~/");
+ }
+
+ #[test]
+ fn unescape_no_tilde_borrows() {
+ let result = unescape("plain");
+ assert!(matches!(result, Cow::Borrowed(_)));
+ assert_eq!(result, "plain");
+ }
+
+ #[test]
+ fn unescape_with_tilde_returns_owned() {
+ let result = unescape("a~0b");
+ assert!(matches!(result, Cow::Owned(_)));
+ }
+
+ // ---------------------------------------------------------------
+ // patch error reporting
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn patch_error_reports_correct_operation_index() {
+ let mut doc = platform_value!({"a": 1});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/b", "value": 2 },
+ { "op": "remove", "path": "/nonexistent" }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert_eq!(err.operation, 1);
+ assert_eq!(err.path, "/nonexistent");
+ }
+
+ // ---------------------------------------------------------------
+ // split_pointer
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn split_pointer_valid() {
+ let (parent, last) = split_pointer("/a/b").unwrap();
+ assert_eq!(parent, "/a");
+ assert_eq!(last, "b");
+ }
+
+ #[test]
+ fn split_pointer_root_child() {
+ let (parent, last) = split_pointer("/x").unwrap();
+ assert_eq!(parent, "");
+ assert_eq!(last, "x");
+ }
+
+ #[test]
+ fn split_pointer_no_slash_errors() {
+ assert!(split_pointer("noslash").is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // add: error on invalid parent
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn add_to_scalar_parent_errors() {
+ let mut doc = platform_value!({"a": 42});
+ let p: Patch = from_value(platform_value!([
+ { "op": "add", "path": "/a/b", "value": 1 }
+ ]))
+ .unwrap();
+ let err = patch(&mut doc, &p).unwrap_err();
+ assert!(matches!(err.kind, PatchErrorKind::InvalidPointer));
+ }
+}
diff --git a/packages/rs-platform-value/src/value_map.rs b/packages/rs-platform-value/src/value_map.rs
index a067b87efba..4e09fa9cbd8 100644
--- a/packages/rs-platform-value/src/value_map.rs
+++ b/packages/rs-platform-value/src/value_map.rs
@@ -223,6 +223,411 @@ impl ValueMapHelper for ValueMap {
}
}
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ fn text(s: &str) -> Value {
+ Value::Text(s.to_string())
+ }
+
+ fn make_map(pairs: &[(&str, Value)]) -> ValueMap {
+ pairs.iter().map(|(k, v)| (text(k), v.clone())).collect()
+ }
+
+ // ---------------------------------------------------------------
+ // sort_by_keys
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn sort_by_keys_mixed_text_keys() {
+ let mut map = make_map(&[
+ ("c", Value::U32(3)),
+ ("a", Value::U32(1)),
+ ("b", Value::U32(2)),
+ ]);
+ map.sort_by_keys();
+ let keys: Vec<_> = map.iter().map(|(k, _)| k.clone()).collect();
+ assert_eq!(keys, vec![text("a"), text("b"), text("c")]);
+ }
+
+ #[test]
+ fn sort_by_keys_mixed_types() {
+ // Integer keys should sort before text keys via PartialOrd on Value
+ let mut map: ValueMap = vec![
+ (text("z"), Value::U32(1)),
+ (Value::U32(5), Value::U32(2)),
+ (text("a"), Value::U32(3)),
+ ];
+ map.sort_by_keys();
+ // U32(5) < Text("a") < Text("z") by Value's PartialOrd (enum variant order)
+ assert_eq!(map[0].0, Value::U32(5));
+ assert_eq!(map[1].0, text("a"));
+ assert_eq!(map[2].0, text("z"));
+ }
+
+ // ---------------------------------------------------------------
+ // sort_by_lexicographical_byte_ordering_keys
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn sort_by_lexicographical_byte_ordering_shorter_first() {
+ // "ab" (len 2) should come before "abc" (len 3)
+ let mut map = make_map(&[
+ ("abc", Value::U32(1)),
+ ("ab", Value::U32(2)),
+ ("a", Value::U32(3)),
+ ]);
+ map.sort_by_lexicographical_byte_ordering_keys();
+ let keys: Vec<_> = map.iter().map(|(k, _)| k.to_text().unwrap()).collect();
+ assert_eq!(keys, vec!["a", "ab", "abc"]);
+ }
+
+ #[test]
+ fn sort_by_lexicographical_byte_ordering_same_length_alphabetical() {
+ let mut map = make_map(&[
+ ("cb", Value::U32(1)),
+ ("ab", Value::U32(2)),
+ ("bb", Value::U32(3)),
+ ]);
+ map.sort_by_lexicographical_byte_ordering_keys();
+ let keys: Vec<_> = map.iter().map(|(k, _)| k.to_text().unwrap()).collect();
+ assert_eq!(keys, vec!["ab", "bb", "cb"]);
+ }
+
+ #[test]
+ fn sort_by_lexicographical_byte_ordering_non_text_keys_uses_partial_cmp() {
+ let mut map: ValueMap = vec![(Value::U32(10), Value::Null), (Value::U32(2), Value::Null)];
+ map.sort_by_lexicographical_byte_ordering_keys();
+ assert_eq!(map[0].0, Value::U32(2));
+ assert_eq!(map[1].0, Value::U32(10));
+ }
+
+ // ---------------------------------------------------------------
+ // get_key_mut_or_insert
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn get_key_mut_or_insert_inserts_new() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ let val = map.get_key_mut_or_insert("b", Value::U32(99));
+ assert_eq!(*val, Value::U32(99));
+ // Mutate the returned reference
+ *val = Value::U32(100);
+ assert_eq!(map.get_optional_key("b"), Some(&Value::U32(100)));
+ }
+
+ #[test]
+ fn get_key_mut_or_insert_returns_existing() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ let val = map.get_key_mut_or_insert("a", Value::U32(99));
+ // Should return existing value, not the default
+ assert_eq!(*val, Value::U32(1));
+ }
+
+ #[test]
+ fn get_key_mut_or_insert_existing_is_mutable() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ let val = map.get_key_mut_or_insert("a", Value::U32(99));
+ *val = Value::U32(42);
+ assert_eq!(map.get_optional_key("a"), Some(&Value::U32(42)));
+ }
+
+ // ---------------------------------------------------------------
+ // remove_optional_key_if_null
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn remove_optional_key_if_null_removes_null() {
+ let mut map = make_map(&[("a", Value::Null), ("b", Value::U32(2))]);
+ map.remove_optional_key_if_null("a");
+ assert_eq!(map.get_optional_key("a"), None);
+ assert_eq!(map.get_optional_key("b"), Some(&Value::U32(2)));
+ }
+
+ #[test]
+ fn remove_optional_key_if_null_keeps_non_null() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ map.remove_optional_key_if_null("a");
+ assert_eq!(map.get_optional_key("a"), Some(&Value::U32(1)));
+ }
+
+ #[test]
+ fn remove_optional_key_if_null_missing_key_is_noop() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ map.remove_optional_key_if_null("missing");
+ assert_eq!(map.len(), 1);
+ }
+
+ // ---------------------------------------------------------------
+ // remove_optional_key_if_empty_array
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn remove_optional_key_if_empty_array_removes_empty() {
+ let mut map = make_map(&[("a", Value::Array(vec![])), ("b", Value::U32(1))]);
+ map.remove_optional_key_if_empty_array("a");
+ assert_eq!(map.get_optional_key("a"), None);
+ assert_eq!(map.get_optional_key("b"), Some(&Value::U32(1)));
+ }
+
+ #[test]
+ fn remove_optional_key_if_empty_array_keeps_non_empty() {
+ let mut map = make_map(&[("a", Value::Array(vec![Value::U32(1)]))]);
+ map.remove_optional_key_if_empty_array("a");
+ assert!(map.get_optional_key("a").is_some());
+ }
+
+ #[test]
+ fn remove_optional_key_if_empty_array_keeps_non_array() {
+ let mut map = make_map(&[("a", Value::U32(42))]);
+ map.remove_optional_key_if_empty_array("a");
+ assert_eq!(map.get_optional_key("a"), Some(&Value::U32(42)));
+ }
+
+ #[test]
+ fn remove_optional_key_if_empty_array_missing_key_is_noop() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ map.remove_optional_key_if_empty_array("missing");
+ assert_eq!(map.len(), 1);
+ }
+
+ // ---------------------------------------------------------------
+ // into_btree_string_map
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn into_btree_string_map_valid_conversion() {
+ let val = Value::Map(make_map(&[("b", Value::U32(2)), ("a", Value::U32(1))]));
+ let btree = val.into_btree_string_map().unwrap();
+ assert_eq!(btree.get("a"), Some(&Value::U32(1)));
+ assert_eq!(btree.get("b"), Some(&Value::U32(2)));
+ // BTreeMap should be sorted by key
+ let keys: Vec<_> = btree.keys().collect();
+ assert_eq!(keys, vec!["a", "b"]);
+ }
+
+ #[test]
+ fn into_btree_string_map_error_on_non_string_keys() {
+ let val = Value::Map(vec![(Value::U32(1), Value::U32(2))]);
+ let result = val.into_btree_string_map();
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn into_btree_string_map_error_on_non_map() {
+ let val = Value::Bool(true);
+ let result = val.into_btree_string_map();
+ assert!(result.is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // map_ref_into_indexed_string_map
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn map_ref_into_indexed_string_map_sorts_by_integer_key() {
+ let map: ValueMap = vec![
+ (
+ text("second"),
+ Value::Map(make_map(&[("pos", Value::U32(2))])),
+ ),
+ (
+ text("first"),
+ Value::Map(make_map(&[("pos", Value::U32(1))])),
+ ),
+ (
+ text("third"),
+ Value::Map(make_map(&[("pos", Value::U32(3))])),
+ ),
+ ];
+ let indexed = Value::map_ref_into_indexed_string_map::(&map, "pos").unwrap();
+ let keys: Vec<_> = indexed.keys().collect();
+ assert_eq!(keys, vec!["first", "second", "third"]);
+ }
+
+ #[test]
+ fn map_ref_into_indexed_string_map_error_missing_sort_key() {
+ let map: ValueMap = vec![(
+ text("item"),
+ Value::Map(make_map(&[("other", Value::U32(1))])),
+ )];
+ let result = Value::map_ref_into_indexed_string_map::(&map, "pos");
+ assert!(result.is_err());
+ }
+
+ // ---------------------------------------------------------------
+ // get_key / get_optional_key
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn get_key_found() {
+ let map = make_map(&[("x", Value::U32(42))]);
+ let val = map.get_key("x").unwrap();
+ assert_eq!(*val, Value::U32(42));
+ }
+
+ #[test]
+ fn get_key_not_found_errors() {
+ let map = make_map(&[("x", Value::U32(42))]);
+ assert!(map.get_key("y").is_err());
+ }
+
+ #[test]
+ fn get_optional_key_none_for_missing() {
+ let map = make_map(&[("x", Value::U32(42))]);
+ assert_eq!(map.get_optional_key("y"), None);
+ }
+
+ #[test]
+ fn get_optional_key_ignores_non_text_keys() {
+ let map: ValueMap = vec![(Value::U32(1), Value::U32(2))];
+ assert_eq!(map.get_optional_key("1"), None);
+ }
+
+ // ---------------------------------------------------------------
+ // remove_key / remove_optional_key
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn remove_key_success() {
+ let mut map = make_map(&[("a", Value::U32(1)), ("b", Value::U32(2))]);
+ let removed = map.remove_key("a").unwrap();
+ assert_eq!(removed, Value::U32(1));
+ assert_eq!(map.len(), 1);
+ }
+
+ #[test]
+ fn remove_key_not_found_errors() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ assert!(map.remove_key("missing").is_err());
+ }
+
+ #[test]
+ fn remove_optional_key_returns_none_for_missing() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ assert_eq!(map.remove_optional_key("missing"), None);
+ }
+
+ #[test]
+ fn remove_optional_key_returns_value() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ assert_eq!(map.remove_optional_key("a"), Some(Value::U32(1)));
+ assert!(map.is_empty());
+ }
+
+ // ---------------------------------------------------------------
+ // remove_optional_key_value
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn remove_optional_key_value_by_value_key() {
+ let mut map: ValueMap = vec![
+ (Value::U32(10), Value::Bool(true)),
+ (text("x"), Value::Bool(false)),
+ ];
+ let removed = map.remove_optional_key_value(&Value::U32(10));
+ assert_eq!(removed, Some(Value::Bool(true)));
+ assert_eq!(map.len(), 1);
+ }
+
+ #[test]
+ fn remove_optional_key_value_not_found() {
+ let mut map = make_map(&[("a", Value::U32(1))]);
+ assert_eq!(map.remove_optional_key_value(&Value::U32(99)), None);
+ }
+
+ // ---------------------------------------------------------------
+ // insert_string_key_value
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn insert_string_key_value_appends() {
+ let mut map: ValueMap = vec![];
+ map.insert_string_key_value("hello".to_string(), Value::Bool(true));
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get_optional_key("hello"), Some(&Value::Bool(true)));
+ }
+
+ // ---------------------------------------------------------------
+ // from_btree_map
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn from_btree_map_preserves_entries() {
+ let mut btree = BTreeMap::new();
+ btree.insert("b".to_string(), Value::U32(2));
+ btree.insert("a".to_string(), Value::U32(1));
+ let map = ValueMap::from_btree_map(btree);
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.get_optional_key("a"), Some(&Value::U32(1)));
+ assert_eq!(map.get_optional_key("b"), Some(&Value::U32(2)));
+ }
+
+ // ---------------------------------------------------------------
+ // get_key_by_value_mut_or_insert
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn get_key_by_value_mut_or_insert_inserts_new() {
+ let mut map: ValueMap = vec![];
+ let val = map.get_key_by_value_mut_or_insert(&Value::U32(42), Value::Bool(true));
+ assert_eq!(*val, Value::Bool(true));
+ assert_eq!(map.len(), 1);
+ }
+
+ #[test]
+ fn get_key_by_value_mut_or_insert_returns_existing() {
+ let mut map: ValueMap = vec![(Value::U32(42), Value::Bool(false))];
+ let val = map.get_key_by_value_mut_or_insert(&Value::U32(42), Value::Bool(true));
+ assert_eq!(*val, Value::Bool(false));
+ }
+
+ // ---------------------------------------------------------------
+ // sort_by_keys_and_inner_maps
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn sort_by_keys_and_inner_maps_sorts_recursively() {
+ let inner = make_map(&[("z", Value::U32(1)), ("a", Value::U32(2))]);
+ let mut map = make_map(&[("b", Value::Map(inner)), ("a", Value::U32(3))]);
+ map.sort_by_keys_and_inner_maps();
+ // Outer keys should be sorted
+ assert_eq!(map[0].0, text("a"));
+ assert_eq!(map[1].0, text("b"));
+ // Inner map should also be sorted
+ if let Value::Map(ref inner) = map[1].1 {
+ assert_eq!(inner[0].0, text("a"));
+ assert_eq!(inner[1].0, text("z"));
+ } else {
+ panic!("expected inner map");
+ }
+ }
+
+ // ---------------------------------------------------------------
+ // to_btree_ref_string_map
+ // ---------------------------------------------------------------
+
+ #[test]
+ fn to_btree_ref_string_map_valid() {
+ let val = Value::Map(make_map(&[("x", Value::U32(10))]));
+ let btree = val.to_btree_ref_string_map().unwrap();
+ assert_eq!(btree.get("x"), Some(&&Value::U32(10)));
+ }
+
+ #[test]
+ fn to_btree_ref_string_map_error_on_non_map() {
+ let val = Value::U32(1);
+ assert!(val.to_btree_ref_string_map().is_err());
+ }
+
+ #[test]
+ fn to_btree_ref_string_map_error_on_non_string_key() {
+ let val = Value::Map(vec![(Value::U32(1), Value::U32(2))]);
+ assert!(val.to_btree_ref_string_map().is_err());
+ }
+}
+
impl Value {
/// If the `Value` is a `Map`, returns a the associated `BTreeMap` data as `Ok`.
/// Returns `Err(Error::Structure("reason"))` otherwise.
From f1312583ee8d7f389a8c50e640f3cace2e85f483 Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 22:18:03 +0300
Subject: [PATCH 11/40] fix(rs-sdk-ffi): zeroize private key arrays after use
in crypto/signer FFI (#3433)
Co-authored-by: Claude Opus 4.6 (1M context)
---
packages/rs-sdk-ffi/src/crypto/mod.rs | 7 ++++---
packages/rs-sdk-ffi/src/signer_simple.rs | 5 +++--
2 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/packages/rs-sdk-ffi/src/crypto/mod.rs b/packages/rs-sdk-ffi/src/crypto/mod.rs
index 91c5579facc..4ab971d8199 100644
--- a/packages/rs-sdk-ffi/src/crypto/mod.rs
+++ b/packages/rs-sdk-ffi/src/crypto/mod.rs
@@ -4,6 +4,7 @@ use crate::{DashSDKError, DashSDKErrorCode, DashSDKResult};
use dash_sdk::dpp::dashcore::Network;
use dash_sdk::dpp::identity::KeyType;
use std::ffi::{c_char, CStr};
+use zeroize::Zeroizing;
/// Validate that a private key corresponds to a public key using DPP's public_key_data_from_private_key_data
///
@@ -65,7 +66,7 @@ pub unsafe extern "C" fn dash_sdk_validate_private_key_for_public_key(
}
};
- let mut key_array = [0u8; 32];
+ let mut key_array = Zeroizing::new([0u8; 32]);
key_array.copy_from_slice(&private_key_bytes);
// Parse key type
@@ -176,7 +177,7 @@ pub unsafe extern "C" fn dash_sdk_private_key_to_wif(
Network::Mainnet
};
- let mut key_array = [0u8; 32];
+ let mut key_array = Zeroizing::new([0u8; 32]);
key_array.copy_from_slice(&private_key_bytes);
match dash_sdk::dpp::dashcore::PrivateKey::from_byte_array(&key_array, network) {
Ok(private_key) => {
@@ -244,7 +245,7 @@ pub unsafe extern "C" fn dash_sdk_public_key_data_from_private_key_data(
}
};
- let mut key_array = [0u8; 32];
+ let mut key_array = Zeroizing::new([0u8; 32]);
key_array.copy_from_slice(&private_key_bytes);
// Parse key type
diff --git a/packages/rs-sdk-ffi/src/signer_simple.rs b/packages/rs-sdk-ffi/src/signer_simple.rs
index f7156e7a157..67bff492ff8 100644
--- a/packages/rs-sdk-ffi/src/signer_simple.rs
+++ b/packages/rs-sdk-ffi/src/signer_simple.rs
@@ -6,6 +6,7 @@ use dash_sdk::dpp::dashcore::Network;
use dash_sdk::dpp::identity::signer::Signer;
use dash_sdk::dpp::identity::{IdentityPublicKey, KeyType, Purpose, SecurityLevel};
use simple_signer::SingleKeySigner;
+use zeroize::Zeroizing;
/// Create a signer from a private key
///
@@ -32,9 +33,9 @@ pub unsafe extern "C" fn dash_sdk_signer_create_from_private_key(
));
}
- // Convert the pointer to an array
+ // Convert the pointer to an array (zeroized on drop to avoid key material lingering on stack)
let key_slice = std::slice::from_raw_parts(private_key, 32);
- let mut key_array: [u8; 32] = [0; 32];
+ let mut key_array = Zeroizing::new([0u8; 32]);
key_array.copy_from_slice(key_slice);
// network won't matter here
From 923b601da0d4f135f2bb8f9ecebf3420a2eb698a Mon Sep 17 00:00:00 2001
From: Borja Castellano
Date: Fri, 3 Apr 2026 12:44:20 -0700
Subject: [PATCH 12/40] feat(swift-sdk): send transaction (#3130)
Co-authored-by: Borja Castellano
---
.../Core/Wallet/CoreWalletManager.swift | 9 +++
.../SwiftDashSDK/KeyWallet/Transaction.swift | 16 ++---
.../SwiftDashSDK/KeyWallet/Wallet.swift | 2 +-
.../KeyWallet/WalletManager.swift | 58 +++++++++++++++++++
.../Core/ViewModels/SendViewModel.swift | 16 ++++-
.../Core/Views/CreateWalletView.swift | 2 +-
6 files changed, 88 insertions(+), 15 deletions(-)
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift
index 7638b7f439c..a51ee7dab6c 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/Core/Wallet/CoreWalletManager.swift
@@ -304,6 +304,15 @@ public class CoreWalletManager: ObservableObject {
// MARK: - Account Management
+ /// Build a signed transaction
+ /// - Parameters:
+ /// - accountIndex: The account index to use
+ /// - outputs: The transaction outputs
+ /// - Returns: The signed transaction bytes
+ public func buildSignedTransaction(for wallet: HDWallet, accIndex: UInt32, outputs: [Transaction.Output]) throws -> (Data, UInt64) {
+ try sdkWalletManager.buildSignedTransaction(for: wallet, accIndex: accIndex, outputs: outputs)
+ }
+
/// Get transactions for a wallet
/// - Parameters:
/// - wallet: The wallet to get transactions for
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Transaction.swift b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Transaction.swift
index be4aa9824c3..e7d8f7b3875 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Transaction.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Transaction.swift
@@ -1,14 +1,6 @@
import Foundation
import DashSDKFFI
-/// Result of building and signing a transaction
-public struct BuildAndSignResult: Sendable {
- /// The signed transaction bytes
- public let transactionData: Data
- /// The fee paid in duffs
- public let fee: UInt64
-}
-
/// Transaction utilities for wallet operations
public class Transaction {
@@ -23,9 +15,11 @@ public class Transaction {
}
func toFFI() -> FFITxOutput {
- return address.withCString { addressCStr in
- FFITxOutput(address: addressCStr, amount: amount)
- }
+ // TODO: This memory is not being freed, FFI must free FFITxOutput
+ // or expose a method to do it
+ let cString = strdup(address)
+
+ return FFITxOutput(address: cString, amount: amount)
}
}
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Wallet.swift b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Wallet.swift
index 9be6cfd0c25..c8a9e79c78f 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Wallet.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/Wallet.swift
@@ -406,7 +406,7 @@ public class Wallet {
return count
}
-
+
// MARK: - Key Derivation
/// Get the extended public key for an account
diff --git a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift
index ab34303e23b..a1407da6040 100644
--- a/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift
+++ b/packages/swift-sdk/Sources/SwiftDashSDK/KeyWallet/WalletManager.swift
@@ -387,6 +387,64 @@ public class WalletManager {
return success
}
+ /// Build a signed transaction
+ /// - Parameters:
+ /// - accIndex: The account index to use
+ /// - outputs: The transaction outputs
+ /// - Returns: The signed transaction bytes and the fee
+ public func buildSignedTransaction(for wallet: HDWallet, accIndex: UInt32, outputs: [Transaction.Output]) throws -> (Data, UInt64) {
+ guard !outputs.isEmpty else {
+ throw KeyWalletError.invalidInput("Transaction must have at least one output")
+ }
+
+ var error = FFIError()
+ var txBytesPtr: UnsafeMutablePointer?
+ var txLen: size_t = 0
+
+ var fee: UInt64 = 0
+
+ guard let wallet = try self.getWallet(id: wallet.walletId) else {
+ throw KeyWalletError.walletError("Wallet not found in manager")
+ }
+
+ let ffiOutputs = outputs.map { $0.toFFI() }
+
+ let success = ffiOutputs.withUnsafeBufferPointer { outputsPtr in
+ wallet_build_and_sign_transaction(
+ self.handle,
+ wallet.ffiHandle,
+ accIndex,
+ outputsPtr.baseAddress,
+ outputs.count,
+ 1000,
+ &fee,
+ &txBytesPtr,
+ &txLen,
+ &error)
+ }
+
+ defer {
+ if error.message != nil {
+ error_message_free(error.message)
+ }
+ for _ in ffiOutputs {
+ // TODO: Memory leak, FFI doesnt expose a way to free the address
+ }
+ if let ptr = txBytesPtr {
+ transaction_bytes_free(ptr)
+ }
+ }
+
+ guard success, let ptr = txBytesPtr else {
+ throw KeyWalletError(ffiError: error)
+ }
+
+ // Copy the transaction data before freeing
+ let txData = Data(bytes: ptr, count: txLen)
+
+ return (txData, fee)
+ }
+
// MARK: - Block Height Management
/// Get the current block height for a network
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift
index c2f0f583f7f..cd017dc9085 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/ViewModels/SendViewModel.swift
@@ -270,8 +270,20 @@ class SendViewModel: ObservableObject {
successMessage = "Transfer to Platform complete"
case .coreToCore:
- // TODO: Implement standard Core → Core transaction
- error = "Core to Core transfer not yet implemented"
+ let outputs = [
+ Transaction.Output(address: recipientAddress, amount: amount)
+ ]
+
+ // TODO: The model is using hardoced estimated fees
+ let (tx, _) = try walletService.walletManager
+ .buildSignedTransaction(
+ for: wallet,
+ accIndex: 0,
+ outputs: outputs
+ )
+
+ try walletService.broadcastTransaction(tx)
+ successMessage = "Transfer to Core complete"
}
// Refresh shielded balance
diff --git a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift
index e1e7479ed42..f763ca8f06d 100644
--- a/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift
+++ b/packages/swift-sdk/SwiftExampleApp/SwiftExampleApp/Core/Views/CreateWalletView.swift
@@ -285,7 +285,7 @@ struct CreateWalletView: View {
Task {
do {
print("=== STARTING WALLET CREATION ===")
-
+
let mnemonic = (showImportOption ? importMnemonic : mnemonic)
print("PIN length: \(walletPin.count)")
print("Import option enabled: \(showImportOption)")
From 4ae905482c8026bfc997a0646ffd778b3e91bd66 Mon Sep 17 00:00:00 2001
From: QuantumExplorer
Date: Fri, 3 Apr 2026 22:50:44 +0300
Subject: [PATCH 13/40] fix(wallet-lib): fix broadcast retry not matching DAPI
error message (#3434)
Co-authored-by: Claude Opus 4.6 (1M context)
---
NIGHTLY_STATUS.md | 6 ++++--
.../src/types/Account/methods/broadcastTransaction.js | 2 +-
2 files changed, 5 insertions(+), 3 deletions(-)
diff --git a/NIGHTLY_STATUS.md b/NIGHTLY_STATUS.md
index 35a16c424c4..43efc08683f 100644
--- a/NIGHTLY_STATUS.md
+++ b/NIGHTLY_STATUS.md
@@ -37,11 +37,13 @@ These jobs only run on nightly if relevant files changed in the latest commit. T
### Test Suite: `bad-txns-inputs-missingorspent` (since ~Mar 16)
-Two withdrawal-related tests fail because Core rejects a transaction whose inputs are missing or already spent. The local network starts and processes blocks normally -- the failure is specific to the withdrawal test scenario.
+Seven tests fail because Core rejects faucet wallet funding transactions whose inputs are already in the mempool. The failures are in the Data Contract and Contacts test groups -- 1 `before all` hook failure cascades into 6 dependent Contacts tests.
-- **63 tests pass**, 2 fail
+- **65 tests pass**, 7 fail (1 Data Contract funding + 6 Contacts cascade)
- Error: `InvalidRequestError: Transaction is rejected: bad-txns-inputs-missingorspent`
+- **Root cause:** The wallet-lib retry logic at `broadcastTransaction.js:181` checks for `'invalid transaction: bad-txns-inputs-missingorspent'` but DAPI returns `'Transaction is rejected: bad-txns-inputs-missingorspent'` -- the retry never matches, so UTXO conflicts are not retried.
- **Not caused by** the `ssh2`/`nan` compilation warnings (those are non-fatal)
+- **Fix:** PR #3434 updates the check to use `.includes('bad-txns-inputs-missingorspent')`
### Functional tests: long-standing flakiness
diff --git a/packages/wallet-lib/src/types/Account/methods/broadcastTransaction.js b/packages/wallet-lib/src/types/Account/methods/broadcastTransaction.js
index 08fc2ddb4d5..814b311e594 100644
--- a/packages/wallet-lib/src/types/Account/methods/broadcastTransaction.js
+++ b/packages/wallet-lib/src/types/Account/methods/broadcastTransaction.js
@@ -178,7 +178,7 @@ async function broadcastTransaction(transaction, options = {
} catch (error) {
cancelMempoolSubscription();
- if (error.message === 'invalid transaction: bad-txns-inputs-missingorspent') {
+ if (error.message && error.message.includes('bad-txns-inputs-missingorspent')) {
if (this.broadcastRetryAttempts === MAX_RETRY_ATTEMPTS) {
throw error;
}
From 40f1e8b472d911ac66126ea92d5173b1997c33ac Mon Sep 17 00:00:00 2001
From: Pasta Lil Claw
Date: Fri, 3 Apr 2026 14:55:22 -0500
Subject: [PATCH 14/40] docs(sdk): fix platform book evo-sdk tutorial code to
match 3.1.0-dev API (#3423)
Co-authored-by: PastaClaw
Co-authored-by: Claude Opus 4.6 (1M context)
---
book/src/evo-sdk/tutorials/basic-token.md | 206 +++++++------
book/src/evo-sdk/tutorials/car-sales.md | 122 ++++----
book/src/evo-sdk/tutorials/card-game.md | 282 +++++++++++-------
.../evo-sdk/tutorials/react-integration.md | 43 +--
4 files changed, 372 insertions(+), 281 deletions(-)
diff --git a/book/src/evo-sdk/tutorials/basic-token.md b/book/src/evo-sdk/tutorials/basic-token.md
index 13125dd4da9..84a30dacc29 100644
--- a/book/src/evo-sdk/tutorials/basic-token.md
+++ b/book/src/evo-sdk/tutorials/basic-token.md
@@ -31,7 +31,12 @@ A token is defined as part of a data contract. The contract schema includes a
`tokens` section alongside the usual document schemas.
```typescript
-import { EvoSDK, wallet } from '@dashevo/evo-sdk';
+import {
+ EvoSDK, DataContract, Identifier, IdentitySigner,
+ TokenConfigurationConvention, TokenConfigurationLocalization, TokenConfiguration,
+ ChangeControlRules, AuthorizedActionTakers, TokenDistributionRules,
+ TokenKeepsHistoryRules, TokenMarketplaceRules, TokenTradeMode,
+} from '@dashevo/evo-sdk';
const sdk = EvoSDK.testnetTrusted();
await sdk.connect();
@@ -46,62 +51,75 @@ const contractSchema = {
tokenMetadata: {
type: 'object',
properties: {
- tokenName: { type: 'string', maxLength: 64 },
- description: { type: 'string', maxLength: 256 },
+ tokenName: { type: 'string', maxLength: 63, position: 0 },
+ description: { type: 'string', maxLength: 256, position: 1 },
},
additionalProperties: false,
},
};
-// Token configuration is passed separately when publishing
-const tokenConfig = {
- // Position 0 = first token in this contract
- conventions: {
- localizations: {
- en: {
- shouldCapitalize: true,
- singularForm: 'CoffeeCoin',
- pluralForm: 'CoffeeCoins',
- },
- },
- decimals: 2,
- },
- // The contract owner can mint manually
- manualMinting: {
- rules: {
- // Allow the contract owner to mint
- type: 'ownerOnly',
- },
- },
- // The contract owner can burn their own tokens
- manualBurning: {
- rules: {
- type: 'ownerOnly',
- },
- },
- // Maximum supply (optional)
- maxSupply: 1_000_000_00, // 1,000,000.00 with 2 decimals
-};
+// Build the token configuration using SDK classes
+const localization = new TokenConfigurationLocalization(true, 'CoffeeCoin', 'CoffeeCoins');
+const conventions = new TokenConfigurationConvention({ en: localization }, 2);
+
+const ownerOnly = new ChangeControlRules({
+ authorizedToMakeChange: AuthorizedActionTakers.ContractOwner(),
+ adminActionTakers: AuthorizedActionTakers.ContractOwner(),
+});
+const noOne = new ChangeControlRules({
+ authorizedToMakeChange: AuthorizedActionTakers.NoOne(),
+ adminActionTakers: AuthorizedActionTakers.NoOne(),
+});
+
+const tokenConfig = new TokenConfiguration({
+ conventions,
+ conventionsChangeRules: noOne,
+ baseSupply: 0n,
+ maxSupply: 1_000_000_00n, // 1,000,000.00 with 2 decimals
+ maxSupplyChangeRules: noOne,
+ keepsHistory: new TokenKeepsHistoryRules({
+ isKeepingMintingHistory: true,
+ isKeepingBurningHistory: true,
+ isKeepingTransferHistory: true,
+ }),
+ distributionRules: new TokenDistributionRules({
+ perpetualDistributionRules: noOne,
+ newTokensDestinationIdentityRules: noOne,
+ mintingAllowChoosingDestination: true,
+ mintingAllowChoosingDestinationRules: noOne,
+ changeDirectPurchasePricingRules: noOne,
+ }),
+ marketplaceRules: new TokenMarketplaceRules(TokenTradeMode.NotTradeable(), noOne),
+ manualMintingRules: ownerOnly,
+ manualBurningRules: ownerOnly,
+ freezeRules: noOne,
+ unfreezeRules: noOne,
+ destroyFrozenFundsRules: noOne,
+ emergencyActionRules: noOne,
+ mainControlGroupCanBeModified: AuthorizedActionTakers.NoOne(),
+});
```
## Step 2: Publish the contract
```typescript
-const contract = await sdk.contracts.publish({
- identityId,
- documentSchemas: contractSchema,
- tokens: [tokenConfig],
- privateKeyWif,
- signingKeyIndex,
- nonce: await sdk.identities.nonce(identityId),
+// Set up signing
+const identity = await sdk.identities.fetch(identityId);
+const identityKey = identity.publicKeys[signingKeyIndex];
+const signer = new IdentitySigner();
+signer.addKeyFromWif(privateKeyWif);
+
+const nonce = await sdk.identities.nonce(identityId);
+const dataContract = new DataContract({
+ ownerId: new Identifier(identityId),
+ identityNonce: nonce + 1n,
+ schemas: contractSchema,
+ tokens: { 0: tokenConfig },
});
+const contract = await sdk.contracts.publish({ dataContract, identityKey, signer });
-const contractId = contract.getId().toString();
+const contractId = contract.id.toString();
console.log('Contract published:', contractId);
-
-// Calculate the token ID (derived from contract ID + position)
-const tokenId = await sdk.tokens.calculateId(contractId, 0);
-console.log('Token ID:', tokenId);
```
## Step 3: Mint tokens
@@ -109,15 +127,21 @@ console.log('Token ID:', tokenId);
The contract owner can mint tokens to any identity:
```typescript
+// Token operations require a CRITICAL security level key.
+// Fetch a key with the appropriate security level from the identity.
+const criticalKey = identity.publicKeys[signingKeyIndex];
+const criticalSigner = new IdentitySigner();
+criticalSigner.addKeyFromWif(privateKeyWif);
+
// Mint 10,000.00 CoffeeCoins to yourself
await sdk.tokens.mint({
- tokenId,
- amount: 10_000_00, // 10,000.00 (2 decimal places)
- recipientId: identityId, // mint to yourself
- identityId,
- privateKeyWif,
- signingKeyIndex,
- nonce: await sdk.identities.nonce(identityId),
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
+ amount: 10_000_00n, // 10,000.00 (2 decimal places) — must be bigint
+ recipientId: new Identifier(identityId),
+ identityId: new Identifier(identityId),
+ identityKey: criticalKey,
+ signer: criticalSigner,
});
console.log('Minted 10,000 CoffeeCoins');
@@ -127,13 +151,13 @@ console.log('Minted 10,000 CoffeeCoins');
```typescript
await sdk.tokens.mint({
- tokenId,
- amount: 500_00, // 500.00 CoffeeCoins
- recipientId: 'RECIPIENT_IDENTITY_ID',
- identityId,
- privateKeyWif,
- signingKeyIndex,
- nonce: await sdk.identities.nonce(identityId),
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
+ amount: 500_00n, // 500.00 CoffeeCoins
+ recipientId: new Identifier('RECIPIENT_IDENTITY_ID'),
+ identityId: new Identifier(identityId),
+ identityKey: criticalKey,
+ signer: criticalSigner,
});
```
@@ -141,14 +165,17 @@ await sdk.tokens.mint({
```typescript
// Check your own balance
-const myBalances = await sdk.tokens.identityBalances(identityId, [tokenId]);
-const myBalance = myBalances.get(tokenId) ?? 0n;
+const myBalances = await sdk.tokens.identityBalances(identityId, [contractId]);
+let myBalance = 0n;
+for (const [id, balance] of myBalances) {
+ if (id.toString() === contractId) myBalance = balance;
+}
console.log('My balance:', Number(myBalance) / 100, 'CoffeeCoins');
// Check multiple identities at once
const balances = await sdk.tokens.balances(
[identityId, 'OTHER_IDENTITY_ID'],
- tokenId,
+ contractId,
);
for (const [id, balance] of balances) {
@@ -159,6 +186,7 @@ for (const [id, balance] of balances) {
### Check total supply
```typescript
+const tokenId = await sdk.tokens.calculateId(contractId, 0);
const supply = await sdk.tokens.totalSupply(tokenId);
if (supply) {
console.log('Total supply:', Number(supply.totalSupply) / 100, 'CoffeeCoins');
@@ -169,13 +197,13 @@ if (supply) {
```typescript
await sdk.tokens.transfer({
- tokenId,
- amount: 25_00, // 25.00 CoffeeCoins
- recipientId: 'RECIPIENT_IDENTITY_ID',
- identityId,
- privateKeyWif,
- signingKeyIndex,
- nonce: await sdk.identities.nonce(identityId),
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
+ amount: 25_00n, // 25.00 CoffeeCoins
+ recipientId: new Identifier('RECIPIENT_IDENTITY_ID'),
+ senderId: new Identifier(identityId),
+ identityKey: criticalKey,
+ signer: criticalSigner,
});
console.log('Transferred 25 CoffeeCoins');
@@ -187,12 +215,12 @@ Reduce the supply by burning tokens you own:
```typescript
await sdk.tokens.burn({
- tokenId,
- amount: 100_00, // 100.00 CoffeeCoins
- identityId,
- privateKeyWif,
- signingKeyIndex,
- nonce: await sdk.identities.nonce(identityId),
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
+ amount: 100_00n, // 100.00 CoffeeCoins
+ identityId: new Identifier(identityId),
+ identityKey: criticalKey,
+ signer: criticalSigner,
});
console.log('Burned 100 CoffeeCoins');
@@ -203,7 +231,7 @@ console.log('Burned 100 CoffeeCoins');
Putting it all together as a complete script:
```typescript
-import { EvoSDK } from '@dashevo/evo-sdk';
+import { EvoSDK, Identifier, IdentitySigner } from '@dashevo/evo-sdk';
async function main() {
const sdk = EvoSDK.testnetTrusted();
@@ -211,21 +239,29 @@ async function main() {
const identityId = 'YOUR_IDENTITY_ID';
const privateKeyWif = 'YOUR_PRIVATE_KEY_WIF';
- const tokenId = 'YOUR_TOKEN_ID'; // from step 2
+ const contractId = 'YOUR_CONTRACT_ID'; // from step 2
+
+ // Set up signing (token ops require a CRITICAL security level key)
+ const identity = await sdk.identities.fetch(identityId);
+ const identityKey = identity.publicKeys[0];
+ const signer = new IdentitySigner();
+ signer.addKeyFromWif(privateKeyWif);
// Check balance
- const balances = await sdk.tokens.identityBalances(identityId, [tokenId]);
- console.log('Balance:', balances.get(tokenId) ?? 0n);
+ const balances = await sdk.tokens.identityBalances(identityId, [contractId]);
+ for (const [id, balance] of balances) {
+ if (id.toString() === contractId) console.log('Balance:', balance);
+ }
// Transfer
await sdk.tokens.transfer({
- tokenId,
- amount: 10_00,
- recipientId: 'FRIEND_IDENTITY_ID',
- identityId,
- privateKeyWif,
- signingKeyIndex: 0,
- nonce: await sdk.identities.nonce(identityId),
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
+ amount: 10_00n,
+ recipientId: new Identifier('FRIEND_IDENTITY_ID'),
+ senderId: new Identifier(identityId),
+ identityKey,
+ signer,
});
console.log('Transfer complete!');
diff --git a/book/src/evo-sdk/tutorials/car-sales.md b/book/src/evo-sdk/tutorials/car-sales.md
index bcb1a96c919..b2d71b8e641 100644
--- a/book/src/evo-sdk/tutorials/car-sales.md
+++ b/book/src/evo-sdk/tutorials/car-sales.md
@@ -36,14 +36,14 @@ const carSalesSchema = {
listing: {
type: 'object',
properties: {
- make: { type: 'string', maxLength: 64 },
- model: { type: 'string', maxLength: 64 },
- year: { type: 'integer', minimum: 1900, maximum: 2100 },
- mileageKm: { type: 'integer', minimum: 0 },
- priceUsd: { type: 'integer', minimum: 0 },
- description: { type: 'string', maxLength: 1024 },
- imageUrl: { type: 'string', maxLength: 512, format: 'uri' },
- status: { type: 'string', enum: ['available', 'pending', 'sold'] },
+ make: { type: 'string', maxLength: 63, position: 0 },
+ model: { type: 'string', maxLength: 63, position: 1 },
+ year: { type: 'integer', minimum: 1900, maximum: 2100, position: 2 },
+ mileageKm: { type: 'integer', minimum: 0, position: 3 },
+ priceUsd: { type: 'integer', minimum: 0, position: 4 },
+ description: { type: 'string', maxLength: 1024, position: 5 },
+ imageUrl: { type: 'string', maxLength: 512, format: 'uri', position: 6 },
+ status: { type: 'string', enum: ['available', 'pending', 'sold'], position: 7 },
},
required: ['make', 'model', 'year', 'priceUsd', 'status'],
additionalProperties: false,
@@ -51,10 +51,10 @@ const carSalesSchema = {
review: {
type: 'object',
properties: {
- sellerId: { type: 'string', maxLength: 44 },
- listingId: { type: 'string', maxLength: 44 },
- rating: { type: 'integer', minimum: 1, maximum: 5 },
- comment: { type: 'string', maxLength: 512 },
+ sellerId: { type: 'string', maxLength: 44, position: 0 },
+ listingId: { type: 'string', maxLength: 44, position: 1 },
+ rating: { type: 'integer', minimum: 1, maximum: 5, position: 2 },
+ comment: { type: 'string', maxLength: 512, position: 3 },
},
required: ['sellerId', 'rating'],
additionalProperties: false,
@@ -65,7 +65,7 @@ const carSalesSchema = {
## Step 2: Connect and publish the contract
```typescript
-import { EvoSDK, wallet } from '@dashevo/evo-sdk';
+import { EvoSDK, DataContract, Document, Identifier, IdentitySigner } from '@dashevo/evo-sdk';
const sdk = EvoSDK.testnetTrusted();
await sdk.connect();
@@ -75,16 +75,22 @@ const identityId = 'YOUR_IDENTITY_ID';
const privateKeyWif = 'YOUR_PRIVATE_KEY_WIF';
const signingKeyIndex = 0;
+// Set up signing
+const identity = await sdk.identities.fetch(identityId);
+const identityKey = identity.publicKeys[signingKeyIndex];
+const signer = new IdentitySigner();
+signer.addKeyFromWif(privateKeyWif);
+
// Publish the data contract
-const contract = await sdk.contracts.publish({
- identityId,
- documentSchemas: carSalesSchema,
- privateKeyWif,
- signingKeyIndex,
- nonce: await sdk.identities.nonce(identityId),
+const nonce = await sdk.identities.nonce(identityId);
+const dataContract = new DataContract({
+ ownerId: new Identifier(identityId),
+ identityNonce: nonce + 1n,
+ schemas: carSalesSchema,
});
+const contract = await sdk.contracts.publish({ dataContract, identityKey, signer });
-const contractId = contract.getId().toString();
+const contractId = contract.id.toString();
console.log('Contract published:', contractId);
```
@@ -93,12 +99,11 @@ Save the `contractId` — you will need it for all subsequent operations.
## Step 3: Create a listing
```typescript
-const nonce = await sdk.identities.contractNonce(identityId, contractId);
-
-await sdk.documents.create({
- contractId,
- documentType: 'listing',
- document: {
+const doc = new Document({
+ documentTypeName: 'listing',
+ dataContractId: new Identifier(contractId),
+ ownerId: new Identifier(identityId),
+ properties: {
make: 'Toyota',
model: 'Camry',
year: 2021,
@@ -107,11 +112,8 @@ await sdk.documents.create({
description: 'Well-maintained, single owner, full service history.',
status: 'available',
},
- identityId,
- privateKeyWif,
- signingKeyIndex,
- nonce,
});
+await sdk.documents.create({ document: doc, identityKey, signer });
console.log('Listing created!');
```
@@ -121,8 +123,8 @@ console.log('Listing created!');
```typescript
// Fetch all available listings
const results = await sdk.documents.query({
- contractId,
- documentType: 'listing',
+ dataContractId: contractId,
+ documentTypeName: 'listing',
where: [['status', '==', 'available']],
orderBy: [['priceUsd', 'asc']],
limit: 20,
@@ -130,7 +132,7 @@ const results = await sdk.documents.query({
for (const [id, doc] of results) {
if (!doc) continue;
- const data = doc.getData();
+ const data = doc.properties as Record;
console.log(`${data.year} ${data.make} ${data.model} — $${data.priceUsd}`);
console.log(` ID: ${id}`);
}
@@ -140,8 +142,8 @@ for (const [id, doc] of results) {
```typescript
const toyotas = await sdk.documents.query({
- contractId,
- documentType: 'listing',
+ dataContractId: contractId,
+ documentTypeName: 'listing',
where: [
['make', '==', 'Toyota'],
['status', '==', 'available'],
@@ -157,24 +159,11 @@ Mark a listing as sold:
```typescript
const listingId = 'THE_LISTING_DOCUMENT_ID';
-await sdk.documents.replace({
- contractId,
- documentType: 'listing',
- documentId: listingId,
- document: {
- make: 'Toyota',
- model: 'Camry',
- year: 2021,
- mileageKm: 45000,
- priceUsd: 22500,
- description: 'Well-maintained, single owner, full service history.',
- status: 'sold',
- },
- identityId,
- privateKeyWif,
- signingKeyIndex,
- nonce: await sdk.identities.contractNonce(identityId, contractId),
-});
+// Fetch the existing document, modify it, and bump the revision
+const existing = await sdk.documents.get(contractId, 'listing', listingId);
+existing.properties = { ...existing.properties, status: 'sold' };
+existing.revision = (existing.revision ?? 0n) + 1n;
+await sdk.documents.replace({ document: existing, identityKey, signer });
console.log('Listing marked as sold');
```
@@ -182,28 +171,32 @@ console.log('Listing marked as sold');
## Step 6: Leave a review
```typescript
-await sdk.documents.create({
- contractId,
- documentType: 'review',
- document: {
+// Set up buyer signing
+const buyerIdentity = await sdk.identities.fetch(buyerIdentityId);
+const buyerKey = buyerIdentity.publicKeys[0];
+const buyerSigner = new IdentitySigner();
+buyerSigner.addKeyFromWif(buyerKeyWif);
+
+const reviewDoc = new Document({
+ documentTypeName: 'review',
+ dataContractId: new Identifier(contractId),
+ ownerId: new Identifier(buyerIdentityId),
+ properties: {
sellerId: 'SELLER_IDENTITY_ID',
listingId: 'THE_LISTING_DOCUMENT_ID',
rating: 5,
comment: 'Great seller, car was exactly as described!',
},
- identityId: buyerIdentityId,
- privateKeyWif: buyerKeyWif,
- signingKeyIndex: 0,
- nonce: await sdk.identities.contractNonce(buyerIdentityId, contractId),
});
+await sdk.documents.create({ document: reviewDoc, identityKey: buyerKey, signer: buyerSigner });
```
### Query reviews for a seller
```typescript
const reviews = await sdk.documents.query({
- contractId,
- documentType: 'review',
+ dataContractId: contractId,
+ documentTypeName: 'review',
where: [['sellerId', '==', 'SELLER_IDENTITY_ID']],
orderBy: [['rating', 'desc']],
limit: 50,
@@ -213,7 +206,8 @@ let totalRating = 0;
let count = 0;
for (const [, doc] of reviews) {
if (!doc) continue;
- totalRating += doc.getData().rating;
+ const props = doc.properties as Record;
+ totalRating += props.rating as number;
count++;
}
console.log(`Average rating: ${(totalRating / count).toFixed(1)} (${count} reviews)`);
diff --git a/book/src/evo-sdk/tutorials/card-game.md b/book/src/evo-sdk/tutorials/card-game.md
index 89567f8eee7..79b24fd40ed 100644
--- a/book/src/evo-sdk/tutorials/card-game.md
+++ b/book/src/evo-sdk/tutorials/card-game.md
@@ -37,17 +37,23 @@ The contract defines three document types and one token:
- **GemToken** — In-game currency for buying card packs
```typescript
+import {
+ TokenConfigurationConvention, TokenConfigurationLocalization, TokenConfiguration,
+ ChangeControlRules, AuthorizedActionTakers, TokenDistributionRules,
+ TokenKeepsHistoryRules, TokenMarketplaceRules, TokenTradeMode,
+} from '@dashevo/evo-sdk';
+
const gameSchema = {
card: {
type: 'object',
properties: {
- name: { type: 'string', maxLength: 64 },
- element: { type: 'string', enum: ['fire', 'water', 'earth', 'air', 'shadow'] },
- rarity: { type: 'string', enum: ['common', 'uncommon', 'rare', 'legendary'] },
- power: { type: 'integer', minimum: 1, maximum: 100 },
- defense: { type: 'integer', minimum: 1, maximum: 100 },
- ability: { type: 'string', maxLength: 128 },
- edition: { type: 'integer', minimum: 1 },
+ name: { type: 'string', maxLength: 63, position: 0 },
+ element: { type: 'string', enum: ['fire', 'water', 'earth', 'air', 'shadow'], position: 1 },
+ rarity: { type: 'string', enum: ['common', 'uncommon', 'rare', 'legendary'], position: 2 },
+ power: { type: 'integer', minimum: 1, maximum: 100, position: 3 },
+ defense: { type: 'integer', minimum: 1, maximum: 100, position: 4 },
+ ability: { type: 'string', maxLength: 128, position: 5 },
+ edition: { type: 'integer', minimum: 1, position: 6 },
},
required: ['name', 'element', 'rarity', 'power', 'defense', 'edition'],
additionalProperties: false,
@@ -55,12 +61,13 @@ const gameSchema = {
deck: {
type: 'object',
properties: {
- name: { type: 'string', maxLength: 64 },
+ name: { type: 'string', maxLength: 63, position: 0 },
cardIds: {
type: 'array',
items: { type: 'string', maxLength: 44 },
minItems: 5,
maxItems: 10,
+ position: 1,
},
},
required: ['name', 'cardIds'],
@@ -69,43 +76,64 @@ const gameSchema = {
match: {
type: 'object',
properties: {
- player1Id: { type: 'string', maxLength: 44 },
- player2Id: { type: 'string', maxLength: 44 },
- winnerId: { type: 'string', maxLength: 44 },
- player1Score: { type: 'integer', minimum: 0 },
- player2Score: { type: 'integer', minimum: 0 },
- timestamp: { type: 'integer' },
+ player1Id: { type: 'string', maxLength: 44, position: 0 },
+ player2Id: { type: 'string', maxLength: 44, position: 1 },
+ winnerId: { type: 'string', maxLength: 44, position: 2 },
+ player1Score: { type: 'integer', minimum: 0, position: 3 },
+ player2Score: { type: 'integer', minimum: 0, position: 4 },
+ timestamp: { type: 'integer', position: 5 },
},
required: ['player1Id', 'player2Id', 'winnerId', 'timestamp'],
additionalProperties: false,
},
};
-const gemTokenConfig = {
- conventions: {
- localizations: {
- en: {
- shouldCapitalize: true,
- singularForm: 'Gem',
- pluralForm: 'Gems',
- },
- },
- decimals: 0, // whole numbers only
- },
- manualMinting: {
- rules: { type: 'ownerOnly' },
- },
- manualBurning: {
- rules: { type: 'ownerOnly' },
- },
- maxSupply: 10_000_000, // 10 million Gems total
-};
+// Build the token configuration using SDK classes
+const localization = new TokenConfigurationLocalization(true, 'Gem', 'Gems');
+const conventions = new TokenConfigurationConvention({ en: localization }, 0);
+
+const ownerOnly = new ChangeControlRules({
+ authorizedToMakeChange: AuthorizedActionTakers.ContractOwner(),
+ adminActionTakers: AuthorizedActionTakers.ContractOwner(),
+});
+const noOne = new ChangeControlRules({
+ authorizedToMakeChange: AuthorizedActionTakers.NoOne(),
+ adminActionTakers: AuthorizedActionTakers.NoOne(),
+});
+
+const gemTokenConfig = new TokenConfiguration({
+ conventions,
+ conventionsChangeRules: noOne,
+ baseSupply: 0n,
+ maxSupply: 10_000_000n, // 10 million Gems total
+ maxSupplyChangeRules: noOne,
+ keepsHistory: new TokenKeepsHistoryRules({
+ isKeepingMintingHistory: true,
+ isKeepingBurningHistory: true,
+ isKeepingTransferHistory: true,
+ }),
+ distributionRules: new TokenDistributionRules({
+ perpetualDistributionRules: noOne,
+ newTokensDestinationIdentityRules: noOne,
+ mintingAllowChoosingDestination: true,
+ mintingAllowChoosingDestinationRules: noOne,
+ changeDirectPurchasePricingRules: noOne,
+ }),
+ marketplaceRules: new TokenMarketplaceRules(TokenTradeMode.NotTradeable(), noOne),
+ manualMintingRules: ownerOnly,
+ manualBurningRules: ownerOnly,
+ freezeRules: noOne,
+ unfreezeRules: noOne,
+ destroyFrozenFundsRules: noOne,
+ emergencyActionRules: noOne,
+ mainControlGroupCanBeModified: AuthorizedActionTakers.NoOne(),
+});
```
## Step 2: Deploy the contract
```typescript
-import { EvoSDK } from '@dashevo/evo-sdk';
+import { EvoSDK, DataContract, Document, Identifier, IdentitySigner } from '@dashevo/evo-sdk';
const sdk = EvoSDK.testnetTrusted();
await sdk.connect();
@@ -114,20 +142,28 @@ await sdk.connect();
const operatorId = 'OPERATOR_IDENTITY_ID';
const operatorKey = 'OPERATOR_PRIVATE_KEY_WIF';
+// Set up signing
+const operatorIdentity = await sdk.identities.fetch(operatorId);
+const operatorIdentityKey = operatorIdentity.publicKeys[0];
+const operatorSigner = new IdentitySigner();
+operatorSigner.addKeyFromWif(operatorKey);
+
+const nonce = await sdk.identities.nonce(operatorId);
+const dataContract = new DataContract({
+ ownerId: new Identifier(operatorId),
+ identityNonce: nonce + 1n,
+ schemas: gameSchema,
+ tokens: { 0: gemTokenConfig },
+});
const contract = await sdk.contracts.publish({
- identityId: operatorId,
- documentSchemas: gameSchema,
- tokens: [gemTokenConfig],
- privateKeyWif: operatorKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.nonce(operatorId),
+ dataContract,
+ identityKey: operatorIdentityKey,
+ signer: operatorSigner,
});
-const contractId = contract.getId().toString();
-const gemTokenId = await sdk.tokens.calculateId(contractId, 0);
+const contractId = contract.id.toString();
console.log('Game contract:', contractId);
-console.log('Gem token:', gemTokenId);
```
## Step 3: Mint starter Gems for a new player
@@ -136,15 +172,16 @@ When a player joins, give them starter Gems:
```typescript
async function onboardPlayer(playerId: string) {
+ // Token operations require a CRITICAL security level key
// Gift 100 Gems to the new player
await sdk.tokens.mint({
- tokenId: gemTokenId,
- amount: 100,
- recipientId: playerId,
- identityId: operatorId,
- privateKeyWif: operatorKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.nonce(operatorId),
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
+ amount: 100n,
+ recipientId: new Identifier(playerId),
+ identityId: new Identifier(operatorId),
+ identityKey: operatorIdentityKey,
+ signer: operatorSigner,
});
console.log(`Welcomed ${playerId} with 100 Gems`);
@@ -168,14 +205,16 @@ const starterPack = [
async function createCards(cards: typeof starterPack) {
for (const card of cards) {
+ const cardDoc = new Document({
+ documentTypeName: 'card',
+ dataContractId: new Identifier(contractId),
+ ownerId: new Identifier(operatorId),
+ properties: card,
+ });
await sdk.documents.create({
- contractId,
- documentType: 'card',
- document: card,
- identityId: operatorId,
- privateKeyWif: operatorKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.contractNonce(operatorId, contractId),
+ document: cardDoc,
+ identityKey: operatorIdentityKey,
+ signer: operatorSigner,
});
console.log(`Created: ${card.name} (${card.rarity})`);
}
@@ -191,26 +230,32 @@ The purchase flow:
2. Operator transfers card documents to the player
```typescript
-const PACK_PRICE = 50; // 50 Gems per pack
+const PACK_PRICE = 50n; // 50 Gems per pack
async function buyPack(playerId: string, playerKey: string) {
+ // Set up player signing (token ops require CRITICAL security level key)
+ const playerIdentity = await sdk.identities.fetch(playerId);
+ const playerIdentityKey = playerIdentity.publicKeys[0];
+ const playerSigner = new IdentitySigner();
+ playerSigner.addKeyFromWif(playerKey);
+
// Player pays Gems to the operator
await sdk.tokens.transfer({
- tokenId: gemTokenId,
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
amount: PACK_PRICE,
- recipientId: operatorId,
- identityId: playerId,
- privateKeyWif: playerKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.nonce(playerId),
+ recipientId: new Identifier(operatorId),
+ senderId: new Identifier(playerId),
+ identityKey: playerIdentityKey,
+ signer: playerSigner,
});
console.log(`Player paid ${PACK_PRICE} Gems`);
// Operator transfers cards to the player
// (In production, select random cards from available pool)
const availableCards = await sdk.documents.query({
- contractId,
- documentType: 'card',
+ dataContractId: contractId,
+ documentTypeName: 'card',
where: [['$ownerId', '==', operatorId]],
limit: 5,
});
@@ -218,16 +263,13 @@ async function buyPack(playerId: string, playerKey: string) {
for (const [cardId, card] of availableCards) {
if (!card) continue;
await sdk.documents.transfer({
- contractId,
- documentType: 'card',
- documentId: cardId,
- recipientId: playerId,
- identityId: operatorId,
- privateKeyWif: operatorKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.contractNonce(operatorId, contractId),
+ document: card,
+ recipientId: new Identifier(playerId),
+ identityKey: operatorIdentityKey,
+ signer: operatorSigner,
});
- console.log(`Transferred ${card.getData().name} to player`);
+ const props = card.properties as Record;
+ console.log(`Transferred ${props.name} to player`);
}
}
```
@@ -237,8 +279,8 @@ async function buyPack(playerId: string, playerKey: string) {
```typescript
async function getCollection(playerId: string) {
const cards = await sdk.documents.query({
- contractId,
- documentType: 'card',
+ dataContractId: contractId,
+ documentTypeName: 'card',
where: [['$ownerId', '==', playerId]],
orderBy: [['power', 'desc']],
limit: 100,
@@ -247,7 +289,7 @@ async function getCollection(playerId: string) {
console.log(`\n${playerId}'s collection:`);
for (const [id, card] of cards) {
if (!card) continue;
- const d = card.getData();
+ const d = card.properties as Record;
console.log(` [${d.rarity}] ${d.name} — ${d.element} — ATK:${d.power} DEF:${d.defense}`);
}
@@ -259,8 +301,8 @@ async function getCollection(playerId: string) {
```typescript
const legendaries = await sdk.documents.query({
- contractId,
- documentType: 'card',
+ dataContractId: contractId,
+ documentTypeName: 'card',
where: [
['$ownerId', '==', playerId],
['rarity', '==', 'legendary'],
@@ -278,28 +320,35 @@ async function tradeCards(
fromId: string, fromKey: string, fromCardId: string,
toId: string, toKey: string, toCardId: string,
) {
+ // Set up signers for both players
+ const fromIdentity = await sdk.identities.fetch(fromId);
+ const fromIdentityKey = fromIdentity.publicKeys[0];
+ const fromSigner = new IdentitySigner();
+ fromSigner.addKeyFromWif(fromKey);
+
+ const toIdentity = await sdk.identities.fetch(toId);
+ const toIdentityKey = toIdentity.publicKeys[0];
+ const toSigner = new IdentitySigner();
+ toSigner.addKeyFromWif(toKey);
+
+ // Fetch both card documents
+ const fromCard = await sdk.documents.get(contractId, 'card', fromCardId);
+ const toCard = await sdk.documents.get(contractId, 'card', toCardId);
+
// Player A sends their card to Player B
await sdk.documents.transfer({
- contractId,
- documentType: 'card',
- documentId: fromCardId,
- recipientId: toId,
- identityId: fromId,
- privateKeyWif: fromKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.contractNonce(fromId, contractId),
+ document: fromCard,
+ recipientId: new Identifier(toId),
+ identityKey: fromIdentityKey,
+ signer: fromSigner,
});
// Player B sends their card to Player A
await sdk.documents.transfer({
- contractId,
- documentType: 'card',
- documentId: toCardId,
- recipientId: fromId,
- identityId: toId,
- privateKeyWif: toKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.contractNonce(toId, contractId),
+ document: toCard,
+ recipientId: new Identifier(fromId),
+ identityKey: toIdentityKey,
+ signer: toSigner,
});
console.log('Trade complete!');
@@ -314,10 +363,11 @@ async function recordMatch(
winnerId: string,
p1Score: number, p2Score: number,
) {
- await sdk.documents.create({
- contractId,
- documentType: 'match',
- document: {
+ const matchDoc = new Document({
+ documentTypeName: 'match',
+ dataContractId: new Identifier(contractId),
+ ownerId: new Identifier(operatorId),
+ properties: {
player1Id,
player2Id,
winnerId,
@@ -325,21 +375,22 @@ async function recordMatch(
player2Score: p2Score,
timestamp: Date.now(),
},
- identityId: operatorId,
- privateKeyWif: operatorKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.contractNonce(operatorId, contractId),
+ });
+ await sdk.documents.create({
+ document: matchDoc,
+ identityKey: operatorIdentityKey,
+ signer: operatorSigner,
});
- // Reward the winner with Gems
+ // Reward the winner with Gems (token ops require CRITICAL security level key)
await sdk.tokens.mint({
- tokenId: gemTokenId,
- amount: 10,
- recipientId: winnerId,
- identityId: operatorId,
- privateKeyWif: operatorKey,
- signingKeyIndex: 0,
- nonce: await sdk.identities.nonce(operatorId),
+ dataContractId: new Identifier(contractId),
+ tokenPosition: 0,
+ amount: 10n,
+ recipientId: new Identifier(winnerId),
+ identityId: new Identifier(operatorId),
+ identityKey: operatorIdentityKey,
+ signer: operatorSigner,
});
console.log(`Match recorded. ${winnerId} wins and earns 10 Gems!`);
@@ -353,8 +404,8 @@ Query match history to build a win count:
```typescript
async function getWinCounts() {
const matches = await sdk.documents.query({
- contractId,
- documentType: 'match',
+ dataContractId: contractId,
+ documentTypeName: 'match',
orderBy: [['timestamp', 'desc']],
limit: 100,
});
@@ -362,7 +413,8 @@ async function getWinCounts() {
const wins = new Map();
for (const [, doc] of matches) {
if (!doc) continue;
- const winner = doc.getData().winnerId;
+ const props = doc.properties as Record;
+ const winner = props.winnerId as string;
wins.set(winner, (wins.get(winner) ?? 0) + 1);
}
diff --git a/book/src/evo-sdk/tutorials/react-integration.md b/book/src/evo-sdk/tutorials/react-integration.md
index a20b6dc1cd2..7bc4f00fad6 100644
--- a/book/src/evo-sdk/tutorials/react-integration.md
+++ b/book/src/evo-sdk/tutorials/react-integration.md
@@ -221,7 +221,10 @@ export function useTokenBalance(identityId: string, tokenId: string) {
return useDashQuery(
async (sdk) => {
const balances = await sdk.tokens.identityBalances(identityId, [tokenId]);
- return balances.get(tokenId) ?? 0n;
+ for (const [id, balance] of balances) {
+ if (id.toString() === tokenId) return balance;
+ }
+ return 0n;
},
[identityId, tokenId],
);
@@ -323,9 +326,9 @@ export function IdentityViewer() {
{error &&