From 2dd0619f68afe26a22fae14a0d84240ca4027608 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Mon, 17 Mar 2025 22:04:36 +0700 Subject: [PATCH 01/21] Squashed commit of the following: commit 61ad58a887ab59b45910dc48b083d7dc5a4cf7e4 Merge: 8d99708470 6161f75533 Author: QuantumExplorer Date: Mon Mar 17 21:39:12 2025 +0700 Merge branch 'v2.0-dev' into feat/getIdentitiesByPublicKeyHashes1 commit 8d997084709fd5bb39f415ea26bd65adf1ffe71d Author: Quantum Explorer Date: Thu Mar 6 21:04:45 2025 +0700 more work commit 0f26760b78bf5c3e48fb090332416bad4eebb031 Author: Quantum Explorer Date: Mon Mar 3 21:03:26 2025 +0700 temp work --- .../protos/platform/v0/platform.proto | 31 +++ .../mod.rs | 70 ++++++ .../v0/mod.rs | 192 +++++++++++++++ .../mod.rs | 6 +- .../v0/mod.rs | 8 +- .../src/query/identity_based_queries/mod.rs | 3 +- packages/rs-drive-abci/src/query/service.rs | 13 ++ .../tests/strategy_tests/query.rs | 2 +- packages/rs-drive-proof-verifier/src/proof.rs | 2 +- .../mod.rs | 53 +++++ .../v0/mod.rs | 53 +++++ .../mod.rs | 50 +++- .../v0/mod.rs | 33 ++- .../fetch/fetch_by_public_key_hashes/mod.rs | 6 +- .../src/drive/identity/fetch/prove/mod.rs | 1 + .../mod.rs | 75 ++++++ .../v0/mod.rs | 219 ++++++++++++++++++ .../v0/mod.rs | 2 +- .../v0/mod.rs | 2 +- .../v0/mod.rs | 2 +- .../src/drive/identity/fetch/queries/mod.rs | 39 +++- ...non_unique_public_key_hash_double_proof.rs | 21 ++ packages/rs-drive/src/drive/identity/mod.rs | 4 + packages/rs-drive/src/verify/identity/mod.rs | 8 +- .../v0/mod.rs | 2 +- .../mod.rs | 79 +++++++ .../v0/mod.rs | 82 +++++++ .../mod.rs | 6 +- .../v0/mod.rs | 4 +- .../mod.rs | 65 ++++++ .../v0/mod.rs | 69 ++++++ .../mod.rs | 8 +- .../v0/mod.rs | 2 +- .../mod.rs | 8 +- .../v0/mod.rs | 2 +- .../drive_abci_query_versions/mod.rs | 2 +- .../drive_abci_query_versions/v1.rs | 2 +- .../drive_identity_method_versions/mod.rs | 2 + .../drive_identity_method_versions/v1.rs | 2 + .../drive_verify_method_versions/mod.rs | 6 +- 
.../drive_verify_method_versions/v1.rs | 6 +- .../src/version/mocks/v2_test.rs | 2 +- 42 files changed, 1186 insertions(+), 58 deletions(-) create mode 100644 packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs rename packages/rs-drive-abci/src/query/identity_based_queries/{identity_by_public_key_hash => identity_by_unique_public_key_hash}/mod.rs (92%) rename packages/rs-drive-abci/src/query/identity_based_queries/{identity_by_public_key_hash => identity_by_unique_public_key_hash}/v0/mod.rs (94%) create mode 100644 packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs create mode 100644 packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs rename packages/rs-drive/src/verify/identity/{verify_full_identity_by_public_key_hash => verify_full_identity_by_unique_public_key_hash}/mod.rs (89%) rename packages/rs-drive/src/verify/identity/{verify_full_identity_by_public_key_hash => verify_full_identity_by_unique_public_key_hash}/v0/mod.rs (96%) create mode 100644 packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs create mode 
100644 packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs rename packages/rs-drive/src/verify/identity/{verify_identity_id_by_public_key_hash => verify_identity_id_by_unique_public_key_hash}/mod.rs (86%) rename packages/rs-drive/src/verify/identity/{verify_identity_id_by_public_key_hash => verify_identity_id_by_unique_public_key_hash}/v0/mod.rs (98%) rename packages/rs-drive/src/verify/identity/{verify_identity_ids_by_public_key_hashes => verify_identity_ids_by_unique_public_key_hashes}/mod.rs (87%) rename packages/rs-drive/src/verify/identity/{verify_identity_ids_by_public_key_hashes => verify_identity_ids_by_unique_public_key_hashes}/v0/mod.rs (98%) diff --git a/packages/dapi-grpc/protos/platform/v0/platform.proto b/packages/dapi-grpc/protos/platform/v0/platform.proto index 905ef6c1d23..4d8ddd09363 100644 --- a/packages/dapi-grpc/protos/platform/v0/platform.proto +++ b/packages/dapi-grpc/protos/platform/v0/platform.proto @@ -33,6 +33,8 @@ service Platform { rpc getDocuments(GetDocumentsRequest) returns (GetDocumentsResponse); rpc getIdentityByPublicKeyHash(GetIdentityByPublicKeyHashRequest) returns (GetIdentityByPublicKeyHashResponse); + rpc getIdentityByNonUniquePublicKeyHash(GetIdentityByNonUniquePublicKeyHashRequest) + returns (GetIdentityByNonUniquePublicKeyHashResponse); rpc waitForStateTransitionResult(WaitForStateTransitionResultRequest) returns (WaitForStateTransitionResultResponse); rpc getConsensusParams(GetConsensusParamsRequest) @@ -620,6 +622,35 @@ message GetIdentityByPublicKeyHashResponse { oneof version { GetIdentityByPublicKeyHashResponseV0 v0 = 1; } } +message GetIdentityByNonUniquePublicKeyHashRequest { + message GetIdentityByNonUniquePublicKeyHashRequestV0 { + bytes public_key_hash = 1; + optional bytes start_after = 2; // Give one result after a previous result + bool prove = 3; + } + oneof version { GetIdentityByNonUniquePublicKeyHashRequestV0 v0 = 1; } +} + +message 
GetIdentityByNonUniquePublicKeyHashResponse { + message GetIdentityByNonUniquePublicKeyHashResponseV0 { + message IdentityResponse { + optional bytes identity = 1; + } + + message IdentityProvedResponse { + Proof grovedb_identity_public_key_hash_proof = 1; + optional bytes identity_proof_bytes = 2; // A hack, we return 2 proofs + } + oneof result { + IdentityResponse identity = 1; + IdentityProvedResponse proof = 2; + } + + ResponseMetadata metadata = 3; // Metadata about the blockchain state + } + oneof version { GetIdentityByNonUniquePublicKeyHashResponseV0 v0 = 1; } +} + message WaitForStateTransitionResultRequest { message WaitForStateTransitionResultRequestV0 { bytes state_transition_hash = 1; // The hash of the state transition to wait for diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..33403fad6a5 --- /dev/null +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,70 @@ +use crate::error::query::QueryError; +use crate::error::Error; +use crate::platform_types::platform::Platform; +use crate::platform_types::platform_state::PlatformState; +use crate::query::QueryValidationResult; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request::Version as RequestVersion; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::Version as ResponseVersion; +use dapi_grpc::platform::v0::{ + GetIdentityByNonUniquePublicKeyHashRequest, GetIdentityByNonUniquePublicKeyHashResponse, + GetIdentityByPublicKeyHashResponse, +}; +use dpp::version::PlatformVersion; + +mod v0; + +impl Platform { + /// Querying of an identity by a public key hash + pub fn query_identity_by_non_unique_public_key_hash( + &self, + GetIdentityByNonUniquePublicKeyHashRequest { version 
}: GetIdentityByNonUniquePublicKeyHashRequest, + platform_state: &PlatformState, + platform_version: &PlatformVersion, + ) -> Result, Error> { + let Some(version) = version else { + return Ok(QueryValidationResult::new_with_error( + QueryError::DecodingError( + "could not decode identity by public key non unique hash query".to_string(), + ), + )); + }; + + let feature_version_bounds = &platform_version + .drive_abci + .query + .identity_based_queries + .identity_by_unique_public_key_hash; + + let feature_version = match &version { + RequestVersion::V0(_) => 0, + }; + + if !feature_version_bounds.check_version(feature_version) { + return Ok(QueryValidationResult::new_with_error( + QueryError::UnsupportedQueryVersion( + "identity_by_non_unique_public_key_hash".to_string(), + feature_version_bounds.min_version, + feature_version_bounds.max_version, + platform_version.protocol_version, + feature_version, + ), + )); + } + + match version { + RequestVersion::V0(request_v0) => { + let request = self.query_identity_by_non_unique_public_key_hash_v0( + request_v0, + platform_state, + platform_version, + )?; + + Ok( + request.map(|response_v0| GetIdentityByNonUniquePublicKeyHashResponse { + version: Some(ResponseVersion::V0(response_v0)), + }), + ) + } + } + } +} diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..a9a3ac7dd42 --- /dev/null +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,192 @@ +use crate::error::query::QueryError; +use crate::error::Error; +use crate::platform_types::platform::Platform; +use crate::platform_types::platform_state::PlatformState; +use crate::query::QueryValidationResult; +use 
dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request::GetIdentityByNonUniquePublicKeyHashRequestV0; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::{ + get_identity_by_non_unique_public_key_hash_response_v0, GetIdentityByNonUniquePublicKeyHashResponseV0, +}; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::get_identity_by_non_unique_public_key_hash_response_v0::{IdentityProvedResponse, IdentityResponse}; +use dpp::check_validation_result_with_data; +use dpp::platform_value::{Bytes20, Bytes32}; +use dpp::serialization::PlatformSerializable; +use dpp::validation::ValidationResult; +use dpp::version::PlatformVersion; + +impl Platform { + pub(super) fn query_identity_by_non_unique_public_key_hash_v0( + &self, + GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash, + start_after, + prove, + }: GetIdentityByNonUniquePublicKeyHashRequestV0, + platform_state: &PlatformState, + platform_version: &PlatformVersion, + ) -> Result, Error> { + let public_key_hash = + check_validation_result_with_data!(Bytes20::from_vec(public_key_hash) + .map(|bytes| bytes.0) + .map_err(|_| QueryError::InvalidArgument( + "public key hash must be 20 bytes long".to_string() + ))); + + let start_after = if let Some(start_after) = start_after { + Some(check_validation_result_with_data!(Bytes32::from_vec( + start_after + ) + .map(|bytes| bytes.0) + .map_err(|_| QueryError::InvalidArgument( + "public key hash must be 20 bytes long".to_string() + )))) + } else { + None + }; + + let response = if prove { + let proof = self + .drive + .prove_full_identity_by_non_unique_public_key_hash( + public_key_hash, + start_after, + None, + platform_version, + )?; + + GetIdentityByNonUniquePublicKeyHashResponseV0 { + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Proof( + IdentityProvedResponse { + grovedb_identity_public_key_hash_proof: Some(self.response_proof_v0( + 
platform_state, + proof.identity_id_public_key_hash_proof, + )), + identity_proof_bytes: proof.identity_proof, + }, + ), + ), + metadata: Some(self.response_metadata_v0(platform_state)), + } + } else { + let maybe_identity = self + .drive + .fetch_full_identity_by_non_unique_public_key_hash( + public_key_hash, + start_after, + None, + platform_version, + )?; + + let serialized_identity = maybe_identity + .map(|identity| { + identity + .serialize_consume_to_bytes() + .map_err(Error::Protocol) + }) + .transpose()?; + + GetIdentityByNonUniquePublicKeyHashResponseV0 { + metadata: Some(self.response_metadata_v0(platform_state)), + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Identity( + IdentityResponse { + identity: serialized_identity, + }, + ), + ), + } + }; + + Ok(QueryValidationResult::new_with_data(response)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::query::tests::setup_platform; + use dapi_grpc::platform::v0::ResponseMetadata; + use dpp::dashcore::Network; + + #[test] + fn test_invalid_public_key_hash() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: vec![0; 8], + start_after: None, + prove: false, + }; + + let result = platform + .query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert!(matches!( + result.errors.as_slice(), + [QueryError::InvalidArgument(msg)] if msg == &"public key hash must be 20 bytes long".to_string() + )); + } + + #[test] + fn test_identity_not_found() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let public_key_hash = vec![0; 20]; + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: public_key_hash.clone(), + start_after: None, + prove: false, + }; + + let result = platform + 
.query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert_eq!( + result.data, + Some(GetIdentityByNonUniquePublicKeyHashResponseV0 { + metadata: Some(ResponseMetadata { + height: 0, + core_chain_locked_height: 0, + epoch: 0, + time_ms: 0, + protocol_version: 9, + chain_id: "chain_id".to_string() + }), + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Identity( + IdentityResponse { identity: None } + ) + ), + }) + ); + } + + #[test] + fn test_identity_absence_proof() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let public_key_hash = vec![0; 20]; + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: public_key_hash.clone(), + start_after: None, + prove: true, + }; + + let result = platform + .query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert!(matches!( + result.data, + Some(GetIdentityByNonUniquePublicKeyHashResponseV0 { + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Proof(_) + ), + metadata: Some(_), + }) + )); + } +} diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/mod.rs similarity index 92% rename from packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/mod.rs rename to packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/mod.rs index db20b658b6f..2585d26afb8 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/mod.rs @@ -32,7 +32,7 @@ impl Platform { .drive_abci .query .identity_based_queries - .identity_by_public_key_hash; + 
.identity_by_unique_public_key_hash; let feature_version = match &version { RequestVersion::V0(_) => 0, @@ -41,7 +41,7 @@ impl Platform { if !feature_version_bounds.check_version(feature_version) { return Ok(QueryValidationResult::new_with_error( QueryError::UnsupportedQueryVersion( - "identity_by_public_key_hash".to_string(), + "identity_by_unique_public_key_hash".to_string(), feature_version_bounds.min_version, feature_version_bounds.max_version, platform_version.protocol_version, @@ -52,7 +52,7 @@ impl Platform { match version { RequestVersion::V0(request_v0) => { - let request = self.query_identity_by_public_key_hash_v0( + let request = self.query_identity_by_unique_public_key_hash_v0( request_v0, platform_state, platform_version, diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/v0/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/v0/mod.rs similarity index 94% rename from packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/v0/mod.rs rename to packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/v0/mod.rs index 0d3db5c8023..012f289e027 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/v0/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/v0/mod.rs @@ -14,7 +14,7 @@ use dpp::validation::ValidationResult; use dpp::version::PlatformVersion; impl Platform { - pub(super) fn query_identity_by_public_key_hash_v0( + pub(super) fn query_identity_by_unique_public_key_hash_v0( &self, GetIdentityByPublicKeyHashRequestV0 { public_key_hash, @@ -91,7 +91,7 @@ mod tests { }; let result = platform - .query_identity_by_public_key_hash_v0(request, &state, version) + .query_identity_by_unique_public_key_hash_v0(request, &state, version) .expect("expected query to succeed"); assert!(matches!( @@ -111,7 +111,7 @@ mod 
tests { }; let result = platform - .query_identity_by_public_key_hash_v0(request, &state, version) + .query_identity_by_unique_public_key_hash_v0(request, &state, version) .expect("expected query to succeed"); assert!(matches!( @@ -131,7 +131,7 @@ mod tests { }; let result = platform - .query_identity_by_public_key_hash_v0(request, &state, version) + .query_identity_by_unique_public_key_hash_v0(request, &state, version) .expect("expected query to succeed"); assert!(matches!( diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs index 29fea765215..98b92b25128 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs @@ -3,7 +3,8 @@ mod balance_and_revision; mod balances; mod identities_contract_keys; mod identity; -mod identity_by_public_key_hash; +mod identity_by_non_unique_public_key_hash; +mod identity_by_unique_public_key_hash; mod identity_contract_nonce; mod identity_nonce; mod keys; diff --git a/packages/rs-drive-abci/src/query/service.rs b/packages/rs-drive-abci/src/query/service.rs index 156ae629983..9d77f180745 100644 --- a/packages/rs-drive-abci/src/query/service.rs +++ b/packages/rs-drive-abci/src/query/service.rs @@ -28,6 +28,7 @@ use dapi_grpc::platform::v0::{ GetIdentitiesTokenBalancesResponse, GetIdentitiesTokenInfosRequest, GetIdentitiesTokenInfosResponse, GetIdentityBalanceAndRevisionRequest, GetIdentityBalanceAndRevisionResponse, GetIdentityBalanceRequest, GetIdentityBalanceResponse, + GetIdentityByNonUniquePublicKeyHashRequest, GetIdentityByNonUniquePublicKeyHashResponse, GetIdentityByPublicKeyHashRequest, GetIdentityByPublicKeyHashResponse, GetIdentityContractNonceRequest, GetIdentityContractNonceResponse, GetIdentityKeysRequest, GetIdentityKeysResponse, GetIdentityNonceRequest, GetIdentityNonceResponse, GetIdentityRequest, @@ -411,6 +412,18 @@ impl PlatformService for 
QueryService { .await } + async fn get_identity_by_non_unique_public_key_hash( + &self, + request: Request, + ) -> Result, Status> { + self.handle_blocking_query( + request, + Platform::::query_identity_by_non_unique_public_key_hash, + "get_identity_by_non_unique_public_key_hash", + ) + .await + } + async fn wait_for_state_transition_result( &self, _request: Request, diff --git a/packages/rs-drive-abci/tests/strategy_tests/query.rs b/packages/rs-drive-abci/tests/strategy_tests/query.rs index 4a4103c4dbc..5e0aaba3d85 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/query.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/query.rs @@ -266,7 +266,7 @@ impl QueryStrategy { let (proof_root_hash, identity): ( RootHash, Option, - ) = Drive::verify_full_identity_by_public_key_hash( + ) = Drive::verify_full_identity_by_unique_public_key_hash( &proof.grovedb_proof, key_hash, platform_version, diff --git a/packages/rs-drive-proof-verifier/src/proof.rs b/packages/rs-drive-proof-verifier/src/proof.rs index ec15e3df0c9..6be02632ec1 100644 --- a/packages/rs-drive-proof-verifier/src/proof.rs +++ b/packages/rs-drive-proof-verifier/src/proof.rs @@ -326,7 +326,7 @@ impl FromProof for Identity { }; // Extract content from proof and verify Drive/GroveDB proofs - let (root_hash, maybe_identity) = Drive::verify_full_identity_by_public_key_hash( + let (root_hash, maybe_identity) = Drive::verify_full_identity_by_unique_public_key_hash( &proof.grovedb_proof, public_key_hash, platform_version, diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..7945cb0799c --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,53 @@ +mod v0; + 
+use crate::drive::Drive; +use crate::error::{drive::DriveError, Error}; +use dpp::identity::Identity; + +use dpp::version::PlatformVersion; +use grovedb::TransactionArg; + +impl Drive { + /// Fetches an identity with all its related information from storage based on a non unique public key hash. + /// + /// This function leverages the versioning system to direct the fetch operation to the appropriate handler based on the `DriveVersion` provided. + /// + /// # Arguments + /// + /// * `public_key_hash` - A non-unique public key hash corresponding to the identity to be fetched. + /// * `after` - An identity ID after which we want to get back our identity. Basically "don't get back this identity, get the next one" + /// * `transaction` - Transaction arguments. + /// * `drive_version` - A reference to the drive version. + /// + /// # Returns + /// + /// Returns a `Result` containing an `Option` of the `Identity` if it exists, otherwise an `Error` if the fetch operation fails or the version is not supported. 
+ pub fn fetch_full_identity_by_non_unique_public_key_hash( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result, Error> { + match platform_version + .drive + .methods + .identity + .fetch + .public_key_hashes + .fetch_full_identity_by_non_unique_public_key_hash + { + 0 => self.fetch_full_identity_by_non_unique_public_key_hash_v0( + public_key_hash, + after, + transaction, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "fetch_full_identity_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..fa6dd8b1618 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,53 @@ +use crate::drive::Drive; + +use crate::error::Error; +use crate::fees::op::LowLevelDriveOperation; + +use dpp::identity::Identity; +use dpp::version::PlatformVersion; + +use grovedb::TransactionArg; + +impl Drive { + /// Fetches an identity with all its information from storage. + pub(super) fn fetch_full_identity_by_non_unique_public_key_hash_v0( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result, Error> { + let mut drive_operations: Vec = vec![]; + self.fetch_full_identity_by_non_unique_public_key_hash_operations_v0( + public_key_hash, + after, + transaction, + &mut drive_operations, + platform_version, + ) + } + + /// Given an identity, fetches the identity with its flags from storage. 
+ pub(super) fn fetch_full_identity_by_non_unique_public_key_hash_operations_v0( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + drive_operations: &mut Vec, + platform_version: &PlatformVersion, + ) -> Result, Error> { + let identity_ids = self.fetch_identity_ids_by_non_unique_public_key_hash_operations( + public_key_hash, + Some(1), + after, + transaction, + drive_operations, + platform_version, + )?; + if let Some(identity_id) = identity_ids.first() { + self.fetch_full_identity(*identity_id, transaction, platform_version) + } else { + Ok(None) + } + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs index 9c444e59731..13c3e8111f8 100644 --- a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs @@ -2,8 +2,9 @@ mod v0; use crate::drive::Drive; use crate::error::{drive::DriveError, Error}; -use dpp::version::drive_versions::DriveVersion; +use crate::fees::op::LowLevelDriveOperation; use grovedb::TransactionArg; +use platform_version::version::PlatformVersion; impl Drive { /// Fetches identity ids from storage based on a non-unique public key hash. @@ -13,8 +14,9 @@ impl Drive { /// # Arguments /// /// * `public_key_hash` - A non-unique public key hash corresponding to the identity ids to be fetched. + /// * `limit` - An optional limit. /// * `transaction` - Transaction arguments. - /// * `drive_version` - A reference to the drive version. + /// * `platform_version` - A reference to the platform version. 
/// /// # Returns /// @@ -22,10 +24,13 @@ impl Drive { pub fn fetch_identity_ids_by_non_unique_public_key_hash( &self, public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, transaction: TransactionArg, - drive_version: &DriveVersion, + platform_version: &PlatformVersion, ) -> Result, Error> { - match drive_version + match platform_version + .drive .methods .identity .fetch @@ -34,8 +39,10 @@ impl Drive { { 0 => self.fetch_identity_ids_by_non_unique_public_key_hash_v0( public_key_hash, + limit, + after, transaction, - drive_version, + platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { method: "fetch_identity_ids_by_non_unique_public_key_hash".to_string(), @@ -44,4 +51,37 @@ impl Drive { })), } } + + pub(crate) fn fetch_identity_ids_by_non_unique_public_key_hash_operations( + &self, + public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, + transaction: TransactionArg, + drive_operations: &mut Vec, + platform_version: &PlatformVersion, + ) -> Result, Error> { + match platform_version + .drive + .methods + .identity + .fetch + .public_key_hashes + .fetch_identity_ids_by_non_unique_public_key_hash + { + 0 => self.fetch_identity_ids_by_non_unique_public_key_hash_operations_v0( + public_key_hash, + limit, + after, + transaction, + drive_operations, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "fetch_identity_ids_by_non_unique_public_key_hash_operations".to_string(), + known_versions: vec![0], + received: version, + })), + } + } } diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs index ee462d63281..0eeacb54b23 100644 --- 
a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs @@ -6,10 +6,9 @@ use crate::query::QueryItem; use grovedb::query_result_type::QueryResultType; -use dpp::version::drive_versions::DriveVersion; - use grovedb::{PathQuery, TransactionArg}; +use platform_version::version::PlatformVersion; use std::ops::RangeFull; impl Drive { @@ -17,37 +16,49 @@ impl Drive { pub(super) fn fetch_identity_ids_by_non_unique_public_key_hash_v0( &self, public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, transaction: TransactionArg, - drive_version: &DriveVersion, + platform_version: &PlatformVersion, ) -> Result, Error> { let mut drive_operations: Vec = vec![]; self.fetch_identity_ids_by_non_unique_public_key_hash_operations_v0( public_key_hash, + limit, + after, transaction, &mut drive_operations, - drive_version, + platform_version, ) } - /// Gets identity ids from non unique public key hashes. + /// Gets identity ids from non-unique public key hashes. 
pub(super) fn fetch_identity_ids_by_non_unique_public_key_hash_operations_v0( &self, public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, transaction: TransactionArg, drive_operations: &mut Vec, - drive_version: &DriveVersion, + platform_version: &PlatformVersion, ) -> Result, Error> { let non_unique_key_hashes = non_unique_key_hashes_sub_tree_path_vec(public_key_hash); - let path_query = PathQuery::new_single_query_item( - non_unique_key_hashes, - QueryItem::RangeFull(RangeFull), - ); + let mut path_query = if let Some(after) = after { + PathQuery::new_single_query_item( + non_unique_key_hashes, + QueryItem::RangeAfter(after.to_vec()..), + ) + } else { + PathQuery::new_single_query_item(non_unique_key_hashes, QueryItem::RangeFull(RangeFull)) + }; + path_query.query.limit = limit; + let (results, _) = self.grove_get_path_query( &path_query, transaction, QueryResultType::QueryKeyElementPairResultType, drive_operations, - drive_version, + &platform_version.drive, )?; results .to_keys() diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs index 859982396ab..cbd75b76d42 100644 --- a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs @@ -1,4 +1,5 @@ mod fetch_full_identities_by_unique_public_key_hashes; +mod fetch_full_identity_by_non_unique_public_key_hash; mod fetch_full_identity_by_unique_public_key_hash; mod fetch_identity_id_by_unique_public_key_hash; mod fetch_identity_ids_by_non_unique_public_key_hash; @@ -23,7 +24,6 @@ mod tests { fn test_fetch_all_keys_on_identity() { let drive = setup_drive(None, None); let platform_version = PlatformVersion::latest(); - let drive_version = &platform_version.drive; let transaction = drive.grove.start_transaction(); @@ -71,8 +71,10 @@ mod tests { let identity_ids = drive 
.fetch_identity_ids_by_non_unique_public_key_hash( hash, + None, + None, Some(&transaction), - &drive_version, + platform_version, ) .expect("expected to get identity ids"); assert!(identity_ids.contains(&identity.id().to_buffer())); diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs index b095b6bf427..7ca607c9f0a 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs @@ -1,6 +1,7 @@ mod prove_full_identities; mod prove_full_identities_by_unique_public_key_hashes; mod prove_full_identity; +mod prove_full_identity_by_non_unique_public_key_hash; mod prove_full_identity_by_unique_public_key_hash; mod prove_identities_contract_keys; mod prove_identity_id_by_unique_public_key_hash; diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..5712e0df0d4 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,75 @@ +mod v0; + +use crate::drive::Drive; +use crate::error::drive::DriveError; +use crate::error::Error; + +use crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use dpp::version::PlatformVersion; +use grovedb::TransactionArg; + +impl Drive { + /// Generates a proof for an identity associated with a given non-unique public key hash. + /// + /// This function retrieves an identity along with its associated proofs from storage. + /// It utilizes versioning to call the appropriate handler based on the provided + /// `PlatformVersion`. 
+ /// + /// # Arguments + /// + /// - `public_key_hash` - A 20-byte array representing the hash of the public key + /// for which the identity should be fetched. + /// - `after` - An optional identity ID specifying the starting point for retrieval. + /// If provided, the function will return the identity that appears after the given ID, + /// ensuring that the specified identity itself is not included. + /// - `transaction` - A transaction argument used for database operations. + /// - `platform_version` - A reference to the platform version, ensuring that the + /// correct version-specific function is used. + /// + /// # Returns + /// + /// Returns a `Result` containing an [`IdentityAndNonUniquePublicKeyHashDoubleProof`], which + /// includes both the proof of the identity and the proof linking the public key hash to + /// an identity ID. If the operation fails or the platform version is unsupported, an `Error` + /// is returned. + /// + /// # Errors + /// + /// This function will return an `Error` if: + /// - The identity retrieval operation fails. + /// - The provided public key hash does not correspond to a known identity. + /// - The requested platform version is unknown or not supported. + /// + /// # Versioning + /// + /// - Currently, only version `0` of `prove_full_identity_by_non_unique_public_key_hash` + /// is implemented. If an unsupported version is provided, an `UnknownVersionMismatch` + /// error is returned. 
+ pub fn prove_full_identity_by_non_unique_public_key_hash( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result { + match platform_version + .drive + .methods + .identity + .prove + .prove_full_identity_by_non_unique_public_key_hash + { + 0 => self.prove_full_identity_by_non_unique_public_key_hash_v0( + public_key_hash, + after, + transaction, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "prove_full_identity_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..93c8d26c519 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,219 @@ +use crate::drive::Drive; + +use crate::error::Error; + +use crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use dpp::version::PlatformVersion; +use grovedb::TransactionArg; + +impl Drive { + /// Proves a full identity by a non-unique public key hash, optionally continuing after a given identity id.
+ pub(super) fn prove_full_identity_by_non_unique_public_key_hash_v0( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result { + let identity_ids = self.fetch_identity_ids_by_non_unique_public_key_hash_operations( + public_key_hash, + Some(1), + after, + transaction, + &mut vec![], + platform_version, + )?; + // We only prove the absence of the public key hash + let mut path_query = + Self::identity_id_by_non_unique_public_key_hash_query(public_key_hash, after); + path_query.query.limit = Some(1); + let identity_id_public_key_hash_proof = self.grove_get_proved_path_query( + &path_query, + transaction, + &mut vec![], + &platform_version.drive, + )?; + let identity_proof = if let Some(identity_id) = identity_ids.first() { + let full_identity_query = + Self::full_identity_query(&identity_id, &platform_version.drive.grove_version)?; + Some(self.grove_get_proved_path_query( + &full_identity_query, + transaction, + &mut vec![], + &platform_version.drive, + )?) 
+ } else { + None + }; + + Ok(IdentityAndNonUniquePublicKeyHashDoubleProof { + identity_proof, + identity_id_public_key_hash_proof, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::util::test_helpers::setup::setup_drive_with_initial_state_structure; + use dpp::block::block_info::BlockInfo; + use dpp::identity::accessors::IdentityGettersV0; + use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; + use dpp::identity::identity_public_key::methods::hash::IdentityPublicKeyHashMethodsV0; + use dpp::identity::{Identity, IdentityPublicKey, KeyType}; + use dpp::version::PlatformVersion; + use rand::prelude::StdRng; + use rand::SeedableRng; + + #[test] + fn should_prove_a_single_identity_with_non_unique_key() { + let drive = setup_drive_with_initial_state_structure(None); + + let platform_version = PlatformVersion::latest(); + + let identity = Identity::random_identity(3, Some(14), platform_version) + .expect("expected a random identity"); + + drive + .add_new_identity( + identity.clone(), + false, + &BlockInfo::default(), + true, + None, + platform_version, + ) + .expect("expected to add an identity"); + + let first_key_hash = identity + .public_keys() + .values() + .find(|public_key| !public_key.key_type().is_unique_key_type()) + .expect("expected a unique key") + .public_key_hash() + .expect("expected to hash data"); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + first_key_hash, + None, + None, + platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + first_key_hash, + None, + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, Some(identity)); + } + + #[test] + fn should_prove_a_single_identity_with_non_unique_key_when_two_have_same_key() { + let drive = setup_drive_with_initial_state_structure(None); + + let platform_version 
= PlatformVersion::latest(); + + let mut identity_1 = Identity::random_identity(3, Some(14), platform_version) + .expect("expected a random identity"); + + let mut identity_2 = Identity::random_identity(3, Some(15), platform_version) + .expect("expected a random identity"); + + let mut rng = StdRng::seed_from_u64(506); + + let key = IdentityPublicKey::random_voting_key_with_rng(3, &mut rng, platform_version) + .expect("expected key") + .0; + + identity_1.add_public_key(key.clone()); + identity_2.add_public_key(key.clone()); + + drive + .add_new_identity( + identity_1.clone(), + false, + &BlockInfo::default(), + true, + None, + platform_version, + ) + .expect("expected to add an identity"); + + drive + .add_new_identity( + identity_2.clone(), + false, + &BlockInfo::default(), + true, + None, + platform_version, + ) + .expect("expected to add an identity"); + + let key_hash = key.public_key_hash().expect("expected key hash"); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + key_hash, + None, + None, + platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + key_hash, + None, + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, Some(identity_1.clone())); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + key_hash, + Some(identity_1.id().to_buffer()), + None, + platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + key_hash, + Some(identity_1.id().to_buffer()), + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, Some(identity_2.clone())); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + key_hash, + Some(identity_2.id().to_buffer()), + None, + 
platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + key_hash, + Some(identity_2.id().to_buffer()), + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, None); + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs index f474fa52315..085920640ba 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs @@ -87,7 +87,7 @@ mod tests { .prove_full_identity_by_unique_public_key_hash(first_key_hash, None, platform_version) .expect("should not error when proving an identity"); - let (_, proved_identity) = Drive::verify_full_identity_by_public_key_hash( + let (_, proved_identity) = Drive::verify_full_identity_by_unique_public_key_hash( proof.as_slice(), first_key_hash, platform_version, diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs index 8900c2e2dcc..83af15538f1 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs @@ -65,7 +65,7 @@ mod tests { ) .expect("should not error when proving an identity"); - let (_, proved_identity_id) = Drive::verify_identity_id_by_public_key_hash( + let (_, proved_identity_id) = Drive::verify_identity_id_by_unique_public_key_hash( proof.as_slice(), false, first_key_hash, diff --git 
a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs index 1b4636e6b5a..d9c54243db5 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs @@ -89,7 +89,7 @@ mod tests { .expect("should not error when proving an identity"); let (_, proved_identity_id): ([u8; 32], BTreeMap<[u8; 20], Option<[u8; 32]>>) = - Drive::verify_identity_ids_by_public_key_hashes( + Drive::verify_identity_ids_by_unique_public_key_hashes( proof.as_slice(), false, &key_hashes, diff --git a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs index f7832d2274f..6e1b1e4b65d 100644 --- a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs @@ -1,6 +1,9 @@ use crate::drive::balances::balance_path_vec; use crate::drive::identity::key::fetch::IdentityKeysRequest; -use crate::drive::{identity_tree_path_vec, unique_key_hashes_tree_path_vec, Drive}; +use crate::drive::{ + identity_tree_path_vec, non_unique_key_hashes_tree_path, non_unique_key_hashes_tree_path_vec, + unique_key_hashes_tree_path_vec, Drive, +}; use std::ops::RangeFull; use crate::error::Error; @@ -90,6 +93,23 @@ impl Drive { PathQuery::new_single_key(unique_key_hashes, public_key_hash.to_vec()) } + /// The query for proving an identity id from a non-unique public key hash. 
+ /// This should be used for absence proofs + pub fn identity_id_by_non_unique_public_key_hash_query( + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + ) -> PathQuery { + let non_unique_key_hashes = non_unique_key_hashes_tree_path_vec(); + let mut query = Query::new_single_key(public_key_hash.to_vec()); + let sub_query = if let Some(after) = after { + Query::new_single_query_item(QueryItem::RangeAfter(after.to_vec()..)) + } else { + Query::new_range_full() + }; + query.set_subquery(sub_query); + PathQuery::new(non_unique_key_hashes, SizedQuery::new(query, None, None)) + } + + /// The query for proving identity ids from a vector of public key hashes. + pub fn identity_ids_by_unique_public_key_hash_query( + public_key_hashes: &[[u8; 20]], @@ -213,6 +233,23 @@ impl Drive { .map_err(Error::GroveDB) } + /// This query gets the full identity and the public key hash + pub fn full_identity_with_non_unique_public_key_hash_query( + public_key_hash: [u8; 20], + identity_id: [u8; 32], + after: Option<[u8; 32]>, + grove_version: &GroveVersion, + ) -> Result { + let full_identity_query = Self::full_identity_query(&identity_id, grove_version)?; + let identity_id_by_public_key_hash_query = + Self::identity_id_by_non_unique_public_key_hash_query(public_key_hash, after); + PathQuery::merge( + vec![&full_identity_query, &identity_id_by_public_key_hash_query], + grove_version, + ) + .map_err(Error::GroveDB) + } + + /// The query full identities with key hashes too + pub fn full_identities_with_keys_hashes_query( + identity_ids: &[[u8; 32]], diff --git a/packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs b/packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs new file mode 100644 index 00000000000..ccd439ca080 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs @@ -0,0 +1,21 @@ +/// Represents a proof containing an optional identity proof and a
required +/// proof for the identity ID and non-unique public key hash. +/// +/// This struct is used to verify the authenticity and validity of an identity +/// and its associated non-unique public key hash. +/// +/// # Fields +/// +/// * `identity_proof` - An optional proof for the identity, represented as a +/// serialized byte vector. This may be `None` if no additional proof is required. +/// * `identity_id_public_key_hash_proof` - A required proof verifying the +/// association between an identity ID and its non-unique public key hash, +/// stored as a serialized byte vector. +pub struct IdentityAndNonUniquePublicKeyHashDoubleProof { + /// Optional proof of identity, stored as a serialized byte vector. + pub identity_proof: Option>, + + /// Proof linking an identity ID to a non-unique public key hash, + /// stored as a serialized byte vector. + pub identity_id_public_key_hash_proof: Vec, +} diff --git a/packages/rs-drive/src/drive/identity/mod.rs b/packages/rs-drive/src/drive/identity/mod.rs index 1922e74b884..a75be0ea563 100644 --- a/packages/rs-drive/src/drive/identity/mod.rs +++ b/packages/rs-drive/src/drive/identity/mod.rs @@ -40,6 +40,10 @@ pub mod key; #[cfg(feature = "server")] pub mod update; +/// A module for a struct encapsulating an identity and a non-unique public key hash to identity id proof +#[cfg(any(feature = "server", feature = "verify"))] +pub mod identity_and_non_unique_public_key_hash_double_proof; + use crate::drive::identity::contract_info::ContractInfoStructure; use crate::error::drive::DriveError; use crate::error::Error; diff --git a/packages/rs-drive/src/verify/identity/mod.rs b/packages/rs-drive/src/verify/identity/mod.rs index dd24b00f678..a9308182abb 100644 --- a/packages/rs-drive/src/verify/identity/mod.rs +++ b/packages/rs-drive/src/verify/identity/mod.rs @@ -1,13 +1,15 @@ mod verify_full_identities_by_public_key_hashes; mod verify_full_identity_by_identity_id; -mod verify_full_identity_by_public_key_hash; +mod 
verify_full_identity_by_non_unique_public_key_hash; +mod verify_full_identity_by_unique_public_key_hash; mod verify_identities_contract_keys; mod verify_identity_balance_and_revision_for_identity_id; mod verify_identity_balance_for_identity_id; mod verify_identity_balances_for_identity_ids; mod verify_identity_contract_nonce; -mod verify_identity_id_by_public_key_hash; -mod verify_identity_ids_by_public_key_hashes; +mod verify_identity_id_by_non_unique_public_key_hash; +mod verify_identity_id_by_unique_public_key_hash; +mod verify_identity_ids_by_unique_public_key_hashes; mod verify_identity_keys_by_identity_id; mod verify_identity_nonce; mod verify_identity_revision_for_identity_id; diff --git a/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs index 7c1f0545d05..43ce89e6d00 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs @@ -48,7 +48,7 @@ impl Drive { platform_version: &PlatformVersion, ) -> Result<(RootHash, T), Error> { let (root_hash, identity_ids_by_key_hashes) = - Self::verify_identity_ids_by_public_key_hashes::>( + Self::verify_identity_ids_by_unique_public_key_hashes::>( proof, true, public_key_hashes, diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..4e593caeb67 --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,79 @@ +mod v0; + +use crate::drive::Drive; + +use crate::error::drive::DriveError; + +use crate::error::Error; + +use crate::verify::RootHash; + +pub use dpp::prelude::Identity; + +use 
crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use dpp::version::PlatformVersion; + +impl Drive { + /// Verifies the full identity of a user using their non-unique public key hash. + /// + /// This function acts as a dispatcher that selects the appropriate version-specific + /// verification method based on the provided platform version. + /// + /// # Parameters + /// + /// - `proof`: A proof containing both the identity proof (if applicable) and the + /// proof linking the public key hash to an identity ID. + /// - `public_key_hash`: A 20-byte array representing the hash of the user's public key. + /// - `after`: An optional 32-byte array specifying an identity after which + /// the search should begin when retrieving the identity. + /// - `platform_version`: A reference to the platform version, ensuring that + /// the correct verification method is used. + /// + /// # Returns + /// + /// Returns a `Result` containing: + /// - `RootHash`: The root hash of GroveDB after verification. + /// - `Option`: The full identity of the user, if it exists. + /// + /// If no identity is found, the returned `Option` will be `None`. + /// + /// # Errors + /// + /// This function returns an `Error` if: + /// - The provided proof is invalid. + /// - The public key hash does not correspond to a valid identity ID. + /// - The identity ID exists but does not correspond to a valid full identity. + /// - The provided platform version is unknown or unsupported. + /// + /// # Versioning + /// + /// - Currently, only version `0` of `verify_full_identity_by_non_unique_public_key_hash` + /// is implemented. If an unsupported version is provided, an `UnknownVersionMismatch` + /// error is returned. 
+ pub fn verify_full_identity_by_non_unique_public_key_hash( + proof: &IdentityAndNonUniquePublicKeyHashDoubleProof, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option), Error> { + match platform_version + .drive + .methods + .verify + .identity + .verify_full_identity_by_non_unique_public_key_hash + { + 0 => Self::verify_full_identity_by_non_unique_public_key_hash_v0( + proof, + public_key_hash, + after, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "verify_full_identity_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..ccb81c1d6fb --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,82 @@ +use crate::drive::Drive; + +use crate::error::Error; + +use crate::verify::RootHash; + +pub use dpp::prelude::Identity; + +use crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use crate::error::proof::ProofError; +use dpp::version::PlatformVersion; + +impl Drive { + /// Verifies the full identity of a user using their non-unique public key hash. + /// + /// This function performs a two-step verification process: + /// 1. It verifies the identity ID associated with the given public key hash + /// by calling [`verify_identity_id_by_non_unique_public_key_hash()`]. + /// 2. If an identity ID is found, it then verifies the full identity by calling + /// [`verify_full_identity_by_identity_id()`]. 
+ /// + /// # Arguments + /// + /// * `proof` - A proof containing both the identity proof (if applicable) and + /// the proof linking the public key hash to an identity ID. + /// * `public_key_hash` - A 20-byte array representing the hash of the user's public key. + /// * `after` - An optional 32-byte array used to specify a search point in the proof verification process. + /// * `platform_version` - A reference to the platform version, ensuring compatibility. + /// + /// # Returns + /// + /// If verification is successful, returns a `Result` containing: + /// - `RootHash` - The root hash of GroveDB after verification. + /// - `Option` - The full identity of the user, if it exists. + /// + /// If no identity is found, the returned `Option` will be `None`. + /// + /// # Errors + /// + /// This function will return an `Error` if: + /// * The provided proof is invalid. + /// * The public key hash does not correspond to a valid identity ID. + /// * The identity ID exists but the associated identity proof is missing. + /// * The identity verification process fails. + /// + /// # Inline Optimization + /// + /// This function is marked with `#[inline(always)]` to hint the compiler to + /// aggressively inline it for performance optimization. 
+ #[inline(always)] + pub(super) fn verify_full_identity_by_non_unique_public_key_hash_v0( + proof: &IdentityAndNonUniquePublicKeyHashDoubleProof, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option), Error> { + let (root_hash, identity_id) = Self::verify_identity_id_by_non_unique_public_key_hash( + &proof.identity_id_public_key_hash_proof, + false, + public_key_hash, + after, + platform_version, + )?; + let maybe_identity = identity_id + .map(|identity_id| { + let Some(identity_proof) = &proof.identity_proof else { + return Err(Error::Proof(ProofError::IncompleteProof("identity is not in proof even though identity id is set from non unique public key hash"))); + }; + + Self::verify_full_identity_by_identity_id( + identity_proof.as_slice(), + false, + identity_id, + platform_version, + ) + .map(|(_, maybe_identity)| maybe_identity) + }) + .transpose()? + .flatten(); + Ok((root_hash, maybe_identity)) + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/mod.rs similarity index 89% rename from packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/mod.rs rename to packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/mod.rs index 6ce7cc6f263..17cd33c1995 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/mod.rs @@ -36,7 +36,7 @@ impl Drive { /// - The identity ID does not correspond to a valid full identity. /// - An unknown or unsupported platform version is provided.
/// - pub fn verify_full_identity_by_public_key_hash( + pub fn verify_full_identity_by_unique_public_key_hash( proof: &[u8], public_key_hash: [u8; 20], platform_version: &PlatformVersion, @@ -48,13 +48,13 @@ impl Drive { .identity .verify_full_identity_by_public_key_hash { - 0 => Self::verify_full_identity_by_public_key_hash_v0( + 0 => Self::verify_full_identity_by_unique_public_key_hash_v0( proof, public_key_hash, platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { - method: "verify_full_identity_by_public_key_hash".to_string(), + method: "verify_full_identity_by_unique_public_key_hash".to_string(), known_versions: vec![0], received: version, })), diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs similarity index 96% rename from packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/v0/mod.rs rename to packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs index e481343470a..88fe2894242 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs @@ -38,12 +38,12 @@ impl Drive { /// * The identity ID does not correspond to a valid full identity. 
/// #[inline(always)] - pub(super) fn verify_full_identity_by_public_key_hash_v0( + pub(super) fn verify_full_identity_by_unique_public_key_hash_v0( proof: &[u8], public_key_hash: [u8; 20], platform_version: &PlatformVersion, ) -> Result<(RootHash, Option), Error> { - let (root_hash, identity_id) = Self::verify_identity_id_by_public_key_hash( + let (root_hash, identity_id) = Self::verify_identity_id_by_unique_public_key_hash( proof, true, public_key_hash, diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..0d1ccae9f99 --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,65 @@ +mod v0; + +use crate::drive::Drive; + +use crate::error::drive::DriveError; + +use crate::error::Error; + +use crate::verify::RootHash; + +use dpp::version::PlatformVersion; + +impl Drive { + /// Verifies the identity ID of a user by their public key hash. + /// + /// # Parameters + /// + /// - `proof`: A byte slice representing the proof of authentication from the user. + /// - `is_proof_subset`: A boolean indicating whether the proof is a subset. + /// - `public_key_hash`: A 20-byte array representing the hash of the public key of the user. + /// - `after`: A 32 byte array representing an identity after which we want to get the identity id. + /// - `platform_version`: The platform version against which to verify the identity ID. + /// + /// # Returns + /// + /// If the verification is successful, it returns a `Result` with a tuple of `RootHash` and + /// an `Option` of a 32-byte array. The `RootHash` represents the root hash of GroveDB, + /// and the `Option<[u8; 32]>` represents the identity ID of the user if it exists. 
+ /// + /// # Errors + /// + /// Returns an `Error` if: + /// + /// - An unknown or unsupported platform version is provided. + /// - Any other error as documented in the specific versioned function. + /// + pub fn verify_identity_id_by_non_unique_public_key_hash( + proof: &[u8], + is_proof_subset: bool, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option<[u8; 32]>), Error> { + match platform_version + .drive + .methods + .verify + .identity + .verify_identity_id_by_non_unique_public_key_hash + { + 0 => Self::verify_identity_id_by_non_unique_public_key_hash_v0( + proof, + is_proof_subset, + public_key_hash, + after, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "verify_identity_id_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..1d3730b5562 --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,69 @@ +use crate::drive::{non_unique_key_hashes_sub_tree_path_vec, Drive}; + +use crate::error::proof::ProofError; +use crate::error::Error; + +use crate::verify::RootHash; + +use grovedb::GroveDb; +use platform_version::version::PlatformVersion; + +impl Drive { + /// Verifies the identity ID of a user by their public key hash. + /// + /// # Parameters + /// + /// - `proof`: A byte slice representing the proof of authentication from the user. + /// - `is_proof_subset`: A boolean indicating whether the proof is a subset. + /// - `public_key_hash`: A 20-byte array representing the hash of the public key of the user. 
+ /// - `after`: A 32 byte array representing an identity after which we want to get the identity id. + /// + /// # Returns + /// + /// If the verification is successful, it returns a `Result` with a tuple of `RootHash` and + /// an `Option` of a 32-byte array. The `RootHash` represents the root hash of GroveDB, + /// and the `Option<[u8; 32]>` represents the identity ID of the user if it exists. + /// + /// # Errors + /// + /// Returns an `Error` if: + /// + /// - The proof of authentication is not valid. + /// - The public key hash does not correspond to a valid identity ID. + /// - The proved key value is not for the correct path or key in unique key hashes. + /// - More than one identity ID is found. + /// + #[inline(always)] + pub(super) fn verify_identity_id_by_non_unique_public_key_hash_v0( + proof: &[u8], + is_proof_subset: bool, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option<[u8; 32]>), Error> { + let mut path_query = + Self::identity_id_by_non_unique_public_key_hash_query(public_key_hash, after); + path_query.query.limit = Some(1); + let (root_hash, mut proved_key_values) = if is_proof_subset { + GroveDb::verify_subset_query(proof, &path_query, &platform_version.drive.grove_version)? + } else { + GroveDb::verify_query(proof, &path_query, &platform_version.drive.grove_version)? 
+ }; + + if proved_key_values.len() == 1 { + let (path, key, _) = proved_key_values.remove(0); + if path != non_unique_key_hashes_sub_tree_path_vec(public_key_hash) { + return Err(Error::Proof(ProofError::CorruptedProof( + "we did not get back an element for the correct path in non unique key hashes" + .to_string(), + ))); + } + let identity_id = key.try_into().map_err(|_| { + Error::Proof(ProofError::IncorrectValueSize("value size is incorrect")) + })?; + Ok((root_hash, Some(identity_id))) + } else { + Ok((root_hash, None)) + } + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/mod.rs similarity index 86% rename from packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/mod.rs index 75182ffeefc..4422e600e01 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/mod.rs @@ -33,7 +33,7 @@ impl Drive { /// - An unknown or unsupported platform version is provided. /// - Any other error as documented in the specific versioned function. 
/// - pub fn verify_identity_id_by_public_key_hash( + pub fn verify_identity_id_by_unique_public_key_hash( proof: &[u8], is_proof_subset: bool, public_key_hash: [u8; 20], @@ -44,16 +44,16 @@ impl Drive { .methods .verify .identity - .verify_identity_id_by_public_key_hash + .verify_identity_id_by_unique_public_key_hash { - 0 => Self::verify_identity_id_by_public_key_hash_v0( + 0 => Self::verify_identity_id_by_unique_public_key_hash_v0( proof, is_proof_subset, public_key_hash, platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { - method: "verify_identity_id_by_public_key_hash".to_string(), + method: "verify_identity_id_by_unique_public_key_hash".to_string(), known_versions: vec![0], received: version, })), diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/v0/mod.rs similarity index 98% rename from packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/v0/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/v0/mod.rs index ceb8ae05b07..cccc1aa0f15 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/v0/mod.rs @@ -33,7 +33,7 @@ impl Drive { /// - More than one identity ID is found. 
/// #[inline(always)] - pub(super) fn verify_identity_id_by_public_key_hash_v0( + pub(super) fn verify_identity_id_by_unique_public_key_hash_v0( proof: &[u8], is_proof_subset: bool, public_key_hash: [u8; 20], diff --git a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/mod.rs similarity index 87% rename from packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/mod.rs index 20a9d98c3d8..6d1f70f8a04 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/mod.rs @@ -36,7 +36,7 @@ impl Drive { /// - An unknown or unsupported platform version is provided. /// - Any other error as documented in the specific versioned function. 
/// - pub fn verify_identity_ids_by_public_key_hashes< + pub fn verify_identity_ids_by_unique_public_key_hashes< T: FromIterator<([u8; 20], Option<[u8; 32]>)>, >( proof: &[u8], @@ -49,16 +49,16 @@ impl Drive { .methods .verify .identity - .verify_identity_ids_by_public_key_hashes + .verify_identity_ids_by_unique_public_key_hashes { - 0 => Self::verify_identity_ids_by_public_key_hashes_v0( + 0 => Self::verify_identity_ids_by_unique_public_key_hashes_v0( proof, is_proof_subset, public_key_hashes, platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { - method: "verify_identity_ids_by_public_key_hashes".to_string(), + method: "verify_identity_ids_by_unique_public_key_hashes".to_string(), known_versions: vec![0], received: version, })), diff --git a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/v0/mod.rs similarity index 98% rename from packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/v0/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/v0/mod.rs index 6d1c6dc7d85..90beab8eaf3 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/v0/mod.rs @@ -37,7 +37,7 @@ impl Drive { /// - The number of proved key values does not match the number of public key hashes provided. /// - The value size of the identity ID is incorrect. 
/// - pub(crate) fn verify_identity_ids_by_public_key_hashes_v0< + pub(crate) fn verify_identity_ids_by_unique_public_key_hashes_v0< T: FromIterator<([u8; 20], Option<[u8; 32]>)>, >( proof: &[u8], diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs index 7a5d65bd3ea..ac3412fd85e 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs @@ -52,7 +52,7 @@ pub struct DriveAbciQueryIdentityVersions { pub balance: FeatureVersionBounds, pub identities_balances: FeatureVersionBounds, pub balance_and_revision: FeatureVersionBounds, - pub identity_by_public_key_hash: FeatureVersionBounds, + pub identity_by_unique_public_key_hash: FeatureVersionBounds, } #[derive(Clone, Debug, Default)] diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs index 494dfa6f097..829c9a218d2 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs @@ -67,7 +67,7 @@ pub const DRIVE_ABCI_QUERY_VERSIONS_V1: DriveAbciQueryVersions = DriveAbciQueryV max_version: 0, default_current_version: 0, }, - identity_by_public_key_hash: FeatureVersionBounds { + identity_by_unique_public_key_hash: FeatureVersionBounds { min_version: 0, max_version: 0, default_current_version: 0, diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs index 1a2526f4fe7..ae6d64f233f 100644 --- 
a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs @@ -95,6 +95,7 @@ pub struct DriveIdentityFetchPublicKeyHashesMethodVersions { pub has_non_unique_public_key_hash: FeatureVersion, pub has_non_unique_public_key_hash_already_for_identity: FeatureVersion, pub has_unique_public_key_hash: FeatureVersion, + pub fetch_full_identity_by_non_unique_public_key_hash: FeatureVersion, } #[derive(Clone, Debug, Default)] @@ -133,6 +134,7 @@ pub struct DriveIdentityProveMethodVersions { pub prove_full_identity_by_unique_public_key_hash: FeatureVersion, pub prove_identity_id_by_unique_public_key_hash: FeatureVersion, pub prove_identity_ids_by_unique_public_key_hashes: FeatureVersion, + pub prove_full_identity_by_non_unique_public_key_hash: FeatureVersion, } #[derive(Clone, Debug, Default)] diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs index beb79c65c18..fab61b69d37 100644 --- a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs @@ -27,6 +27,7 @@ pub const DRIVE_IDENTITY_METHOD_VERSIONS_V1: DriveIdentityMethodVersions = has_non_unique_public_key_hash: 0, has_non_unique_public_key_hash_already_for_identity: 0, has_unique_public_key_hash: 0, + fetch_full_identity_by_non_unique_public_key_hash: 0, }, attributes: DriveIdentityFetchAttributesMethodVersions { revision: 0, @@ -58,6 +59,7 @@ pub const DRIVE_IDENTITY_METHOD_VERSIONS_V1: DriveIdentityMethodVersions = prove_full_identity_by_unique_public_key_hash: 0, prove_identity_id_by_unique_public_key_hash: 0, prove_identity_ids_by_unique_public_key_hashes: 0, + 
prove_full_identity_by_non_unique_public_key_hash: 0, }, keys: DriveIdentityKeysMethodVersions { fetch: DriveIdentityKeysFetchMethodVersions { diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs index f8082f48198..90cd83d5291 100644 --- a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs @@ -35,13 +35,15 @@ pub struct DriveVerifyIdentityMethodVersions { pub verify_full_identity_by_public_key_hash: FeatureVersion, pub verify_identity_balance_for_identity_id: FeatureVersion, pub verify_identity_balances_for_identity_ids: FeatureVersion, - pub verify_identity_id_by_public_key_hash: FeatureVersion, - pub verify_identity_ids_by_public_key_hashes: FeatureVersion, + pub verify_identity_id_by_unique_public_key_hash: FeatureVersion, + pub verify_identity_ids_by_unique_public_key_hashes: FeatureVersion, pub verify_identity_keys_by_identity_id: FeatureVersion, pub verify_identity_nonce: FeatureVersion, pub verify_identity_contract_nonce: FeatureVersion, pub verify_identities_contract_keys: FeatureVersion, pub verify_identity_revision_for_identity_id: FeatureVersion, + pub verify_full_identity_by_non_unique_public_key_hash: FeatureVersion, + pub verify_identity_id_by_non_unique_public_key_hash: FeatureVersion, } #[derive(Clone, Debug, Default)] diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs index 666ab416ca7..a848e92c7dd 100644 --- a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs @@ -21,13 +21,15 @@ pub const 
DRIVE_VERIFY_METHOD_VERSIONS_V1: DriveVerifyMethodVersions = DriveVeri verify_full_identity_by_public_key_hash: 0, verify_identity_balance_for_identity_id: 0, verify_identity_balances_for_identity_ids: 0, - verify_identity_id_by_public_key_hash: 0, - verify_identity_ids_by_public_key_hashes: 0, + verify_identity_id_by_unique_public_key_hash: 0, + verify_identity_ids_by_unique_public_key_hashes: 0, verify_identity_keys_by_identity_id: 0, verify_identity_nonce: 0, verify_identity_contract_nonce: 0, verify_identities_contract_keys: 0, verify_identity_revision_for_identity_id: 0, + verify_full_identity_by_non_unique_public_key_hash: 0, + verify_identity_id_by_non_unique_public_key_hash: 0, }, group: DriveVerifyGroupMethodVersions { verify_group_info: 0, diff --git a/packages/rs-platform-version/src/version/mocks/v2_test.rs b/packages/rs-platform-version/src/version/mocks/v2_test.rs index 3b8d73f2735..f3e5c2c6624 100644 --- a/packages/rs-platform-version/src/version/mocks/v2_test.rs +++ b/packages/rs-platform-version/src/version/mocks/v2_test.rs @@ -200,7 +200,7 @@ pub const TEST_PLATFORM_V2: PlatformVersion = PlatformVersion { max_version: 0, default_current_version: 0, }, - identity_by_public_key_hash: FeatureVersionBounds { + identity_by_unique_public_key_hash: FeatureVersionBounds { min_version: 0, max_version: 0, default_current_version: 0, From ca2a8fede3d4e01d2a294a9940280720dee1a970 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Mon, 10 Mar 2025 12:48:33 +0100 Subject: [PATCH 02/21] feat(sdk): get identity by non-unique pubkey hashes chore: update to latest dash core 37 (#2483) feat(platform)!: token advanced distribution and updates (#2471) fix: token history contract (#2474) Co-authored-by: Ivan Shumkov Co-authored-by: QuantumExplorer fix(drive): using new rust dash core methods for reversed quorum hash to maintain backwards compatibility (#2489) feat: more granular integer document property types (#2455) 
Co-authored-by: Quantum Explorer docs: update comment for data contract code range (#2476) feat: validate token name localizations (#2468) feat(sdk): get identity by non-unique keys build(deps): update grovedb to current develop test: test identity by non-unique pubkey hashes fix(sdk): dash core client fails to get quorum chore: minor fixes test(drive-abci): identity by non-unique pubkey start after chore: minor changes to verify feat(sdk): token and group queries (#2449) chore: revert limit 1 => limit none chore: add non-unique key to test identities test(sdk): test vectors for test_fetch_identity_by_non_unique_public_keys fix(platform)!: token distribution fixes and tests (#2494) chore(platform): bump to version 2.0.0-dev.1 (#2495) test: update assertion fix(sdk): make some things public (#2496) feat(platform): require token for document actions (#2498) fix: data contract proof doesn't work with new auto fields (#2501) --- packages/dapi-grpc/build.rs | 6 +- packages/rs-dapi-client/src/transport/grpc.rs | 9 ++ .../create_genesis_state/test/tokens.rs | 8 +- .../mod.rs | 4 +- .../v0/mod.rs | 29 +++++- packages/rs-drive-proof-verifier/src/proof.rs | 98 +++++++++++++++++- packages/rs-drive-verify-c-binding/src/lib.rs | 13 ++- packages/rs-drive/Cargo.toml | 20 ++-- .../src/drive/identity/fetch/queries/mod.rs | 9 +- packages/rs-drive/src/drive/mod.rs | 4 +- .../v0/mod.rs | 2 +- .../v0/mod.rs | 2 +- packages/rs-platform-version/Cargo.toml | 2 +- .../rs-sdk/src/platform/types/identity.rs | 34 +++++- packages/rs-sdk/tests/fetch/identity.rs | 65 +++++++++++- .../.gitkeep | 0 ...6e167d0327209295b4a98e14c6eb0d2b7e631.json | Bin 0 -> 72399 bytes ...5f90edeec5e32ae9d35ca2f654e5a8b47ef3d.json | Bin 0 -> 45144 bytes ...d441005468aa9ae346220fcc77830f30f06c7.json | Bin 0 -> 106415 bytes ...9355939311eca86a05923cfb53d11b746428a.json | Bin 0 -> 114110 bytes ...839f231c080d4ffacee80457ecc1db6a4bbbb.json | Bin 0 -> 104070 bytes ...4f02b4d71142b0d84600aa6fd73436efb869b.json | 1 + 22 
files changed, 276 insertions(+), 30 deletions(-) create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/.gitkeep create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_28e9fda4d74589e1756f49026696e167d0327209295b4a98e14c6eb0d2b7e631.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3606368b19c21647e14e80f609e5f90edeec5e32ae9d35ca2f654e5a8b47ef3d.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3796e6d3ed7346055d82e39618ad441005468aa9ae346220fcc77830f30f06c7.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_5f9f951aa5d5af07c588813f31e9355939311eca86a05923cfb53d11b746428a.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_a6d61bfcc12549cc29bf4e9abe2839f231c080d4ffacee80457ecc1db6a4bbbb.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/quorum_pubkey-106-15957be6ce59202ce3df8dbd0604f02b4d71142b0d84600aa6fd73436efb869b.json diff --git a/packages/dapi-grpc/build.rs b/packages/dapi-grpc/build.rs index 95c6a3682b6..4423d8f394c 100644 --- a/packages/dapi-grpc/build.rs +++ b/packages/dapi-grpc/build.rs @@ -63,7 +63,7 @@ fn configure_platform(mut platform: MappingConfig) -> MappingConfig { // Derive features for versioned messages // // "GetConsensusParamsRequest" is excluded as this message does not support proofs - const VERSIONED_REQUESTS: [&str; 40] = [ + const VERSIONED_REQUESTS: [&str; 41] = [ "GetDataContractHistoryRequest", "GetDataContractRequest", "GetDataContractsRequest", @@ -75,6 +75,7 @@ fn configure_platform(mut platform: MappingConfig) -> MappingConfig { "GetIdentityContractNonceRequest", "GetIdentityBalanceAndRevisionRequest", 
"GetIdentityBalanceRequest", + "GetIdentityByNonUniquePublicKeyHashRequest", "GetIdentityByPublicKeyHashRequest", "GetIdentityKeysRequest", "GetIdentityRequest", @@ -110,6 +111,9 @@ fn configure_platform(mut platform: MappingConfig) -> MappingConfig { // - "GetConsensusParamsResponse" // - "GetStatusResponse" // + // The following responses are excluded as they need custom proof handling: + // - "GetIdentityByNonUniquePublicKeyHashResponse" + // // "GetEvonodesProposedEpochBlocksResponse" is used for 2 Requests const VERSIONED_RESPONSES: [&str; 39] = [ "GetDataContractHistoryResponse", diff --git a/packages/rs-dapi-client/src/transport/grpc.rs b/packages/rs-dapi-client/src/transport/grpc.rs index d1337142bdc..57d8c28866c 100644 --- a/packages/rs-dapi-client/src/transport/grpc.rs +++ b/packages/rs-dapi-client/src/transport/grpc.rs @@ -490,6 +490,15 @@ impl_transport_request_grpc!( get_status ); +// rpc getIdentityByNonUniquePublicKeyHash(GetIdentityByNonUniquePublicKeyHashRequest) returns (GetIdentityByNonUniquePublicKeyHashResponse); +impl_transport_request_grpc!( + platform_proto::GetIdentityByNonUniquePublicKeyHashRequest, + platform_proto::GetIdentityByNonUniquePublicKeyHashResponse, + PlatformGrpcClient, + RequestSettings::default(), + get_identity_by_non_unique_public_key_hash +); + // rpc getIdentityTokenBalances(GetIdentityTokenBalancesRequest) returns (GetIdentityTokenBalancesResponse); impl_transport_request_grpc!( platform_proto::GetIdentityTokenBalancesRequest, diff --git a/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs b/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs index 28147c64bea..637638faddc 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs @@ -110,6 +110,10 @@ 
impl Platform { transaction: TransactionArg, platform_version: &PlatformVersion, ) -> Result<(), Error> { + let mut rng = StdRng::seed_from_u64(0u64); + let non_unique_key = + IdentityPublicKey::random_voting_key_with_rng(11, &mut rng, platform_version)?; + for id in [IDENTITY_ID_1, IDENTITY_ID_2, IDENTITY_ID_3] { // Create identity without keys let mut identity = Identity::create_basic_identity(id, platform_version)?; @@ -117,7 +121,9 @@ impl Platform { // Generate keys let seed = id.to_buffer()[0]; let mut rng = StdRng::seed_from_u64(seed as u64); - let keys = IdentityPublicKey::main_keys_with_random_authentication_keys_with_private_keys_with_rng(3, &mut rng, platform_version)?; + let mut keys = IdentityPublicKey::main_keys_with_random_authentication_keys_with_private_keys_with_rng(3, &mut rng, platform_version)?; + // every identity has the same non-unique key + keys.push(non_unique_key.clone()); for (key, private_key) in keys.iter() { let private_key = hex::encode(private_key); diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs index 33403fad6a5..9a1c7c104d5 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs @@ -7,7 +7,6 @@ use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request: use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::Version as ResponseVersion; use dapi_grpc::platform::v0::{ GetIdentityByNonUniquePublicKeyHashRequest, GetIdentityByNonUniquePublicKeyHashResponse, - GetIdentityByPublicKeyHashResponse, }; use dpp::version::PlatformVersion; @@ -28,7 +27,8 @@ impl Platform { ), )); }; - + // TODO why `identity_by_unique_public_key_hash`? 
+ // Shouldn't we rename or add new field like `identity_by_non_unique_public_key_hash`? let feature_version_bounds = &platform_version .drive_abci .query diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs index a9a3ac7dd42..f31423a2aea 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs @@ -38,7 +38,7 @@ impl Platform { ) .map(|bytes| bytes.0) .map_err(|_| QueryError::InvalidArgument( - "public key hash must be 20 bytes long".to_string() + "start_after must be 32 bytes long identity ID".to_string() )))) } else { None @@ -129,6 +129,33 @@ mod tests { )); } + #[test] + fn test_invalid_start_after() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let negative_tests: Vec<&[u8]> = vec![&[0u8; 4], &[0u8; 20], &[0u8; 64]]; + + for test in negative_tests { + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: vec![0; 20], + start_after: Some(test.to_vec()), + prove: false, + }; + + let result = platform + .query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert!( + matches!( + result.errors.as_slice(), + [QueryError::InvalidArgument(msg)] if msg == &"start_after must be 32 bytes long identity ID".to_string()), + "errors: {:?}", + result.errors, + ); + } + } + #[test] fn test_identity_not_found() { let (platform, state, version) = setup_platform(None, Network::Testnet, None); diff --git a/packages/rs-drive-proof-verifier/src/proof.rs b/packages/rs-drive-proof-verifier/src/proof.rs index 6be02632ec1..a89f747e3af 100644 --- a/packages/rs-drive-proof-verifier/src/proof.rs +++ 
b/packages/rs-drive-proof-verifier/src/proof.rs @@ -15,7 +15,10 @@ use dapi_grpc::platform::v0::get_protocol_version_upgrade_vote_status_request::{ self, GetProtocolVersionUpgradeVoteStatusRequestV0, }; use dapi_grpc::platform::v0::security_level_map::KeyKindRequestType as GrpcKeyKind; -use dapi_grpc::platform::v0::{get_contested_resource_identity_votes_request, get_data_contract_history_request, get_data_contract_request, get_data_contracts_request, get_epochs_info_request, get_evonodes_proposed_epoch_blocks_by_ids_request, get_evonodes_proposed_epoch_blocks_by_range_request, get_group_actions_request, get_group_info_request, get_group_infos_request, get_identities_balances_request, get_identities_contract_keys_request, get_identity_balance_and_revision_request, get_identity_balance_request, get_identity_by_public_key_hash_request, get_identity_contract_nonce_request, get_identity_keys_request, get_identity_nonce_request, get_identity_request, get_path_elements_request, get_prefunded_specialized_balance_request, GetContestedResourceVotersForIdentityRequest, GetContestedResourceVotersForIdentityResponse, GetGroupActionSignersRequest, GetGroupActionSignersResponse, GetGroupActionsRequest, GetGroupActionsResponse, GetGroupInfoRequest, GetGroupInfoResponse, GetGroupInfosRequest, GetGroupInfosResponse, GetPathElementsRequest, GetPathElementsResponse, GetProtocolVersionUpgradeStateRequest, GetProtocolVersionUpgradeStateResponse, GetProtocolVersionUpgradeVoteStatusRequest, GetProtocolVersionUpgradeVoteStatusResponse, Proof, ResponseMetadata}; +use dapi_grpc::platform::v0::{ + get_contested_resource_identity_votes_request, get_data_contract_history_request, get_data_contract_request, get_data_contracts_request, get_epochs_info_request, get_evonodes_proposed_epoch_blocks_by_ids_request, get_evonodes_proposed_epoch_blocks_by_range_request, get_identities_balances_request, get_identities_contract_keys_request, get_identity_balance_and_revision_request, 
get_identity_balance_request, get_identity_by_non_unique_public_key_hash_request, + get_identity_by_public_key_hash_request, get_identity_contract_nonce_request, get_identity_keys_request, get_identity_nonce_request, get_identity_request, get_path_elements_request, get_prefunded_specialized_balance_request, GetContestedResourceVotersForIdentityRequest, GetContestedResourceVotersForIdentityResponse, GetPathElementsRequest, GetPathElementsResponse, GetProtocolVersionUpgradeStateRequest, GetProtocolVersionUpgradeStateResponse, GetProtocolVersionUpgradeVoteStatusRequest, GetProtocolVersionUpgradeVoteStatusResponse, Proof, ResponseMetadata +}; use dapi_grpc::platform::{ v0::{self as platform, key_request_type, KeyRequestType as GrpcKeyType}, VersionedGrpcResponse, @@ -36,6 +39,7 @@ use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; use dpp::version::PlatformVersion; use dpp::voting::votes::Vote; +use drive::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; use drive::drive::identity::key::fetch::{ IdentityKeysRequest, KeyKindRequestType, KeyRequestType, PurposeU8, SecurityLevelU8, }; @@ -339,6 +343,98 @@ impl FromProof for Identity { } } +impl FromProof for Identity { + type Request = platform::GetIdentityByNonUniquePublicKeyHashRequest; + type Response = platform::GetIdentityByNonUniquePublicKeyHashResponse; + fn maybe_from_proof_with_metadata<'a, I: Into, O: Into>( + request: I, + response: O, + _network: Network, + platform_version: &PlatformVersion, + provider: &'a dyn ContextProvider, + ) -> Result<(Option, ResponseMetadata, Proof), Error> + where + Self: Sized + 'a, + { + let request = request.into(); + let response = response.into(); + // Parse response to read proof and metadata + // note that proof in this case is different + // let proof = response.proof().or(Err(Error::NoProofInResult))?; + use 
platform::get_identity_by_non_unique_public_key_hash_response::{ + get_identity_by_non_unique_public_key_hash_response_v0::Result as V0Result, Version::V0, + }; + + let (proved_response, mtd) = match response.version { + Some(V0(v0)) => { + let proof = if let V0Result::Proof(p) = v0.result.ok_or(Error::NoProofInResult)? { + p + } else { + return Err(Error::NoProofInResult); + }; + + (proof, v0.metadata.ok_or(Error::EmptyResponseMetadata)?) + } + _ => return Err(Error::EmptyResponseMetadata), + }; + + // let mtd = response.metadata().or(Err(Error::EmptyResponseMetadata))?; + + let (public_key_hash, after_identity) = match request.version.ok_or(Error::EmptyVersion)? { + get_identity_by_non_unique_public_key_hash_request::Version::V0(v0) => { + let public_key_hash = + v0.public_key_hash + .try_into() + .map_err(|_| Error::RequestError { + error: "Invalid public key hash length".to_string(), + })?; + + let after = v0 + .start_after + .map(|a| { + a.try_into().map_err(|_| Error::RequestError { + error: "Invalid start_after length".to_string(), + }) + }) + .transpose()?; + (public_key_hash, after) + } + }; + + // we need to convert some data to handle non-default proof structure for this response + let proof = proved_response + .grovedb_identity_public_key_hash_proof + .ok_or(Error::NoProofInResult)?; + + let proof_tuple = IdentityAndNonUniquePublicKeyHashDoubleProof { + identity_proof: proved_response.identity_proof_bytes, + identity_id_public_key_hash_proof: proof.grovedb_proof.clone(), + }; + + // Extract content from proof and verify Drive/GroveDB proofs + let (root_hash, maybe_identity) = + Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof_tuple, + public_key_hash, + after_identity, + platform_version, + ) + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), + })?; + + 
verify_tenderdash_proof(&proof, &mtd, &root_hash, provider)?; + + Ok((maybe_identity, mtd.clone(), proof)) + } +} + impl FromProof for IdentityPublicKeys { type Request = platform::GetIdentityKeysRequest; type Response = platform::GetIdentityKeysResponse; diff --git a/packages/rs-drive-verify-c-binding/src/lib.rs b/packages/rs-drive-verify-c-binding/src/lib.rs index 220bb70cc13..cb32ff6624b 100644 --- a/packages/rs-drive-verify-c-binding/src/lib.rs +++ b/packages/rs-drive-verify-c-binding/src/lib.rs @@ -100,7 +100,7 @@ pub unsafe extern "C" fn verify_full_identity_by_identity_id( } #[no_mangle] -pub unsafe extern "C" fn verify_identity_id_by_public_key_hash( +pub unsafe extern "C" fn verify_identity_id_by_unique_public_key_hash( proof_array: *const u8, proof_len: usize, is_proof_subset: bool, @@ -109,8 +109,11 @@ pub unsafe extern "C" fn verify_identity_id_by_public_key_hash( let proof = unsafe { slice::from_raw_parts(proof_array, proof_len) }; let public_key_hash = unsafe { std::ptr::read(public_key_hash) }; - let verification_result = - Drive::verify_identity_id_by_public_key_hash(proof, is_proof_subset, public_key_hash); + let verification_result = Drive::verify_identity_id_by_unique_public_key_hash( + proof, + is_proof_subset, + public_key_hash, + ); match verification_result { Ok((root_hash, maybe_identity_id)) => { @@ -680,13 +683,13 @@ mod tests { } #[test] - fn verify_identity_id_by_public_key_hash() { + fn verify_identity_id_by_unique_public_key_hash() { let proof = multiple_identity_proof(); let public_key_hash: PublicKeyHash = [ 31, 8, 21, 38, 154, 252, 1, 45, 228, 66, 96, 206, 178, 138, 68, 150, 211, 24, 65, 132, ]; let (_root_hash, maybe_identity_id) = - Drive::verify_identity_id_by_public_key_hash(proof, true, public_key_hash) + Drive::verify_identity_id_by_unique_public_key_hash(proof, true, public_key_hash) .expect("should verify"); let expected_identity_id: [u8; 32] = [ 15, 126, 159, 152, 150, 254, 206, 186, 180, 193, 157, 65, 233, 215, 241, 108, 
23, 39, diff --git a/packages/rs-drive/Cargo.toml b/packages/rs-drive/Cargo.toml index ff2d86357d4..48ae1471b90 100644 --- a/packages/rs-drive/Cargo.toml +++ b/packages/rs-drive/Cargo.toml @@ -52,12 +52,12 @@ enum-map = { version = "2.0.3", optional = true } intmap = { version = "3.0.1", features = ["serde"], optional = true } chrono = { version = "0.4.35", optional = true } itertools = { version = "0.13", optional = true } -grovedb = { git = "https://github.com/dashpay/grovedb", rev= "f89e03e4e0ac12aa2feea5c94b38c09f4909facc", optional = true, default-features = false } -grovedb-costs = { git = "https://github.com/dashpay/grovedb", rev= "f89e03e4e0ac12aa2feea5c94b38c09f4909facc", optional = true } -grovedb-path = { git = "https://github.com/dashpay/grovedb", rev= "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } -grovedb-storage = { git = "https://github.com/dashpay/grovedb", rev= "f89e03e4e0ac12aa2feea5c94b38c09f4909facc", optional = true } -grovedb-version = { git = "https://github.com/dashpay/grovedb", rev= "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } -grovedb-epoch-based-storage-flags = { git = "https://github.com/dashpay/grovedb", rev= "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } +grovedb = { git = "https://github.com/dashpay/grovedb", rev = "f89e03e4e0ac12aa2feea5c94b38c09f4909facc", optional = true, default-features = false } +grovedb-costs = { git = "https://github.com/dashpay/grovedb", rev = "f89e03e4e0ac12aa2feea5c94b38c09f4909facc", optional = true } +grovedb-path = { git = "https://github.com/dashpay/grovedb", rev = "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } +grovedb-storage = { git = "https://github.com/dashpay/grovedb", rev = "f89e03e4e0ac12aa2feea5c94b38c09f4909facc", optional = true } +grovedb-version = { git = "https://github.com/dashpay/grovedb", rev = "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } +grovedb-epoch-based-storage-flags = { git = "https://github.com/dashpay/grovedb", rev = "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } [dev-dependencies] 
criterion = "0.5" @@ -85,6 +85,7 @@ name = "benchmarks" harness = false [features] + default = ["full", "verify", "fixtures-and-mocks", "cbor_query"] grovedbg = ["grovedb/grovedbg"] fee-distribution = ["dpp/fee-distribution"] @@ -117,4 +118,9 @@ server = [ full = ["server", "ciborium", "serde", "bs58", "tempfile", "base64", "chrono"] cbor_query = ["ciborium", "dpp/platform-value-cbor", "dpp/cbor"] grovedb_operations_logging = [] -verify = ["grovedb/verify", "grovedb-costs", "dpp/state-transitions", "dpp/system_contracts"] +verify = [ + "grovedb/verify", + "grovedb-costs", + "dpp/state-transitions", + "dpp/system_contracts", +] diff --git a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs index 6e1b1e4b65d..846a260eee1 100644 --- a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs @@ -1,9 +1,7 @@ use crate::drive::balances::balance_path_vec; use crate::drive::identity::key::fetch::IdentityKeysRequest; -use crate::drive::{ - identity_tree_path_vec, non_unique_key_hashes_tree_path, non_unique_key_hashes_tree_path_vec, - unique_key_hashes_tree_path_vec, Drive, -}; +use crate::drive::non_unique_key_hashes_tree_path_vec; +use crate::drive::{identity_tree_path_vec, unique_key_hashes_tree_path_vec, Drive}; use std::ops::RangeFull; use crate::error::Error; @@ -102,8 +100,9 @@ impl Drive { let non_unique_key_hashes = non_unique_key_hashes_tree_path_vec(); let mut query = Query::new_single_key(public_key_hash.to_vec()); let sub_query = if let Some(after) = after { - Query::new_single_query_item(QueryItem::RangeFrom(after.to_vec()..)) + Query::new_single_query_item(QueryItem::RangeAfter(after.to_vec()..)) } else { + // TODO: why not limit 1? 
Query::new_range_full() }; query.set_subquery(sub_query); diff --git a/packages/rs-drive/src/drive/mod.rs b/packages/rs-drive/src/drive/mod.rs index ce1fb9a74e5..9e11bddcd25 100644 --- a/packages/rs-drive/src/drive/mod.rs +++ b/packages/rs-drive/src/drive/mod.rs @@ -261,7 +261,7 @@ pub(crate) fn non_unique_key_hashes_tree_path() -> [&'static [u8]; 1] { } /// Returns the path to the masternode key hashes. -#[cfg(feature = "server")] +#[cfg(any(feature = "server", feature = "verify"))] pub(crate) fn non_unique_key_hashes_tree_path_vec() -> Vec> { vec![vec![ RootTree::NonUniquePublicKeyKeyHashesToIdentities as u8, @@ -278,7 +278,7 @@ pub(crate) fn non_unique_key_hashes_sub_tree_path(public_key_hash: &[u8]) -> [&[ } /// Returns the path to the masternode key hashes sub tree. -#[cfg(feature = "server")] +#[cfg(any(feature = "server", feature = "verify"))] pub(crate) fn non_unique_key_hashes_sub_tree_path_vec(public_key_hash: [u8; 20]) -> Vec> { vec![ vec![RootTree::NonUniquePublicKeyKeyHashesToIdentities as u8], diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs index ccb81c1d6fb..7f500fc7c8d 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -66,7 +66,7 @@ impl Drive { let Some(identity_proof) = &proof.identity_proof else { return Err(Error::Proof(ProofError::IncompleteProof("identity is not in proof even though identity id is set from non unique public key hash"))); }; - println!("hex {}", hex::encode(&identity_proof)); + Self::verify_full_identity_by_identity_id( identity_proof.as_slice(), false, diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs 
b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs index 88fe2894242..342df383e39 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs @@ -18,7 +18,7 @@ impl Drive { /// The `public_key_hash` should contain the hash of the public key of the user. /// /// The function first verifies the identity ID associated with the given public key hash - /// by calling `verify_identity_id_by_public_key_hash()`. It then uses this identity ID to verify + /// by calling `verify_identity_id_by_unique_public_key_hash()`. It then uses this identity ID to verify /// the full identity by calling `verify_full_identity_by_identity_id()`. /// /// # Returns diff --git a/packages/rs-platform-version/Cargo.toml b/packages/rs-platform-version/Cargo.toml index a00c9f92057..70e92452683 100644 --- a/packages/rs-platform-version/Cargo.toml +++ b/packages/rs-platform-version/Cargo.toml @@ -11,7 +11,7 @@ license = "MIT" thiserror = { version = "1.0.63" } bincode = { version = "2.0.0-rc.3" } versioned-feature-core = { git = "https://github.com/dashpay/versioned-feature-core", version = "1.0.0" } -grovedb-version = { git = "https://github.com/dashpay/grovedb", rev= "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } +grovedb-version = { git = "https://github.com/dashpay/grovedb", rev = "f89e03e4e0ac12aa2feea5c94b38c09f4909facc" } once_cell = "1.19.0" [features] diff --git a/packages/rs-sdk/src/platform/types/identity.rs b/packages/rs-sdk/src/platform/types/identity.rs index 4b7b7754d0f..963dcaa45f9 100644 --- a/packages/rs-sdk/src/platform/types/identity.rs +++ b/packages/rs-sdk/src/platform/types/identity.rs @@ -9,6 +9,7 @@ use crate::{ use dapi_grpc::platform::v0::get_identities_balances_request::GetIdentitiesBalancesRequestV0; use 
dapi_grpc::platform::v0::get_identity_balance_and_revision_request::GetIdentityBalanceAndRevisionRequestV0; use dapi_grpc::platform::v0::get_identity_balance_request::GetIdentityBalanceRequestV0; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request::GetIdentityByNonUniquePublicKeyHashRequestV0; use dapi_grpc::platform::v0::get_identity_by_public_key_hash_request::GetIdentityByPublicKeyHashRequestV0; use dapi_grpc::platform::v0::get_identity_contract_nonce_request::GetIdentityContractNonceRequestV0; use dapi_grpc::platform::v0::get_identity_nonce_request::GetIdentityNonceRequestV0; @@ -31,7 +32,8 @@ delegate_enum! { IdentityResponse, Identity, (GetIdentity,proto::GetIdentityRequest,proto::GetIdentityResponse), - (GetIdentityByPublicKeyHash, proto::GetIdentityByPublicKeyHashRequest, proto::GetIdentityByPublicKeyHashResponse) + (GetIdentityByPublicKeyHash, proto::GetIdentityByPublicKeyHashRequest, proto::GetIdentityByPublicKeyHashResponse), + (GetIdentityByNonUniquePublicKeyHash, proto::GetIdentityByNonUniquePublicKeyHashRequest, proto::GetIdentityByNonUniquePublicKeyHashResponse) } impl Query for dpp::prelude::Identifier { @@ -74,6 +76,36 @@ impl Query for PublicKeyHash { } } +/// Non-unique public key hash that can be used as a [Query] to find an identity. 
+#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NonUniquePublicKeyHashQuery { + pub key_hash: [u8; 20], + pub after: Option<[u8; 32]>, +} + +impl Query for NonUniquePublicKeyHashQuery { + fn query(self, prove: bool) -> Result { + if !prove { + unimplemented!("queries without proofs are not supported yet"); + } + + let request = proto::GetIdentityByNonUniquePublicKeyHashRequest { + version: Some( + proto::get_identity_by_non_unique_public_key_hash_request::Version::V0( + GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: self.key_hash.to_vec(), + start_after: self.after.map(|a| a.to_vec()), + prove, + }, + ), + ), + } + .into(); + + Ok(request) + } +} + impl Query for dpp::prelude::Identifier { fn query(self, prove: bool) -> Result { if !prove { diff --git a/packages/rs-sdk/tests/fetch/identity.rs b/packages/rs-sdk/tests/fetch/identity.rs index ad5a458960f..dd564623f4a 100644 --- a/packages/rs-sdk/tests/fetch/identity.rs +++ b/packages/rs-sdk/tests/fetch/identity.rs @@ -1,4 +1,4 @@ -use dash_sdk::platform::types::identity::PublicKeyHash; +use dash_sdk::platform::types::identity::{NonUniquePublicKeyHashQuery, PublicKeyHash}; use dash_sdk::platform::{Fetch, FetchMany}; use dpp::identity::accessors::IdentityGettersV0; use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; @@ -116,3 +116,66 @@ async fn test_identity_public_keys_all_read() { assert_eq!(id, pubkey.id()); } } + +/// Given some non-unique public key, when I fetch identity that uses this key, I get associated identities containing this key. 
+#[tokio::test(flavor = "multi_thread", worker_threads = 1)] +async fn test_fetch_identity_by_non_unique_public_keys() { + setup_logs(); + + let cfg = Config::new(); + let id: dpp::prelude::Identifier = cfg.existing_identity_id; + + let sdk = cfg + .setup_api("test_fetch_identity_by_non_unique_public_keys") + .await; + + // First, fetch an identity to get a non-unique public key + let identity = Identity::fetch(&sdk, id) + .await + .expect("fetch identity") + .expect("found identity"); + + let pubkeys: Vec<_> = identity + .public_keys() + .iter() + .filter(|public_key| !public_key.1.key_type().is_unique_key_type()) + .collect(); + + assert_ne!( + pubkeys.len(), + 0, + "identity must have at least one non-unique public key" + ); + + for non_unique_key in pubkeys.iter() { + let key_hash = non_unique_key.1.public_key_hash().expect("public key hash"); + let mut query = NonUniquePublicKeyHashQuery { + key_hash, + after: None, + }; + + // Now fetch identities by this non-unique public key hash + let mut count = 0; + while let Some(found) = Identity::fetch(&sdk, query) + .await + .expect("fetch identities by non-unique key hash") + { + count += 1; + tracing::debug!( + ?found, + ?key_hash, + ?count, + "fetched identities by non-unique public key hash" + ); + + query = NonUniquePublicKeyHashQuery { + key_hash, + after: Some(*found.id().as_bytes()), + }; + } + assert_eq!( + count, 3, + "expected exactly 3 identities with this non-unique public key" + ); + } +} diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/.gitkeep b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_28e9fda4d74589e1756f49026696e167d0327209295b4a98e14c6eb0d2b7e631.json 
b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_28e9fda4d74589e1756f49026696e167d0327209295b4a98e14c6eb0d2b7e631.json new file mode 100644 index 0000000000000000000000000000000000000000..fee35a717a9328a88eca8b555e2eeaf385b6de42 GIT binary patch literal 72399 zcmeI4(Qcd95rli~r!aiY3sMp-GcQo&st*w4F1j!ZB!%k)C=9=QzB%VerfRF^d}@fm zAwb8L?B(v6nVsG9Q|*TrFJHd;``!ERf4ckm{oD8ddG+1P4}6Pn{&x57-*12U`QaP+ z*7oQtzJB`?U;i-vc|u;k+-)vj{Mj&gVC2V7^7ZC$%OAJnH|plAli%ml zyHR+cNa5RHk%PSc^hSBd?PkB<>4bm|NKqncTx|n^g!0~h$wpzbxh@dB|7BmClOf|c9i*4{U3#-p|7L9laz22C_)gs zs8A##1Z|%x@5hIlT5QpXXN=~q1L^*q0YAi@XjDfBTy^#t6NtN>Xs|~a1IvlC;&BwY zu*Me8qyw2ga3OP|C4OG&SKa9-WOi=qWM;)7iXKl?pKHO=6LIErsdJ1(0}Eg(AIlj9 zGE8qrpVepT6fNJYB@?0V^wc)!bKHreC7trmOb2BVc(t%=H;fkDf{0$0qHzL6uSTOQ z5{dQ{we{}dK!|Aju3T#Gb(_Fvc#fp3pQI_0dtASq(Iz%<}9j6#r@bo0?o z$^IZ-tyN*cSp;?VjgWLa9PA}6z|6%{xHM5}my4uJ7mApN5w(F6Mj_?|_N+O|R<0hF z1ft2l3moGUOalu=3Xdg9wQ6n|J$E@BlJ3DIu$AT1v%n-psyo!wN%FR>r+Qq@M^4x4 zMBpD7ao1iX0bi}bSdfrba`l5ar$fnt>oLK_ESPu-B6K5Aq#z=s_X0yFCTh+sVGfWk zqO@Diz`o4`i-;nfT5O^*#)0a-s6{lp+HjJn@7egXXjmnc*{u>u3{WhkoQ@4sz{0go zG%QhSj)#Rl-BPB21z8HxpVL9AVcVS{fT-t1vkM#rnXo;Wt5gRIdUi$)?@nfPUw>1} z8Py^YhwuHC5Yc@ZCA~ic|2`@*!ygoPiRuFhiEX+-Sa8B-ehE_o9OkU*2v=u1x0hKU z1s2D?yH|pY%9v8YvuB@lqTgqcA~1D=Og}mk*Et%pbeOp9QHdmVs#|aZ8PNTlja0-fLt!#WH?2A>MbsHs6cJ%TjG&|^!4dtDZQivNIxHmik^E(CmpAA z-p-oZduB2!ak^afc;!%{ReR)}JkA6HbFCL4DiW87=z&0|Kh6r~+ys^LOBxJtY&!+}=@_>$aoLsa6U^@TTzN+X;1L_gb`~PY^C78tafkfri zJIo5~Nlc*IG)AfI%nF0Fe9uL|WW(&QFv-|Q1(SY{N-nj4C6LL|J7Gec%-rW(y~CTK z;6@1m_PRp<(Uo|jsG2ln2@I$B`_s8x0>%iKz807;2>Np^*Xr$VHJUJlK4j(U$8iFO zEgN^me-tc9+B;tAOC#I_kxM4)$*2feX}pYI3jwrB}+)>}4q$2S5xacI60|jsmz7 zdM2G)1E3rG5*by)rpR7ki4uO^hon_OvafIk)>i2#qu2Vuk#$IHHK2hY`@r}lV7j?a zy543dae})9D#OeH`yO{u;#>wpaOlOK5TFp66!K6N3Meiit(#pCyC7EmKMP$1a}mrp zA{g;Ntlv(>Xg*tuQJc6#i1NgmAs%*}CnPstvFNIsEQ#_2Ph9lWC1s`Scftg6RndEO zk7qLSd`3KlK=XhjmRJ^M;TR*G354_>mUJ{PVN+nz(}yyJIKv%9I+>ACoNmI>XIyyp 
z4VW|P$7{4wFKnZ+c?T4{<7R?|XFMWkm#*}H72;`WL5*NYso=AkymbOdw#YRhD$7 zRWWB8f^rEjvCV@ceByIQ1BrRJnk{+dsK(90>>`|(KG!lh<#|4i}T_BF#kH1NW z48&*pQiF`LwTWsgo7EIbsLAL`#WZ>@QYwLBSR?P=5@#E_KzKhJ^`Ss1EqFRuT2I42 zg}_w9dXcD&PPafP>%rF-XVGrSB`TalpXfP5vfR5M=#RU(>1q=Ri@SteK>kL4WVD~}7x7=j|5p}2Q|UYtFpW0W{#I`po&8Fd zbFsAahLAx0Se0G6-+2Oyy|wvLfz*Mc9_HkXd<53-TDgCTE5jgBq+47#65S?0kZGGD zb#j9ictT%)nFA25dI(haX@PZuJ%j=4*mh|(58 zph!1Jm&Qv*(L|!tm8BS>L8C8EJQ;9Ii4&b6M6|F#EN#d*6ZQ+I{SZt#RM-2AdsvaE zUU;%X08!&FnDqE3%J~vIk7|7RU{Dl^AjSLwe{%KG3X;T%jA}F_o$ffe6L`8lWd^Bd zvo0zW^s)?VY&#;5hq_|O(x<(^NF>x1owx;QVj}LR$Wo|F?~SNS2~5yEo9;YQ+qqEO zwHM?RiO^&Mm3^&_a|6ptC5(%{n~fkA0K5ordfSYNsTq`s&^K%_!TsC;*;RTH!#>LC zg&JfOq)FGCQld!bkfBtL+5Tt>qggQ+WcoTo{d|>mE@Z?3Cxb#W5YZzOSoEgqerSq} zLf8~3Fd1OB8Vh%{t>BK~B6cK-B%KKC0g0G}k|DZB!)+a=!hXFmR?ufL2wbwX`V@%K z;JsDG7JB0{dUj2Uji>>a1+2$VxUqqvFU*_;`xVPiUz-~%;zXB_cFbju?dPcpp-;!e zuPdB^eUULPx25+ciLVNi3?Nk4_n0i9DW~&%J|NPm)1^r9 z#Ed9sLA~7>4GZ){VZAE~BNfMYZ736onbNa!9^B59MI65m$tKfF+E_6H=o=vh-YX|j z;<=XI4H((|6*%V-fQ)5HU_!3!6#`5Y;ym1qi~)twOkn^*#$)8;Ot`v3@Y=({xs zq+xr3i8aZDP@XV7rI5O#`~DILNcR?-5;{N6iN2vMf>05X8P`i%gv3Bx$1ynY5bZzW zbYoO8RqM7@d(=e6a7UdgW1{-;F_#nJ=jnYOuc=Ss;!!SeByAatMB%*`+d5;vE+!)f z>@&_%9gmuhBz0#L&OCCNHl#0vyfF$f+JwEZgNgEH6ZP9vn!WF!^j&|n$bxI zK3yD%)Ag97PIse+IMJ%Tu7nilp<2fiup0yb?JT%O554>Z+R>hHLZJi&ow3cn0Ud6w zL}iv9>ToKqgzk1TtOsC@8rTsl<_LI4)vfZlI~Bp1fx!ubUp893K@DngU1@oS$+FP3sdU; zC^kd6KlJhw=v_i;RbP1N2loZ!zw-ab)P?;&Bm4cbk=3;9eGzh=l_qmsATVzPV&70t zfSJy@xu|_Ifuhd27xhG^RLhbdvU^@BE4;dJR9#5Ma;^P639z{Nu5H>>SL@_*kiIRMVoDZx?~)9 z)}+rd)Hf{Y>Ri}tPj{Sw)AfM?L?Nlu;b5Z9q{EH`1--zMsE=9{^o@{o?wP0i{EMi5 zQCax+Qb`M_CqK!^L){+F1iDd9FEE#TF4h@)RMUMN&~0%J8H#l2RQF9T_-}24r0dNy z=@@)H5eZZuJBlLH@4Zn^Bwb(_?@7S00L5zT<|MKITl;`$0z!xI`d+hyc71^gtAfrl&WOCPb&1AvRCpQfasB9tN^rp8U z?MaNx`Z_aDMXu>xy`|)IZlP5&OF9M1tka0I^ar63LNsOKM2N~6tg!`-)|)wGtyc+t zSVsT7?*)Gzf%d+ajA~~N=?a<9Cv1TRcWVaS-NOQ&c?lx)MX3iH>H;>4d8J5+*axa_X^ 
zb<3|Ezw$MI-X1oGJ-^*-4*8q;$7|Ssv$?+79Qc>p{Ncn`CQE<03;XJGt^|%#VQ1x%@R}2WZEZOJ7fu$FDq7)00cPSW$XK%d5JV67?73zYx-gR?_?O6-#w2w$P zTA=PYbu?Guj7qDP8+F#{ULirU=Q>5YyCmvT&ty_3Nvo+boIiD1feB<{CKT!B2!`2j zI};X?o;d9K$(y;FUzxz$Ici_e0h~d+-EH*9P3>s5M=LPvsf+uHYZ*EcO=;z@4k+|a zMs;CLMgsd@2`um^76cd+MBpE;OGwunWOkpPg#a?CZ(Uri21%#PBd1efW|0)_z_jm& zc24J}4VZ``!9;WcvJ_F2$N23haER;E1xfWZH&K17t}f^qPoD}*P_h^$An9zuHjXIL zBMJCDCNo{TfhLM2RKel{ds)UzgeK>F>}NsJ^_UQ1igY8Dqm9!U#>LTC6-0NBB88YU z0z^9eyytqgfbbJe*Kbfs_l&t?WZ4%PX$g_Q8phRoSLhxKLn~%CJ2k^64>t>DEyC)F>YF{hyW6WXC3h=?ZUG1t64(%4WC@nOyh&hj!ZrbO_}`S{SF8IcsoBO z%_CeM-_B1-|1Teya&zv@I#BX*I%6a6H^M2G4#n}=K>~Ts<*osvo$9>@mlu$t6fufa z@4;Pq&$&ED>*bN05rw<V}1ZcbIDhLd>Wf3ERoyc+k!|;FaIq$rZcuP^C zpA)zRLV%9UkKxRmIdf*-OUC;6;_B-9kNdkn|7-u|?)BY=uRr|d?VG>9dHwVI{omgI zb>^82#=w%OYL z>T0nW4(_cMhxewN)$HziJ->T%cz15O+#WtRt!9>sX*YXzJBwjjj)%{#XE7|snbT=G z9lmy2&0aihW>IWruf3VwTTFAyX?#HCyGIY#@9tjR-o1SF@4Nl2Kt9BA`S_}ecen5N{RN-ePyhGRi%&29@Z#h3Uw^p%>f33t9HwztF4}E1Y$x5- zpJ_8}{mj_!>Zcx>#&!K%Ov6TZ^<#UZM{F<-H-6Fv{wd>W+l+5)yB&6Vp@{8gce`P$ z*VvEWVKMCV%E^zchUIEl-#D0gWcTq+li$MrR^7zT%Kx*bhi|ZwgXi4TYacAeU3a-@ zfeMorZ@YK)`#ipz0(SyNX*9%6Wb+u1-*FryLu;9=26g*meGUUV{QJs~77X?7y9Tn~ z+o86CC~AAGa{1+BmB%%1&-_YUtDPg#P3WB&R2534RDqhugYqiJbKm8jc)boSSf{(5 zf#&!cX#UIQ{r; z)n8i49$RJ-FrAS`d|u{O{}%0fX<{|=`+h&#!iH6^y3g~xH>tOEJQf_krQ_-vVP;pMTC<(jvQ|r1`@dzXVcGjcQq}HOPj$I^ zu)KF2WN~IL&w;kH`;*n}Dt}apb36LT&_cCzSDnT1BXewcZciO~w0xN?pAQkuQNjZ~ zo+-1(IjSf$&2v@x$SE(S=0K9^nG^CTV9&BMN6?Y2lJz{HAlqD8tj3{Kv^nKfl=b9U z%EK1sG;4<~+|C@PPnJwu0A-6^PXSR?FUl4te#6V%Bd79Z^V<3_SKI!@IOBTJ$lV~= zw#Y_HX1}^oS;K~+D3=9w2ky;47Sz$$uR!iAKO$%EELvrm`<6(G%&Ck!cPSEOxXpz; z6>~F<+IH6zcAHC5Fjq@XlR1)zlKf&>sFG2_V5XVm)TxUlbBeZY#k8cwL>212GfiE_ zq(-erR4fA%G!vjSc$oE$2<|o64%L|7B+P-1v=&d&&?T%o6DU@3igXhB9Co?Foqi_ znPUYOPm<9S*us1%ucA3sAY*RGkRF<>y$PR#E0*GwsTGtM4`_EWoj5(_WSLY`P|O7v z&sxO_vUcLsT!uL10z5LOVcWGp3mVh221`n?*z{siS+IH7BVu^qy~mHXvNUpW6%!^% z**u}D;{Kv6wNsTFk9hF}pH^f`yCaG@z{lZ@Vu 
zJlmh$BUp=)4JQy`15RDbHd1e0aiV}1!yNzvyoiczuoA}%kNirDXvz*-lZ$GR&6DGUs6r$6+(}j=6*@7eG8jQEe$v+PIu|sx zu)-8gE2ZlRbh)x&JUDR4sDh-{PX zNRd<5o#7ZX(Hm1G?@intG-ku4Vo{1#XC=k*%ngH2KvQ=lLqu#zh^p?U#~&9?Rm0t# zT#ZrKw7aeUG-g+d+M1|}IjFU5sksa-`fc=VH z5y3Qrk}6YMqn(V-WFZ$h@yV;HfoBjq1gWQ}m>97{$J83MST<~ez_NCC=!VCKBFFVg z5DWHs!zHRDT9nM4q&ot?RcL-`0#7<=AZ|(JD|!dy zS~C09jj{zB`V>qVp2z~6Q{lk`jdGmk#qiWWucXH>;9S6gB0zVr3*|19yHM^F2k;7^ zy~W2sx@Oy!ZcXZFH#OawY_5$WI2PGFIG&`z2WxiEo$h`vdUir_(P`oAJv3M81J-dF z36^}9EMt4MDhytSGTh^JI0_m~RB`Tff#*%C7Nc1U?M_r2Hbtzv4z8W5J9vXg(gW89 zjp|aDwE(wp9AHhuh?OQ_hMOvspFCMf87^^fcr>-bFMwSx!yf%pqNi5MdffaJ?1742 zF2k~apE=R+5@CGl{&#vARy@~J<%#l_Y*G|;n71M zhtY$Nz2id5HXL3Fw`s~2qT~ZaWlgDKw1pTwcetJFhW0BbuE_CvrebB0C+3l@+LMY> zfrbeqz_DvDmK1OqnrWFR;knbj)3KFo)VNYEv`%@rI|MjJTf>VUFfl`%Z*m1Dyviza zQhGS@VC3060KA*y#-v%hSeRfUXSid?9Jhw3Lg6a3v1^-G?#aZYn?^Eid*pOcG2qD+ zPVEUaYZptdY2;2k(4{g@g7@@E)R?x3BbQ$ayThhSj(a5OP+DvI4F-XELJu!STMKPs z9>|)}Zf(M@ly;IMP2(g9_J2OP@W{Iup1!PlDaSKcm@nX5z+tj{Va|m)j5!|Y*~+bb zJK9>=F46WkzOvRK1z%0YhQZ$G^@(oyJ`+(}GbAW+nd3^?Jh+_LgU9s}=JZ%3*Q_Od zjVL)C1RrhKb#j!GYkVRLt}?mG+PX2|?GVamsE%WJ=a(%$@~bZro~Ws_qc^U#>lG-W z3mtT&^#orFEviu5oHeyIXtcQ!xIXI(8#|yLS^y^hf_MMYd;fxW-}kR&*Dt=;@1P%N zzmomoH^{SJhTgA!pq0b;OCIScvAM{Dg`#XJ?VWh=zmWS9A5B~-W6m4!uvKZxxL85G ziY@vt+W~BzU{2>kgm&;jj|+`kLxYS~*##2R*mY|wR^lY(B<8Z^Evb!#vdd^w1#{TJ z&fs3o&Jhj(Ot*F|)RAWt#kf`CMjlVS;6PEJ3AZn@MR;BCA8!-mu=7H(sb#`T#W)S& z`cNFiSh^J929bP180?-QiF)F=d)gevs$=iT!BiTr*2n9QDxRG1==$TB<(f*hw+~t` zyKZvJBqv#X>^hJuyAA;NicODqnh$slan{LfOg3~OMkkvGGPbSFi*6GxG<9vXRV>OA z+fqU3orTv`sR2ez`O+oAXeUBMp?1K^mxi69tuke zbu`d4OEFes)hlrwG`NYXS|>cuu7jrD75x7QIxmM*oz&nFVYH;-VI*5BEG5XvEaQ@x zoXtfN43~OS=Csg8OrxJFgUv;6YVJ9ry_$|8dk+ryHV*_@=6(l?ymJo=&oRMX(>%y z=KY4gT40(ffMf!;v+EbxR8D_E(O)rGc!^nP$vF6mfuU(%xjQqyY4KS|81hM1kh`jDNsP=_QzeIn=g|{GYNiAd>_?h+N-c|49?_c&+N;}!<|DR7!IqqjtJo)q! 
OPs|?bcl6`CPyYc(FHNuj literal 0 HcmV?d00001 diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3796e6d3ed7346055d82e39618ad441005468aa9ae346220fcc77830f30f06c7.json b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3796e6d3ed7346055d82e39618ad441005468aa9ae346220fcc77830f30f06c7.json new file mode 100644 index 0000000000000000000000000000000000000000..ac0b4a4182283cad0f1a0d444703a2dde6f1369c GIT binary patch literal 106415 zcmeI5O>dl6a)q<@uMoWEg3S+6oMkqXAV88$R)c}zBnrlufjzP10Sv?cJ?DAvl|@F9 z&GS@-HYN?UB{j+FsycP*RNdQc>HAM!y!iCDuiyORf4=_m%@=RJ`}^5KpPr`O;8!{^`p;nUx|_&z_HAO6ehufO@?t1o~20$;j(@GQ^&d{}<= z$JgI|_J?KryTcay7cZ_}UOaeqbM^4q>DA4nr?)r|Zjy-@Sbl!`0;@r>E=F!=F9fJo@3&%STbXeDt%g9zDA{J+?eu{)o!&-+B1- zn>U|-{pPdJ|Le``uLJT!93Q>f`N@kHzXA+CSi(Pl{ncMyr+9C^{`U3$fgg_F`SpiS ze)#0?KKcIBfBEC5zxmzi>iXhzd2xMpJZ>)TPI;RDPA@O+_|D~in&0!K)8*~@{pxh_ zGEei{-K%^BgUgFod=mqHkMZg5Fn)#Y-Nk)=FvRA&_xBff`5FB13Rf5R`NkC9%_7i=V#)&xhJALg5DE@T7^m}Q=r4+gYqWFr@qQt;_W&d!IJLnBhZNN0F6Jq zeEYb&h_>?eeC6&U-+DTvrxTG=9q1Ld?<{y47zyJkWW?>$YxseRQMGjFTf*iKN6Fr` z%qhTZMuPa`G9UFnqP@L_Scm!Bet*cJ64%xUn7s;hG~36u?5HKx{@*fnV7dDwQgu9CJ+OGIh7i8%nvN=Y0q{k=9 z?E5*UC^pTfs`7hIeJL{sCYe5QLcSL;v+Ri@=snvc%RI3lV;(I|<5((UZh1Az`ot{t zVFz=YHDL$0Cl1pOmMmLw#qT*F_Dy*TNx+!D3WD3=0VXOsR9GQqx{AyWPC9{OlOoQasDaDGpMH|~NZPH?)iuJykmM(Mh zGKhR9u3fMBWvdqND|eZAs&?EpNb|w6MA(BthIw?!iYir`vE3oIi)F)JSu;%>&zGWF zvf(`yx6=f!W<8!r#PBE=^Fi=z*V;S^W2*-26JgBqU?(_|uE+Nv=G`Wi0^BC%s7vLjv8#b;3+MqF+HCj>zi_I>Ul?BX)JrafwyzltcRyK`1T-Agb zqzq52s?cASr8rHw^@tBo^l2rwwmU{K53u66q3N;oWMEF#8tt4HtZQ^^FzYd&@MK1B zBA@Ng^oZ7?$%Zo!VF0(TV>?ps+&Edl7sCm_0ltW;c|N;p(^;hROH2J76Q zse_fK=(JL~Pq51shU3AZOSU@8Pt5eSEm?}?{bs4N^xk4(^@M!2?OF&2ku$mUnI)iU z+eAJXTZnKXw^YGAOw+=W_SA{TsTJWc4YIk7Za_2OO0gzrM~d~-OV(MgG-w&tx-EEIQ6fRZM z-95RDN!Yf#(SI8=m9n-FH8Brrjjc70Au>KOPrW8>GI*m6=qB4v%m}C|f9kQVP zhFv3qWd=>EY;8e%GCC&yu4(H<3*l^Q4sKSsu+0&EA|TN&JuFgg%a+w3x~WM#k=s)+=EH-=U|LZx8##z+ 
z+eJ28GW_OY*`f_O1zUzsWI@iU;lV_Wa-HVc@MC|yQoeo$=M0V%0lR~pDR-vanQ|ZF zfLXD1YjZVQL^p?OL_ zV4atd!IDqOW^7-rDud6VO!xRY91R*RR3Ue}!22dmi`lG$_9m(h+ak7I5AIIW9lk*# z=>yk*W_4-H+5orVIMAA=5nGy|84gt}KYg;1GF{@q;Sp*LKLB>VhJEj^68+dpxxNlR z1N%Tl&)2Z-?`NK9_!41#b^rhL8g{mqY7OTYVz(o$|7XLW(7iJ_XK>tFb!N_)IcMg4 zj03+&cd)xouB!4W0(&6UV?Bnf6E4G310|OT5f= zr7+K#ba3B7@Vd|gj%mO-*I1GOSGHw+_Z!!f3;(#bpN2}xqbd(R(u0`fVL29djcjda z(H9e*q%Q`iD%#4{8kP=}kj}EokL%$(I|p0b4nx)!bI-BC@!3bK(>xn~{08z|j*o55 z`T7~0GdNO~&zf`A9Q7`t-b3^>*B*ZX!(+{JEy<4ODiWvJ(CkvZS5tck+U9mK4lDFZ z&-e>y7B^N?du$}@!B$Mnxs9=);eE2=<{)De2lGU3$+pcc*;d81l5Fqx*bD4gx~%z- zZ(hw-*!JMOF!$0lyxR#;?a4QhPlDfUnKWr~A9M;#)8aOm2xyBXIRQ^%9vo_D&q3r| z1M0}f7gMsoY-=Rb>LV!kLb0>VocJf}^N-6P|71PS`G@B3Kk$#tUuPD-;-4&c|GfL8 z&7n~@nl7u#Jp?~1a*q!Jsry~+QBLqVGrIJp_gaxmJ)!NZpZZz zky{}Gv;l4_ab7`{o2(>2NuOzkH=>=tnwH~hyoad~miLh*7f6DN{CT@2R; zvI+MsmXu<+L1ZTAgF-l{oBrsYY2vzjjybGV2iQYl+kb$Ty4$^vitCaM-j5~bxVBQw z_F?OV>qQtPIg`Z)*8{o2^+4*l@zO1pEyk`PWIdUUlMOwH>COthwQX?DDq*(?3T<73 z_T*+XwpI{!XIY-01{$&Sg&urCdm==$megTS(N38xV#~E(*#`e_J+6o5mfnb%=JuNM zc4pz)(u_H)!=9`MN%_dy^F*hdbx1485w>m(CaiXSoJBlbgR?J{vO6R#Ii8iC-ZpGf z)&x7Y?h#3jrO4Hq`L0Fm;HMS@QcmPnPlo1{EDxJIk;mL_R@-|3bO(pH9?slhmmRf} z!A}%*VjHUK-E2v*R^!xb;(E~F5H+=)@O&@}{tvt-_+JP*mP4yfX>ew@CQOzzJ&a;& zg@t-eZWShyI60e#BpR;79CKS}5X(x%57{HFLf7xx67|MIH$e01S%eTINH1@eXfzvXA} zw*L5|H~Ddb+-uko0?>I(IGVHW;C=yH+hEH!JafAa{61RLnKOYj?BSOloP(}TzAI~& zGPX(CmK(G)NOa-{SH1UWv7JLCKA7BOZKwW(qJ(X`V^}F%+f(v|pCp%b6b8_+U??5==$wA3aZ^Zl^Lbpn`7z~k)tESt*5KcL7*46aUDDXy|zJotzK z(B%EVCqr|(s*vDy*II+uV|^kLzP2M`-61GG`*3Vo`LWAbcdULW__arPpGVQQz! 
zCw84A7+(UEc?)U#uH0fxf>;&`Fkh!wq+-yGSspsR@Nu zZwNDwQ%}ok612*LVY`$D%>LXa>2e}?6wH_9- z>v~0ArXeYJKbsVuIi?+4U7CNL6&Ee_sgXt0!2*#8@4v z8BCIwLQ<;blG%Qg1B`6Wat;6%+- zN4PlY+@5AY8d!SHP0P{6`r`unCQ8k2qj^9nn9J` zYNaMrqBQ|Mt$|5RuSpIQw>>J7Py%%d#xMg~^j+}m@0}Qzy-Kg5i}Ec=5pe>GDg;&N z6LhH&WKU_rb8{s6+&2O+s`ZaSDY8qDYTH?0icmMl`l92})x!C5ZJzS=?vRweb^y}P z$5_GVp3=BWc0Hvr%$nM3CWeZf&R0ENlay%H9(hNPX96K}ttTOKtv^J-hZs5rIyH_} zvJ)k9<38>p1-*dmP7!ft{t6DK5H7)_)}M4RS&#B$y=o^DbahYpL^}jbr~g`4Rr#Qq z9s#}n@77aM$!c3|%TA;M6rXEKJa1+sYb?snriYU#sr66|Sd zreub)z{IW`vT#8owG(`fp<4q|?#P$$s2Y|adxBvl1hzNX9oI*=3~Q-$B}=dMgM)QQ zY&DCJyYYkZx)rvMPETdZ1$*r)1z<-5Uk=+sVIS1s8W`aE$&%N*c{Y` z(}rgjFmZ%=785{I2LYylsw{h&SkCRzoRRAbol4UhfJgD79B5Vi=p01$^{+$nz2O1(!HClLxt ztGi`N5L!Bt1WemOq6%WIH6_aoazdov2@UwpV+tn*U#jNjZ0=5bLlz0uap9zJhjv9A zdRQ>jOP#9VR=>W3cF*<_ZQ4O&k}C9Up$b}6oQOpLU7Txy6o(E~adEm~X7#>y2lbdR zM|w`KinrO(0(tcAXn}sVZdy8mDV2^#%BPf*X#4VE6}BjFh_-Bmln*Oq=$WQOX@x2v zg`%mxt3}ocXBfrq$KTG;8HT8Rs37Am3Jz3TzCrH<>G*c2jq#XS){N(y zM=;$6ax_j$W;{l_1fMgx+}f#yJsw2`(+G;v5T;L1N@XbRW~*8qA(*PTU>C~J^O-|> zKT_B1XoCcKwGR`dNG-!uh@J){4c^ljSx{Cr_+ftEo@bL9=QGIS=z2E!D}R#b+{ouf zuKpGF=OGN^FKg+e(^l;eNm8|m)}N5SpW}O~ia1j*;m-Ao>QH*KzlZ>kK1ka6 znP<0qTPlTiaRwHl26np9p&^(MNe7EMp)VORU2t$rWQWHdlWiq>L20sEN15F{!B%Qw z;>1KCxmVx51nd~SS|&R-ZQ*XM==~sOU-9wF^xGkx?on$VRAoGX#qP@VV7v1xKo!WG(zO7hjjBNZtZ#OgAXa^l zZjW++Y?_I!AV{jaN8*fO56h}ueytbUXW0{kGMHSRCH+)K2#8qWa3r`V3i>R#nSzR3 zp2Y<162nfIWbWAUfszX}B0;sk4x+c_a=Yg*kNtLp$CRVrA27`Q zs%YmJsw+MqRij+=DOGUX*269yT@~CLXNS8$YMR2mYoj{CwLG;GBOuI737o?8Y$3v< ziA)JXGbb;R2$x`J*0W{xk!V|xvG1pFNOaZDp{6&a5WSVDf-77<6om8%1kCK{5nyTv zU3HGcG!%~hhP*sa7qH><`fN<>RE>r>Jr96_-T=~H;5a~tP=K%3p$sXZ#>XHwnDpAh z_S;(}DWt6L{XK&FZGrW^FC1~I{Sd=Ud@-%mxnRX<^e61fL+Lhk@5xCa1T3jS-{Zhy zIo9s6Bqh*@RQBxy6n)7+L(L8hC#PePU<5Ob&^IjQjzssny}{F!K#;%3K~NEsp1=Y( zNZ5C!aTFHycPzq@gU&R*U_#H*XRD5YIG}IagZmI@P zrRqQ`S~6B@o5EdMMznys`P|c|FW-(+$Zrx%EgVHN)BEb{JtsNVJ5^YHB-k3_E04bJ z&t!7&xlT%?BQVdVEmoAZclC5KtO##=!ZSFsc=U!aP%RQzSwox(flclD1d&!ZK8~Wd 
zG-jl_5fGtM1mg(r5{zLl=J;|KS&O25j?m0<+c6C*y;JE?o}t_`M|1?)1yA8C5eo4P zAbqnFWXLHLe(91RwiDi@)=w@d-D9#E!MJm~Pzu+d$S@IA6T+?z+Q?V`fsM1tuki1t zJD2iI%QG#nuP&bWIQPpl^oyC!ZpfBXOavxOSVz@5#J(qX%Pdj-vVkvstrSBM=Q=d? zg(g(BVXhoo!ef=S%EcLE#MYfnIEG>wj+Kiu$6n#i9Yzvlf8b3OL=*XntIK?*N$vqu z#eJx$oho|U2+2!03MNpiS0oJc5Mb(e*jDU>q6t^?1y=%I9MRl|y*V0aQh z97)8nS6vz`0ENI}7$NEj2J-#NfxGzX4WX8%WfFAkQsO|42~}LJtVZY!@feAAIfidy zvq}-2l&Pslf|1<5n=n*F1<$?^uv9H_xD(sap||ErId_cWB6{_pf`vl#*K^Qh3MM>!*pLCDr^z3I7+LY-KmwwUXc{&WiIrife2GIF~U~2iplP(7I(cbvOD@t ztW~h9?*fEKFu9*TGQ49-3Z4i~ViNp&K@e;Nb4Px+zl9rO1fwN+Hdh}uf|9jm7fA@bJ9C$jOIAkcFkcBk3TQkz#b{v>O z=z~&EGl^sf*6Spy)z6Mi%U;iE^TgSoa52;Z8+ZLewR7ytN2snBVkIb@M(8U@kV&nF vMOV;}Gk`db^^M@5c;e?D?yoO?_9stQw?FkmAN}?RzVYZw`$>NL{SW^G1qGz% literal 0 HcmV?d00001 diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_5f9f951aa5d5af07c588813f31e9355939311eca86a05923cfb53d11b746428a.json b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_5f9f951aa5d5af07c588813f31e9355939311eca86a05923cfb53d11b746428a.json new file mode 100644 index 0000000000000000000000000000000000000000..03a98a10115aaa66a917d100a2cceab02420d83a GIT binary patch literal 114110 zcmeI5&u^XQafQ3~UtxI71uuVyVwT-BMS-@Pu7bdD8x=7U*r_cCFbx0iJPvdBub zd7c3nqq)GfWl|i@%$YN1X1+^HKYa4(r=R`Po45b`hc{on{qpVi|M>mCe*M*deD&r3 zeD~(xzx&TWfB9ek^5*;Be(~)eKl{z6KjcgE#eaSC&9`5E{nbyO;8T|mKFY^`IV?Z_ z)0^)<|Kqa#-C>LUr=MQEx_I!>&DFz?POonseRzBO_`}x^KYVO?efRM9PB)J%uTJ-m ze)sNC45#bMhrfIKD2A)cM@~=Ir-xsAx_R`)r&o`nc=hOOUqAZj>h#$1bomo1zkl!H zvv1#i@y*-MzxeOBZ@vl0k8phdRRQ(>kCyN+-+cYoH!0rRZ@zo8zu?E?SN{Fu zCqI7j51;(-*}whiv)}ygbaj1ky1cl)I$k#ycc*-qe@?G1?)aU{{b7F1r%so*>({H( z#jAXnU+!M#GZBDbu69+%%W_|5X7MJ(i<*OsmU~KyhHhhWbCGU;U!RA8J^b$*AYCx5?|y%P%=aeLqacYI zkE>jK`Q<7PH9mgl7vj3PC!)N9-ZO(*g-R(?pu^*X@+QaUewTN|+jTgCCEeRcpb_5z z8vpU?-Rtfm+REqinY)Yp*7G4fors+3K(DZUXTgVokuaV^M%+HXh99XIRZEAyBW(WT zDB1g#IR%)_NDzNq=A-^cw71s~>o9-U?+-b2{P*eV_Lrgs5JCVHAb>@X>H&p6*18|K 
zB9KQ218ctOU7pYTChOZ04}!zDB(ALyFnbm1Xtt-d?5HKx{=YJHV7dDwQgwW|dTPtn zPnPdo53+b>EI09+7i8%nvN=Y0 zq{nBw#qT*HIbBXOsR9GQqx{AyWPC9{OlOoQasDaDGpMH|~NZPH?)iuJyk zmM(MhGKhR9u3fMBWvdqND|eZAs&+hUkmiGBiLeKQ4D;xc6;-M>W4l9a7t4mdvSyk% zo-akUWW#$ZZl?)c&3ZhMh~X(0^Fi=z*V;S^W2*-26JgBqU?(_|uE+Nv=G`Wic7MooxD+`zpdn61Wc;E4>t!x^3 zxT*;=NEx13RiVEuOL3ZV>k%KG=xZgmwmU{K53u6g(DYb(GB77=jdtz@>lz&!%zDfx zJekp($Y=XAJ)*T}vf&Iw7{IOT*pAdYPn;~^i{S*|0AEDaJfA&k(^;$p*GJ_^nwzi->8J&}bJml1u*P#Z?Aa?|%r>vNO*hI(D8n##%9)jSqraSV| z<0FyldQA`;?DN1Cs!X(KGIu83G4Q(z?H^6(Nf!;&Etz~n?*TczZCu->2!e&5p|=tu z%%l9etrGdfwJjC|%N{!cZIxStHpp66${A|$VJ_=QVW-=u2M2w``ec*cO=MHXJgbNm zy=`ts%qZ(jxFdv*J)8Ged_RihzeiVInp-^;NIJ^(d zQ~Ckxyo?N%d`dQB`)XAge2y~RZoZl9rs=Y5%po@AA^`w0ui z@bt*rVfOIByDoHWhr=rsw?~ss86ldyftaiw<0)kF`L#3-FBHEL3}z+k#Nd{Utv`}J7uY9>8tt|}6GY7l%EpuHd%m>%O4qn%5z_GZNnw)`dNkYDZClx$3wdZGK z7T5N9MN-yR$;#RTp2#OR(cmpVj&9po^u>fHX=*FK03LH&Ys?2qNTIFr<2tzMdw)^I z;zriic7Y}(Mc@0A(|h3b<S7vV1mt3etT349*#xv*tX-fxn0D&^wFj z2xZ&Mkg|s}=980Z+1e$a#0UTXdoD7aczRQB_y9gUOzcHQu?F>8Y#EVlg1NndM`))~ znxB*PZ+3T6DSek6ZRIo>Zd<>Tn3t_@$vmI2?4lhkLkN5DgoIw6JvAi&G2M=zk0o*| zM1VHHZ6(fk&_8!Hczn7*3vSiVtlBN&Hg5QbZQ}Cco`vGoS|&~;f4Ug14`dVWTP!KX zaD&K9&<{rkPuhkg+KKD#Ip(ld9lTE-!yh}NUw=|@U9!P*S-2Y4R;t{7Bg7~CbMy}p$9QL*%-*$c5PmEo1oCvH5^N^EKjhtg0MTw z@&q-|h$UY=_=5ICh%D5rC3VmHFL zGN_9+n9+-$S`bM2SYmZkPs}M)4GvLL>j}?i&q1f&6a3!@I+jCyfD8$fB~1^b*jiy(iaMEPUJ@s_c}Sw+ zN^i#878=Ad`t6E{dFXA;83fs@?HI!Q;K0XxAjmQ2J6Pn&eNeasU66l~2ll+xf7(yz zFXQs^{S$gq{F{q=0?$ABuJ`=t!rl5A0^StJ8v^{2ui<@x^`FD{19*yLM+iXYHQ{J3 znZfe~Y;A)r+wjcoI`I2wQD@Es(y(U?=)pPY>g2n!b}3_^0n6Bm7jMw ze%OJ}?TA=+2#TM5IJT^O*=2rm^7h!0j*t`jVSM|3?`nUrQtS{2YdF^rGhqU~`YlH| ziV3~c>lcPu*!wAkNo;$;b&`RvwFi{lzT@mg3d7V+l~3$CNieUlqL227=6*a~E_3cK>fo#H2P3Tf)u!bIpho!E}@ zwPzK1w9b_ej~V)|0sA<23HrM4^7V%C$g59q8}>!SFfFgZiEYa?kWud@#gJC&TONS@dSx3JIM7osr9gsUC$Hg$T2zK_gf+nW{K*lg|GfG z2r!1pqSnXGASMVNlU-kNf>cFk`uBxEzIuX1OpMi$n!zM_DI}#@E}88|Ij{(;V_?Z$ zJ>#G+Is{WY7)>M+%;@QR7JU{CtEd7XX#|I41hBue$S$X@fF)<0XxLcwVhb#Zvv-GK 
zm?>ZhEk)5M9VGbNM+hM5c@g;A5-?$Ps16o&Uz1?!?r4_o>mM0Wjx@XIz9@XJ5-}0o zhtl>DCN*J22?py5PRlKt64+NY1DMH;G>x+&@R}1IMwRy_dyF*g?+5t#EA7cfddrIRj+4YphFl%bBnHVZ^ zI$!m8O;Vy&d*mHGUI>KDwVs5`wf=~J4>5EMbZQ){WG71I#(msH3VH$Aog(7Q{1qHd zAzXq+bwE)a)nEeTZ$KJ}zpjyBPGLxlOLQEXZ@|Z&Ce6R3kc!kEjBLM7q z75>Akei?90x@auAh9LXw1dLEoI9h!zge!xfcehomm%H6)LJ)eB?W-Th7#^}Lc*1;C zu;8>81mes~)LlRRN}G!~GvkE1#;R9_jigX8yPc+kT+v~Rs9h?a@zK+aGrX_DZUOxIOvcc)Ko*bQ-EJI5EqyE}!Jd|8N@gevOzg@b3l}s}JHh7|x-}r> zj(iD^s$mJTCm2>jV0)9@aeai#u$D?!vh-R%I9P|oRhH|Z2d|};S?ZP#iLSPG|h$T7T_aP(YfT9cIepxGQHZHqC3e=Mq==# zYCRMX%&x)M_Ps-DN~PUGMb(ZR>xK=x7BkCE$6eoduIYAmy*q*oVVsWjG-IZVj*C@4 zdUk3e$X@goL(JR}Xjs*PjeWV4SY##PnN0u0^zWvQ>a-XNQmGz;NNV*zxB`v) za?@*3*WMj4#$&Gun-o{z|n>(M?AVWMAU`@*<9&U2(@hE(HW}J z?+S{ie2Z+(FgdDC0ERnoV(99aPVn2074C4(NRbsr&yN+Q@Hdwn3a!(&_VU4p&^ zaF;45WZ+mYWq9nn05FvnnJ(y)N=AZ~NlK_z00em@T$8;8zx^>qx%;t&uk|WDLS|9E zOml%}e7V$=I4?j%?_j7r!p|PexN||`uGdW5^^?w736f~=SA>FK%;H{Pao5*6fbiAxSYDA-llu%>XFV)-{y(GpR9+hG0{$i!41d3@o zyOv|FuzW@VEkoGeUS;s zCKh*MCCKO$`*8Ad>)+bj-KL}ty|skM9D}C|HIRzkIN+oleG$nqZ%p(|aAf-|xLXyVO1I1hIo;Z-q(jzCJ;k-MLHM`=ZAe z2YXL5X{9A1h9bzL=u(v8vBy(926y4LYXgN28odySA>t|}GI{|M(ZU8X{bWxlw&%kVbE3U&nj9Px`#&DjdLqGi6em@#o4$5C$le`>eH9>BdZBN243}-GEp%TWC)tH& zLR(VqsB|yX@I*lhmO#3`%- zXc_Lg<>ex1!(sj@Uu=cmhk0$u4>yr&O~Pu5Th&GBIRj$$G)0%6*3# zW$w`yUd|>p*#u{VNi@L;Y_BAS&!PpE_zPCY(Cs$8@*5~baeEUkGBH`4qkg8GW#k29 zggv&Wv@yx7SA1h@Qtvv^cRg2Q#ire3g1gviL(w2{H-NNd7iUY4=Uyp$oxw2KDQ%|^ z{gD+h6y;m|r26G!ocI#%NKIh^t3?%ZAUk*N`G#sA7Il$Y2&PJ)d4fAZ%fxx~LQ62H z5=!^W8}5`x+&Q)cncgs|nYss%UdNWCn1&U;dJ_{^FIht61k}iW-_FvJ7`uISUIw=p z@*>jjO{6mS)nt5^gBvLjq8fb@I7Ell%`V8mY*L!wp8nxW021U~kSTQUZUrey0Izbk zh@qB>3#$y!p2^9}Fj>EyB)Z1(pT|38U&H^NA&jb?kSVdKR~)hP^z^_myP3cw_-7{< zuqZHzQyqN)lUIK}pj~1oPrLPCl3m}V8F<8jK%(%W-}C4ZWqc3?uTiRmwb-9z7g0HRFHIQuP2LIrUa^Ya2EyZn+`)IER-*s zUhi=k3P>?LdbZsnb}f3&#Bc%IdPD$4_Z^T5WC$|cPPFArS%y;N6>Us*uKNTt-8Anp zH+6f|>7}vn zcREo`A0}*{-I|i7CD$_0D<^oSWdzMqg}&}2sGiO4INhL3E&5RLXdlwXw$%q7Eefk+ zt#aAnuCMzGnH+p>F*LCW%yW!~!=#_c 
zNv&4eaJa8Bu`Ozqmy0Xe#2qrL5~NRs%!=8n4wj9HpjO=VsztfOe?hQVX(d{T2O`L*rQ7KshgS{d!{YU2&KhPb>FH*{$0yp-1XeX-Q>NM-lGT- zB%J`N%@*)8%GER%g`)&%Ti5TRgM_?i!iqxo?It?(Y*Guwxi0O>cSh`t*!fRNf8L)H zew~HzMf$}I=wEAKCYQ`{yP}xIJJ&(Kl(O6CcZgyr)|+)%^o1tWfJtkXH`ulE?L}Z8IfoEOxP&&4K!^U0fYX{lm$yMIFOX17|v9z0b zjFzO_z!GR!UWo`j75lzkJk0oLd6nfvlx)*pXo6!%ta=U;qH^4^UocK~r(6n6D30ER+lqhM;Wq;M|Iz|t>5xXS<%RX7q9g+SZ$>O;k<=QFAG z<-;W8>}dv=A~h!|nXP@R0*DhLy#u7_Yzb;r3HW_>zEx9hF$)5S2=eGSha^F&63FSl zw7nPG1c^s+AO{@P4tR9k*9W<>Yboulgm%RdwCtHlgb@cfMBIT8D{r=U1=FyKX1G(ZtT@FiZcPM3Kwi+} zC|MgGIH;f8BStFuL)79S}R literal 0 HcmV?d00001 diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_a6d61bfcc12549cc29bf4e9abe2839f231c080d4ffacee80457ecc1db6a4bbbb.json b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_a6d61bfcc12549cc29bf4e9abe2839f231c080d4ffacee80457ecc1db6a4bbbb.json new file mode 100644 index 0000000000000000000000000000000000000000..4af3de76387412e9987a01137e9938b648ed4dbe GIT binary patch literal 104070 zcmeI5O>dl6a)q<@uQ0skg3S+6oMkpi5Fp7WtHHo0OZ=#=s#B*<)xF&&fB59nPe1#IS8x95kFQ?8`SQ*8fB*fzeEs_0Uw`@E z-@W>`@BZVfFaPtOUw!|NUwr$g&wlsm5Bb)7^Iukzy9eHeCqPSNBQ_Khvnyg ze)aw5e_FP`KWwr8^wX;s7Y{zVxqA4~>E+F%4{vWDfB5p@hmS3U96; zt9OrLI9*>p{Oawa7_KfKIXzvU9)9oX=FvBwUObB8#iQ?i`RJpo(__oiH*Y@w;=kU!`X(U1{_d-<9(?gH-+cYoSNZgtZ@zoAt$#fJ=D$CF^5Z9e_sI{R z{p+7U``zzPSJxM(%Zux)<8gCwcglzPb9!-c$5$@*hxt37I$hqbzpqXgFY;mjx_g<= zU~qZyk}qPw-!VSj9mX%Qy}P*2HwH4kdVha$m+!$3&v12dpYJ^JnVXC2n~U3*1oJqu zxAC1$JO%!4wh7Lb|BsqJe1e-e_?VmZy+2u8-fx#LjzELS5%2NQlb`3^vn6m60HsSH zCXpBK0^%7jFGBF!nA}{1NAK$MFtCUJz5&t&!+Q4n17x0?P>+HnYCNuT@$HwZJkTZ>PuM|)1==Tf}s%yIHNeq9_TpK>Mr{433d_k5TBAa7`M|yms%)Xyviel4z zsw%(d)R!`IV3O$*C**qpGs~Vhg5I-Dvdj|;GUn0ZG>)Yr=9X8ZtWV5RA9gUeSrc|} zd*U$t*^*@oux!EgX+YG}%d&;UU;1+I$gO-~zP3Ki)nos1oS|MeavDV2F0$E@;WrN} zYuYdr^|Fxe(7id3g>SI;B`Kw`gMOD{H2SR!W>)4LeJ2y@i@WpTfaDXqOYM#%o+H}^dL1fA{S)dL{Y^8`fkBNW>ZC&Wt 
zS|wbGYle^fMvH99gq_J{wFvXc@gY>Pku!HDt5FI)F}E@}f;#-Pt-(4sXzF04DLSoG z?i1{Ch2eN`=#s6@@)I+CZA+G7dB0ieEWNjwSUn+MZMzo2LF7y>eP#)0+BT67#ug%+ z$SqYc57V@;q&;=wacV_4OoME0qZ`l+xKgYM+L2;C^^$d#D-BwPwQi8uPPU^&Ze4GN zYtTY(O_h0X>h7>H16PV=DY`mqQY>cfF!%{*>5j<|30o#aO?TVl4~0wBbazi~V-mLQ zZuH;AOr@+XL`}?tT4QU?V~C7T%u}yPn+#rP1G>q!6Z1jj=z~=!V2|qwXOg&WaBv%V zC-^jrgJ%pi9NN>5oa0kMgWr8R7^FgygoWleYFp~pue*Y%npHrVHZ zD^!_i(PZvSx?|vX7201-=t&n1)Ge8OL+=4Oy=`3Er3iwBpP{!BBFv-wx~&rV#I-FJ z1j`;f0d19AgEq)oSIQY`@nJ6ONnxkks0Rmq#QJ2D-A!au#yqQt6}@e4N6aYeOyof$ zfVs_@u!EZwE^Kpzp9n~_OAm{b+p=Xfh;C{UPvrI#jQQ}OF_>1=%SH}j+IEr6mJGjn zShi?GPQjMp6IqaRYIrbFqg4(QV=IJ~U712dwilGFb8{ z*^KS0Rb}uwl<6K{hoeEGg(~E37kJ;KX)&91(B4GVVOzwu>%rY=y2Cd}Bz@o-(5x

SB4^LR)nypSf@fKQc@D6X3yQ({*MpW~>FspgwayUJ zjSp1@yD^B_8xm^0a#t5DnCH}Nal>{PxqagnfKM)L2U(MHT-(UOe4tCqT$c>-EqO>Yhc0X(sHm=ro^_KEpG);3RUtvAVSz4UTRy=-f&m^DSW zxn|m+*1K4?uVJZup_8nG`CxiZrFd|!l@Xv5C*=$gOSWx3&_j?PtXl+c&2gO#)0z+Y zrU%dt+bSw0vn@7sz07suiF^|L=Bib;o%^6uV46jq+_mdyizPV$PhuXN-~fCOITKJv zo@L4Q7h!9T4HlK^g|mC1*x7>K{$}sv7wz&fzaBTgfS6xX{ODKX{`8xC-7lq`v^n^P zb(0ri(`&Y0=+oHRI&O`!+>1o$#OXD={{XmMy+AAv+cMkS)@!jv3kBvA%ypqBXvbFb zbFwcDem)xoI>v!JQbFv$Maq+ymo1U2<7aeWGQu=mggv+}pq(>(2klG%V!9n~^&xUA zMDfc*)9gr`XHexP>j=1&1+Xs7a8S>*Wp4Wq&{B81_fc_uz#81G5_4Q@&<3~BxL$-& zk~3L+a6OPKTo0r!BJEpj@U!m7dNLa)8+s7aofUd(+qHS=sW=E5v?n*Cv9*G*JIih{ zsDVZ-eW3?m(4GjdBTbeOvb=Z^j zASoYNd!Fc&2k$Q+=3}RFVPmUB>^O@!zq>EAjwNfmL*jZoD?Pn!*c4h5Y%#>@=OUON zI)fR#_^AaUwWi!wPlo1{EDtN-L>}|OQnvzd9g6L6<_^2;sGSUcqNo$wP<^0jNU>Jq z)NA58WV7ZFHMO4be0C2y^`78u?r9U!*XBvwsuQj!gvpYohf!?fUEN3k*W^}VB8ii; zc}Sw+O3X30g$A*Veya?dpVpi~kiFWDA-oR`_?Qm_Ip#crMV{OTgGc>d&B@9*=^yY(IdUKGd+0{`c4C&;~q9U%an$AqIf>)QQz zk<4Ixyq<-5?mF#Z8-5=x>f|`xmxJA*_gesI-$j~3fG zMB;EN5~lPmD_iFSNq0FF%c2gaIPO_!UTHt z#|YslCiGITUl?X#@2Ax6pI&gCWDw`t1D;Ij^}nF+ID3)8P~8>~YQO8miiYj++{pBX z8yUqJLahX2y%8-H?3V-T-uv3H^o4k4cf_gg`-wAPDjmaCAd^+tl`rlTKZ#RFQ_mJA zLNCEPI?(qQ2|CGAFSy|@br%UHH8r8I>J4G$aq4MVO@dZ= zFl_ZIVD{%;f{d;zt#jqWV}`zKz&_4hg1+v%e7#{j^5_%XhJ6t+Ov@{9V%steWYnuk zF{G9Hmd9gn{1fIPStp z0ZW1u8XGIss+=JhCdTSW&0vze6p~Ucm(2E~ z99V?aF|g#Wo^j9@9fGMHj3yEZX7uzui$05nRa60xG=f7i0@$Ck$Sy}+0ZWc`qG4mz zi!HDu&fXn{VWxm3v=l|3bdcb4A0dFK`y%kSC1AqpP#rAld`*I>yQ5jUufH;)9BFpZ zeNp&cC1N7F52fuROlrc45)9TAoR(WOC9rqk%vne5%r7BI0VisvI>NWek-^LJgihZ3wmR_Z6ftTr4%stMJ5K#zfEUL?{W<(+sNgRx35360Hg7X$?$j zdQEbexb0Dqgc7JzFoqe>qVIxdfA7S&>{WUdU6gN0iii_fR3WHBpP);PAbUy^o|_}l z=e`kuQLTRrN|9ZHRNKx1Q-r!X))yU*t`^RhYx9(^cZa0(wF8iTKE?_@_msw6vg;|0 zVb;`MGci=;biV5GnxsUl_Q*SWJQE0+Ydr~>YyBYtKE%*5(5Z2(lAS1-8~1S+Dd+`c zcZ!HJ^H*>VTbwf>}o$$FG0>s33MpsRbzC)y!kI{nwWs>%n=^a$wnf480@47R;t zta$Vavkm(qV(2!FBh+?gQNgr)Ukkv@hS?us@Yq{<8B_}xL1wb_N{ET0S*|IB&i4v$ zhF56ZI|9I-SK&Xr>X!l6q>IL)YY4K>PQVBig`?HiLbx&rdUsp3db!(;CIq23*}nR5 
zjNu{6f+x&J1q)7lK_JdNMBVlESK3^}nHeY4HCDYcY$Sz(+3hqP~?U4NP@jbDD7rY5_J6@Je>?C)E=4Lw!NlN_nt7RAoI()>zjQO#ucJx3)!^? zL)Ujo`Uo%D-d9Z?u>*S#2oZ)_J(DqXEs({dcefkIQA;0|lVDFvGbJ;W1txaokcA5x zsh!|+4BZ-#a!0;|N7b+d*%J&aA+Wv4?zld}Wmrq4D_MH29~`VhVyjt%>8%JDGii4gyq-=NnPKaTSDtfQ( z@x){9XT&YUI&_OAHVbo4Fj_3Wgkg%_!-Bg!giM7nefh#nK~8FyARW!(QDnDZ={;^d z`vxROeZ7`e>WOVQw$6Z}?ol&Qg~1g;yL55;B=U0iWA1K8O#}!T-RsZ$yXksE;ftG@ zzGdWK4fvv2*1+no$Uq~uJ+)#bIuu{FYq0H+fna&`^AUGKVBLDNs?fV5$aoZ8pcX1x zwMP=Go|!u}QE}0#2SdyV5NHH>VU-9&53zS0hQ_1o(*gfQc92BPI1EFr9#0HYKDElm zooUsGvyD>-=0>J`!QBDEU=`YxO{4E1@`^j9u@tWNn5k;4T$@i0k4~Herdk_Z!e%Q zweuL4CQkM8VyGfjJ%s7^YAKZ&1a7vf)lr^g`}ToXQHGw+RIMMWt_48e9TpO#NG-!u z41Enql%c0FdZMh_zGekdOIPbiNZi@v4Dzq!Eu;N>Ka>AV{@*hBGnJla0@7&5+TZex z(b=zLMz!aJOsWis&}^ z1DUodh%+Z>foICsAEqcl&z2pe+IwYosrzx2(c%e`DnZVDeS!#@b9ZP(n;nJ7am*d! zgq4$I9tab{^IE@?uZ~8A6Kr75w9?rxr-YR(MpS0e8CNQagdC z>(k62bvNs*C_zt4s+Mg>80Jz}4B7N)Pp~)=?1Cq5L0XuI^J!=)*roTzuuBn4lzTPZ z^Gt20La1v`$VC#tNeq>I?T&K;%Tk4ki@sZpAQpgVQNZbC6A@E0h>6fQESN<7T!PtE z`XW;OFsCOf$S6p0*NYNXap#aps7bSZYf475svyYpaZ>vADC6A7hy+&!1!q!-9vNWK zo2q-$R6Gh|CrE+G0cOiFYL~VZwaai3J5qv-J7L%Z0+|JqQ|KNIw{@5Z`}M|1LGQ&N zaQ4#jQy`WG@1+u1=!MJh*)?foL=89=upUF9#s-Q$Fq2E zUI2dQH6&hekQW!)aSVD4M$W zWZOCR>mnYLVDE8->guS=fm4^GQ0BvzX~Xn|pm&5qj5Z-J*}+73tBLw;ie~RSh`#Gn zgC1GG`&x5^>D{47$*#wQINgapg%hpX^GZ;0CDkgPfZZSv(4GZ1_|Vf&pbhtg5(*|L z=!|Uk4d`%dDJs46P*YQJBy^{nC6!LQqL%%3iax~9jh^-`Uh$>o4nYRbK0CTTwxT6t zm1~N=?zl_K2%5P&_O$@?<=b%z`CYYe6wP=QTZg)6g3^6Ad3qspxv#~}u}78eOM-5TbI2q}=T3Fs zszv@=%V6B~;)%NqzP=C`s+S!_ko0>m)C);xNX7f2lUiRm;!NB2LXDv+TurU?c#l84 z>Is>QBox807h5n4yLx2+`JN%TQ}Pug-QjL0=-3sfeckaG!u0#VY9C>U$ILI6qZ;lM zvF~(>7_vE*s%`=qMO5%q6=W!gZqn;VAQkIJv_e>|9#pVU2qA{)zMf|I3eEaBF-#%U zOriVwP+^OJ#Zg-IESU7dB9a0(vgjcZXp{l_JTiUE4LQ3a)%| z(@26!767Lgy$xw!L}=E>$vhRlmUi`$LUwMURW!q$f@Q^N$XV)xU`R}K$wWpWl@nMa z3rVdP^Au}8O7N+2_}}Xu_~#0=*F8L{otdI5WGJ7Ig*3QZGt%8XEWwk9D1<&J^a<<78Z+3Wdk1b6V)0vmU3OHAwaaJa_Xy|z7ZT0 jPkgu7{q@B!e&^}x_G91l!7u;J7ao0Te~`ca@Z Date: Mon, 17 Mar 2025 22:04:36 +0700 Subject: [PATCH 
03/21] Squashed commit of the following: commit 61ad58a887ab59b45910dc48b083d7dc5a4cf7e4 Merge: 8d99708470 6161f75533 Author: QuantumExplorer Date: Mon Mar 17 21:39:12 2025 +0700 Merge branch 'v2.0-dev' into feat/getIdentitiesByPublicKeyHashes1 commit 8d997084709fd5bb39f415ea26bd65adf1ffe71d Author: Quantum Explorer Date: Thu Mar 6 21:04:45 2025 +0700 more work commit 0f26760b78bf5c3e48fb090332416bad4eebb031 Author: Quantum Explorer Date: Mon Mar 3 21:03:26 2025 +0700 temp work --- .../protos/platform/v0/platform.proto | 31 +++ .../mod.rs | 70 ++++++ .../v0/mod.rs | 192 +++++++++++++++ .../mod.rs | 6 +- .../v0/mod.rs | 8 +- .../src/query/identity_based_queries/mod.rs | 3 +- packages/rs-drive-abci/src/query/service.rs | 13 ++ .../tests/strategy_tests/query.rs | 2 +- packages/rs-drive-proof-verifier/src/proof.rs | 2 +- .../mod.rs | 53 +++++ .../v0/mod.rs | 53 +++++ .../mod.rs | 50 +++- .../v0/mod.rs | 33 ++- .../fetch/fetch_by_public_key_hashes/mod.rs | 6 +- .../src/drive/identity/fetch/prove/mod.rs | 1 + .../mod.rs | 75 ++++++ .../v0/mod.rs | 219 ++++++++++++++++++ .../v0/mod.rs | 2 +- .../v0/mod.rs | 2 +- .../v0/mod.rs | 2 +- .../src/drive/identity/fetch/queries/mod.rs | 39 +++- ...non_unique_public_key_hash_double_proof.rs | 21 ++ packages/rs-drive/src/drive/identity/mod.rs | 4 + packages/rs-drive/src/verify/identity/mod.rs | 8 +- .../v0/mod.rs | 2 +- .../mod.rs | 79 +++++++ .../v0/mod.rs | 82 +++++++ .../mod.rs | 6 +- .../v0/mod.rs | 4 +- .../mod.rs | 65 ++++++ .../v0/mod.rs | 69 ++++++ .../mod.rs | 8 +- .../v0/mod.rs | 2 +- .../mod.rs | 8 +- .../v0/mod.rs | 2 +- .../drive_abci_query_versions/mod.rs | 2 +- .../drive_abci_query_versions/v1.rs | 2 +- .../drive_identity_method_versions/mod.rs | 2 + .../drive_identity_method_versions/v1.rs | 2 + .../drive_verify_method_versions/mod.rs | 6 +- .../drive_verify_method_versions/v1.rs | 6 +- .../src/version/mocks/v2_test.rs | 2 +- 42 files changed, 1186 insertions(+), 58 deletions(-) create mode 100644 
packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs rename packages/rs-drive-abci/src/query/identity_based_queries/{identity_by_public_key_hash => identity_by_unique_public_key_hash}/mod.rs (92%) rename packages/rs-drive-abci/src/query/identity_based_queries/{identity_by_public_key_hash => identity_by_unique_public_key_hash}/v0/mod.rs (94%) create mode 100644 packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs create mode 100644 packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs create mode 100644 packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs rename packages/rs-drive/src/verify/identity/{verify_full_identity_by_public_key_hash => verify_full_identity_by_unique_public_key_hash}/mod.rs (89%) rename packages/rs-drive/src/verify/identity/{verify_full_identity_by_public_key_hash => verify_full_identity_by_unique_public_key_hash}/v0/mod.rs (96%) create mode 100644 packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs create mode 100644 packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs rename 
packages/rs-drive/src/verify/identity/{verify_identity_id_by_public_key_hash => verify_identity_id_by_unique_public_key_hash}/mod.rs (86%) rename packages/rs-drive/src/verify/identity/{verify_identity_id_by_public_key_hash => verify_identity_id_by_unique_public_key_hash}/v0/mod.rs (98%) rename packages/rs-drive/src/verify/identity/{verify_identity_ids_by_public_key_hashes => verify_identity_ids_by_unique_public_key_hashes}/mod.rs (87%) rename packages/rs-drive/src/verify/identity/{verify_identity_ids_by_public_key_hashes => verify_identity_ids_by_unique_public_key_hashes}/v0/mod.rs (98%) diff --git a/packages/dapi-grpc/protos/platform/v0/platform.proto b/packages/dapi-grpc/protos/platform/v0/platform.proto index 905ef6c1d23..4d8ddd09363 100644 --- a/packages/dapi-grpc/protos/platform/v0/platform.proto +++ b/packages/dapi-grpc/protos/platform/v0/platform.proto @@ -33,6 +33,8 @@ service Platform { rpc getDocuments(GetDocumentsRequest) returns (GetDocumentsResponse); rpc getIdentityByPublicKeyHash(GetIdentityByPublicKeyHashRequest) returns (GetIdentityByPublicKeyHashResponse); + rpc getIdentityByNonUniquePublicKeyHash(GetIdentityByNonUniquePublicKeyHashRequest) + returns (GetIdentityByNonUniquePublicKeyHashResponse); rpc waitForStateTransitionResult(WaitForStateTransitionResultRequest) returns (WaitForStateTransitionResultResponse); rpc getConsensusParams(GetConsensusParamsRequest) @@ -620,6 +622,35 @@ message GetIdentityByPublicKeyHashResponse { oneof version { GetIdentityByPublicKeyHashResponseV0 v0 = 1; } } +message GetIdentityByNonUniquePublicKeyHashRequest { + message GetIdentityByNonUniquePublicKeyHashRequestV0 { + bytes public_key_hash = 1; + optional bytes start_after = 2; // Give one result after a previous result + bool prove = 3; + } + oneof version { GetIdentityByNonUniquePublicKeyHashRequestV0 v0 = 1; } +} + +message GetIdentityByNonUniquePublicKeyHashResponse { + message GetIdentityByNonUniquePublicKeyHashResponseV0 { + message IdentityResponse { + 
optional bytes identity = 1; + } + + message IdentityProvedResponse { + Proof grovedb_identity_public_key_hash_proof = 1; + optional bytes identity_proof_bytes = 2; // A hack, we return 2 proofs + } + oneof result { + IdentityResponse identity = 1; + IdentityProvedResponse proof = 2; + } + + ResponseMetadata metadata = 3; // Metadata about the blockchain state + } + oneof version { GetIdentityByNonUniquePublicKeyHashResponseV0 v0 = 1; } +} + message WaitForStateTransitionResultRequest { message WaitForStateTransitionResultRequestV0 { bytes state_transition_hash = 1; // The hash of the state transition to wait for diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..33403fad6a5 --- /dev/null +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,70 @@ +use crate::error::query::QueryError; +use crate::error::Error; +use crate::platform_types::platform::Platform; +use crate::platform_types::platform_state::PlatformState; +use crate::query::QueryValidationResult; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request::Version as RequestVersion; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::Version as ResponseVersion; +use dapi_grpc::platform::v0::{ + GetIdentityByNonUniquePublicKeyHashRequest, GetIdentityByNonUniquePublicKeyHashResponse, + GetIdentityByPublicKeyHashResponse, +}; +use dpp::version::PlatformVersion; + +mod v0; + +impl Platform { + /// Querying of an identity by a public key hash + pub fn query_identity_by_non_unique_public_key_hash( + &self, + GetIdentityByNonUniquePublicKeyHashRequest { version }: GetIdentityByNonUniquePublicKeyHashRequest, + platform_state: &PlatformState, + platform_version: &PlatformVersion, + ) -> Result, 
Error> { + let Some(version) = version else { + return Ok(QueryValidationResult::new_with_error( + QueryError::DecodingError( + "could not decode identity by public key non unique hash query".to_string(), + ), + )); + }; + + let feature_version_bounds = &platform_version + .drive_abci + .query + .identity_based_queries + .identity_by_unique_public_key_hash; + + let feature_version = match &version { + RequestVersion::V0(_) => 0, + }; + + if !feature_version_bounds.check_version(feature_version) { + return Ok(QueryValidationResult::new_with_error( + QueryError::UnsupportedQueryVersion( + "identity_by_non_unique_public_key_hash".to_string(), + feature_version_bounds.min_version, + feature_version_bounds.max_version, + platform_version.protocol_version, + feature_version, + ), + )); + } + + match version { + RequestVersion::V0(request_v0) => { + let request = self.query_identity_by_non_unique_public_key_hash_v0( + request_v0, + platform_state, + platform_version, + )?; + + Ok( + request.map(|response_v0| GetIdentityByNonUniquePublicKeyHashResponse { + version: Some(ResponseVersion::V0(response_v0)), + }), + ) + } + } + } +} diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..a9a3ac7dd42 --- /dev/null +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,192 @@ +use crate::error::query::QueryError; +use crate::error::Error; +use crate::platform_types::platform::Platform; +use crate::platform_types::platform_state::PlatformState; +use crate::query::QueryValidationResult; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request::GetIdentityByNonUniquePublicKeyHashRequestV0; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::{ + 
get_identity_by_non_unique_public_key_hash_response_v0, GetIdentityByNonUniquePublicKeyHashResponseV0, +}; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::get_identity_by_non_unique_public_key_hash_response_v0::{IdentityProvedResponse, IdentityResponse}; +use dpp::check_validation_result_with_data; +use dpp::platform_value::{Bytes20, Bytes32}; +use dpp::serialization::PlatformSerializable; +use dpp::validation::ValidationResult; +use dpp::version::PlatformVersion; + +impl Platform { + pub(super) fn query_identity_by_non_unique_public_key_hash_v0( + &self, + GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash, + start_after, + prove, + }: GetIdentityByNonUniquePublicKeyHashRequestV0, + platform_state: &PlatformState, + platform_version: &PlatformVersion, + ) -> Result, Error> { + let public_key_hash = + check_validation_result_with_data!(Bytes20::from_vec(public_key_hash) + .map(|bytes| bytes.0) + .map_err(|_| QueryError::InvalidArgument( + "public key hash must be 20 bytes long".to_string() + ))); + + let start_after = if let Some(start_after) = start_after { + Some(check_validation_result_with_data!(Bytes32::from_vec( + start_after + ) + .map(|bytes| bytes.0) + .map_err(|_| QueryError::InvalidArgument( + "public key hash must be 20 bytes long".to_string() + )))) + } else { + None + }; + + let response = if prove { + let proof = self + .drive + .prove_full_identity_by_non_unique_public_key_hash( + public_key_hash, + start_after, + None, + platform_version, + )?; + + GetIdentityByNonUniquePublicKeyHashResponseV0 { + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Proof( + IdentityProvedResponse { + grovedb_identity_public_key_hash_proof: Some(self.response_proof_v0( + platform_state, + proof.identity_id_public_key_hash_proof, + )), + identity_proof_bytes: proof.identity_proof, + }, + ), + ), + metadata: Some(self.response_metadata_v0(platform_state)), + } + } else { + let maybe_identity 
= self + .drive + .fetch_full_identity_by_non_unique_public_key_hash( + public_key_hash, + start_after, + None, + platform_version, + )?; + + let serialized_identity = maybe_identity + .map(|identity| { + identity + .serialize_consume_to_bytes() + .map_err(Error::Protocol) + }) + .transpose()?; + + GetIdentityByNonUniquePublicKeyHashResponseV0 { + metadata: Some(self.response_metadata_v0(platform_state)), + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Identity( + IdentityResponse { + identity: serialized_identity, + }, + ), + ), + } + }; + + Ok(QueryValidationResult::new_with_data(response)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::query::tests::setup_platform; + use dapi_grpc::platform::v0::ResponseMetadata; + use dpp::dashcore::Network; + + #[test] + fn test_invalid_public_key_hash() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: vec![0; 8], + start_after: None, + prove: false, + }; + + let result = platform + .query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert!(matches!( + result.errors.as_slice(), + [QueryError::InvalidArgument(msg)] if msg == &"public key hash must be 20 bytes long".to_string() + )); + } + + #[test] + fn test_identity_not_found() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let public_key_hash = vec![0; 20]; + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: public_key_hash.clone(), + start_after: None, + prove: false, + }; + + let result = platform + .query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert_eq!( + result.data, + Some(GetIdentityByNonUniquePublicKeyHashResponseV0 { + metadata: Some(ResponseMetadata { + height: 0, + core_chain_locked_height: 
0, + epoch: 0, + time_ms: 0, + protocol_version: 9, + chain_id: "chain_id".to_string() + }), + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Identity( + IdentityResponse { identity: None } + ) + ), + }) + ); + } + + #[test] + fn test_identity_absence_proof() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let public_key_hash = vec![0; 20]; + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: public_key_hash.clone(), + start_after: None, + prove: true, + }; + + let result = platform + .query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert!(matches!( + result.data, + Some(GetIdentityByNonUniquePublicKeyHashResponseV0 { + result: Some( + get_identity_by_non_unique_public_key_hash_response_v0::Result::Proof(_) + ), + metadata: Some(_), + }) + )); + } +} diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/mod.rs similarity index 92% rename from packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/mod.rs rename to packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/mod.rs index db20b658b6f..2585d26afb8 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/mod.rs @@ -32,7 +32,7 @@ impl Platform { .drive_abci .query .identity_based_queries - .identity_by_public_key_hash; + .identity_by_unique_public_key_hash; let feature_version = match &version { RequestVersion::V0(_) => 0, @@ -41,7 +41,7 @@ impl Platform { if !feature_version_bounds.check_version(feature_version) { return Ok(QueryValidationResult::new_with_error( 
QueryError::UnsupportedQueryVersion( - "identity_by_public_key_hash".to_string(), + "identity_by_unique_public_key_hash".to_string(), feature_version_bounds.min_version, feature_version_bounds.max_version, platform_version.protocol_version, @@ -52,7 +52,7 @@ impl Platform { match version { RequestVersion::V0(request_v0) => { - let request = self.query_identity_by_public_key_hash_v0( + let request = self.query_identity_by_unique_public_key_hash_v0( request_v0, platform_state, platform_version, diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/v0/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/v0/mod.rs similarity index 94% rename from packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/v0/mod.rs rename to packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/v0/mod.rs index 0d3db5c8023..012f289e027 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_public_key_hash/v0/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_unique_public_key_hash/v0/mod.rs @@ -14,7 +14,7 @@ use dpp::validation::ValidationResult; use dpp::version::PlatformVersion; impl Platform { - pub(super) fn query_identity_by_public_key_hash_v0( + pub(super) fn query_identity_by_unique_public_key_hash_v0( &self, GetIdentityByPublicKeyHashRequestV0 { public_key_hash, @@ -91,7 +91,7 @@ mod tests { }; let result = platform - .query_identity_by_public_key_hash_v0(request, &state, version) + .query_identity_by_unique_public_key_hash_v0(request, &state, version) .expect("expected query to succeed"); assert!(matches!( @@ -111,7 +111,7 @@ mod tests { }; let result = platform - .query_identity_by_public_key_hash_v0(request, &state, version) + .query_identity_by_unique_public_key_hash_v0(request, &state, version) .expect("expected query to succeed"); assert!(matches!( @@ -131,7 +131,7 @@ 
mod tests { }; let result = platform - .query_identity_by_public_key_hash_v0(request, &state, version) + .query_identity_by_unique_public_key_hash_v0(request, &state, version) .expect("expected query to succeed"); assert!(matches!( diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs index 29fea765215..98b92b25128 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/mod.rs @@ -3,7 +3,8 @@ mod balance_and_revision; mod balances; mod identities_contract_keys; mod identity; -mod identity_by_public_key_hash; +mod identity_by_non_unique_public_key_hash; +mod identity_by_unique_public_key_hash; mod identity_contract_nonce; mod identity_nonce; mod keys; diff --git a/packages/rs-drive-abci/src/query/service.rs b/packages/rs-drive-abci/src/query/service.rs index 156ae629983..9d77f180745 100644 --- a/packages/rs-drive-abci/src/query/service.rs +++ b/packages/rs-drive-abci/src/query/service.rs @@ -28,6 +28,7 @@ use dapi_grpc::platform::v0::{ GetIdentitiesTokenBalancesResponse, GetIdentitiesTokenInfosRequest, GetIdentitiesTokenInfosResponse, GetIdentityBalanceAndRevisionRequest, GetIdentityBalanceAndRevisionResponse, GetIdentityBalanceRequest, GetIdentityBalanceResponse, + GetIdentityByNonUniquePublicKeyHashRequest, GetIdentityByNonUniquePublicKeyHashResponse, GetIdentityByPublicKeyHashRequest, GetIdentityByPublicKeyHashResponse, GetIdentityContractNonceRequest, GetIdentityContractNonceResponse, GetIdentityKeysRequest, GetIdentityKeysResponse, GetIdentityNonceRequest, GetIdentityNonceResponse, GetIdentityRequest, @@ -411,6 +412,18 @@ impl PlatformService for QueryService { .await } + async fn get_identity_by_non_unique_public_key_hash( + &self, + request: Request, + ) -> Result, Status> { + self.handle_blocking_query( + request, + Platform::::query_identity_by_non_unique_public_key_hash, + 
"get_identity_by_non_unique_public_key_hash", + ) + .await + } + async fn wait_for_state_transition_result( &self, _request: Request, diff --git a/packages/rs-drive-abci/tests/strategy_tests/query.rs b/packages/rs-drive-abci/tests/strategy_tests/query.rs index 4a4103c4dbc..5e0aaba3d85 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/query.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/query.rs @@ -266,7 +266,7 @@ impl QueryStrategy { let (proof_root_hash, identity): ( RootHash, Option, - ) = Drive::verify_full_identity_by_public_key_hash( + ) = Drive::verify_full_identity_by_unique_public_key_hash( &proof.grovedb_proof, key_hash, platform_version, diff --git a/packages/rs-drive-proof-verifier/src/proof.rs b/packages/rs-drive-proof-verifier/src/proof.rs index ec15e3df0c9..6be02632ec1 100644 --- a/packages/rs-drive-proof-verifier/src/proof.rs +++ b/packages/rs-drive-proof-verifier/src/proof.rs @@ -326,7 +326,7 @@ impl FromProof for Identity { }; // Extract content from proof and verify Drive/GroveDB proofs - let (root_hash, maybe_identity) = Drive::verify_full_identity_by_public_key_hash( + let (root_hash, maybe_identity) = Drive::verify_full_identity_by_unique_public_key_hash( &proof.grovedb_proof, public_key_hash, platform_version, diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..7945cb0799c --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,53 @@ +mod v0; + +use crate::drive::Drive; +use crate::error::{drive::DriveError, Error}; +use dpp::identity::Identity; + +use dpp::version::PlatformVersion; +use grovedb::TransactionArg; + +impl Drive { + /// Fetches an identity with all its related 
information from storage based on a non unique public key hash. + /// + /// This function leverages the versioning system to direct the fetch operation to the appropriate handler based on the `DriveVersion` provided. + /// + /// # Arguments + /// + /// * `public_key_hash` - A non-unique public key hash corresponding to the identity to be fetched. + /// * `after` - An identity ID after which we want to get back our identity. Basically "don't get back this identity, get the next one" + /// * `transaction` - Transaction arguments. + /// * `drive_version` - A reference to the drive version. + /// + /// # Returns + /// + /// Returns a `Result` containing an `Option` of the `Identity` if it exists, otherwise an `Error` if the fetch operation fails or the version is not supported. + pub fn fetch_full_identity_by_non_unique_public_key_hash( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result, Error> { + match platform_version + .drive + .methods + .identity + .fetch + .public_key_hashes + .fetch_full_identity_by_non_unique_public_key_hash + { + 0 => self.fetch_full_identity_by_non_unique_public_key_hash_v0( + public_key_hash, + after, + transaction, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "fetch_full_identity_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..fa6dd8b1618 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,53 @@ +use 
crate::drive::Drive; + +use crate::error::Error; +use crate::fees::op::LowLevelDriveOperation; + +use dpp::identity::Identity; +use dpp::version::PlatformVersion; + +use grovedb::TransactionArg; + +impl Drive { + /// Fetches an identity with all its information from storage. + pub(super) fn fetch_full_identity_by_non_unique_public_key_hash_v0( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result, Error> { + let mut drive_operations: Vec = vec![]; + self.fetch_full_identity_by_non_unique_public_key_hash_operations_v0( + public_key_hash, + after, + transaction, + &mut drive_operations, + platform_version, + ) + } + + /// Given an identity, fetches the identity with its flags from storage. + pub(super) fn fetch_full_identity_by_non_unique_public_key_hash_operations_v0( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + drive_operations: &mut Vec, + platform_version: &PlatformVersion, + ) -> Result, Error> { + let identity_ids = self.fetch_identity_ids_by_non_unique_public_key_hash_operations( + public_key_hash, + Some(1), + after, + transaction, + drive_operations, + platform_version, + )?; + if let Some(identity_id) = identity_ids.first() { + self.fetch_full_identity(*identity_id, transaction, platform_version) + } else { + Ok(None) + } + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs index 9c444e59731..13c3e8111f8 100644 --- a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/mod.rs @@ -2,8 +2,9 @@ mod v0; 
use crate::drive::Drive; use crate::error::{drive::DriveError, Error}; -use dpp::version::drive_versions::DriveVersion; +use crate::fees::op::LowLevelDriveOperation; use grovedb::TransactionArg; +use platform_version::version::PlatformVersion; impl Drive { /// Fetches identity ids from storage based on a non-unique public key hash. @@ -13,8 +14,9 @@ impl Drive { /// # Arguments /// /// * `public_key_hash` - A non-unique public key hash corresponding to the identity ids to be fetched. + /// * `limit` - An optional limit. /// * `transaction` - Transaction arguments. - /// * `drive_version` - A reference to the drive version. + /// * `platform_version` - A reference to the platform version. /// /// # Returns /// @@ -22,10 +24,13 @@ impl Drive { pub fn fetch_identity_ids_by_non_unique_public_key_hash( &self, public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, transaction: TransactionArg, - drive_version: &DriveVersion, + platform_version: &PlatformVersion, ) -> Result, Error> { - match drive_version + match platform_version + .drive .methods .identity .fetch @@ -34,8 +39,10 @@ impl Drive { { 0 => self.fetch_identity_ids_by_non_unique_public_key_hash_v0( public_key_hash, + limit, + after, transaction, - drive_version, + platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { method: "fetch_identity_ids_by_non_unique_public_key_hash".to_string(), @@ -44,4 +51,37 @@ impl Drive { })), } } + + pub(crate) fn fetch_identity_ids_by_non_unique_public_key_hash_operations( + &self, + public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, + transaction: TransactionArg, + drive_operations: &mut Vec, + platform_version: &PlatformVersion, + ) -> Result, Error> { + match platform_version + .drive + .methods + .identity + .fetch + .public_key_hashes + .fetch_identity_ids_by_non_unique_public_key_hash + { + 0 => self.fetch_identity_ids_by_non_unique_public_key_hash_operations_v0( + public_key_hash, + limit, + after, + 
transaction, + drive_operations, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "fetch_identity_ids_by_non_unique_public_key_hash_operations".to_string(), + known_versions: vec![0], + received: version, + })), + } + } } diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs index ee462d63281..0eeacb54b23 100644 --- a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/fetch_identity_ids_by_non_unique_public_key_hash/v0/mod.rs @@ -6,10 +6,9 @@ use crate::query::QueryItem; use grovedb::query_result_type::QueryResultType; -use dpp::version::drive_versions::DriveVersion; - use grovedb::{PathQuery, TransactionArg}; +use platform_version::version::PlatformVersion; use std::ops::RangeFull; impl Drive { @@ -17,37 +16,49 @@ impl Drive { pub(super) fn fetch_identity_ids_by_non_unique_public_key_hash_v0( &self, public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, transaction: TransactionArg, - drive_version: &DriveVersion, + platform_version: &PlatformVersion, ) -> Result, Error> { let mut drive_operations: Vec = vec![]; self.fetch_identity_ids_by_non_unique_public_key_hash_operations_v0( public_key_hash, + limit, + after, transaction, &mut drive_operations, - drive_version, + platform_version, ) } - /// Gets identity ids from non unique public key hashes. + /// Gets identity ids from non-unique public key hashes. 
pub(super) fn fetch_identity_ids_by_non_unique_public_key_hash_operations_v0( &self, public_key_hash: [u8; 20], + limit: Option, + after: Option<[u8; 32]>, transaction: TransactionArg, drive_operations: &mut Vec, - drive_version: &DriveVersion, + platform_version: &PlatformVersion, ) -> Result, Error> { let non_unique_key_hashes = non_unique_key_hashes_sub_tree_path_vec(public_key_hash); - let path_query = PathQuery::new_single_query_item( - non_unique_key_hashes, - QueryItem::RangeFull(RangeFull), - ); + let mut path_query = if let Some(after) = after { + PathQuery::new_single_query_item( + non_unique_key_hashes, + QueryItem::RangeAfter(after.to_vec()..), + ) + } else { + PathQuery::new_single_query_item(non_unique_key_hashes, QueryItem::RangeFull(RangeFull)) + }; + path_query.query.limit = limit; + let (results, _) = self.grove_get_path_query( &path_query, transaction, QueryResultType::QueryKeyElementPairResultType, drive_operations, - drive_version, + &platform_version.drive, )?; results .to_keys() diff --git a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs index 859982396ab..cbd75b76d42 100644 --- a/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/fetch_by_public_key_hashes/mod.rs @@ -1,4 +1,5 @@ mod fetch_full_identities_by_unique_public_key_hashes; +mod fetch_full_identity_by_non_unique_public_key_hash; mod fetch_full_identity_by_unique_public_key_hash; mod fetch_identity_id_by_unique_public_key_hash; mod fetch_identity_ids_by_non_unique_public_key_hash; @@ -23,7 +24,6 @@ mod tests { fn test_fetch_all_keys_on_identity() { let drive = setup_drive(None, None); let platform_version = PlatformVersion::latest(); - let drive_version = &platform_version.drive; let transaction = drive.grove.start_transaction(); @@ -71,8 +71,10 @@ mod tests { let identity_ids = drive 
.fetch_identity_ids_by_non_unique_public_key_hash( hash, + None, + None, Some(&transaction), - &drive_version, + platform_version, ) .expect("expected to get identity ids"); assert!(identity_ids.contains(&identity.id().to_buffer())); diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs index b095b6bf427..7ca607c9f0a 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/mod.rs @@ -1,6 +1,7 @@ mod prove_full_identities; mod prove_full_identities_by_unique_public_key_hashes; mod prove_full_identity; +mod prove_full_identity_by_non_unique_public_key_hash; mod prove_full_identity_by_unique_public_key_hash; mod prove_identities_contract_keys; mod prove_identity_id_by_unique_public_key_hash; diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..5712e0df0d4 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,75 @@ +mod v0; + +use crate::drive::Drive; +use crate::error::drive::DriveError; +use crate::error::Error; + +use crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use dpp::version::PlatformVersion; +use grovedb::TransactionArg; + +impl Drive { + /// Generates a proof for an identity associated with a given non-unique public key hash. + /// + /// This function retrieves an identity along with its associated proofs from storage. + /// It utilizes versioning to call the appropriate handler based on the provided + /// `PlatformVersion`. 
+ /// + /// # Arguments + /// + /// - `public_key_hash` - A 20-byte array representing the hash of the public key + /// for which the identity should be fetched. + /// - `after` - An optional identity ID specifying the starting point for retrieval. + /// If provided, the function will return the identity that appears after the given ID, + /// ensuring that the specified identity itself is not included. + /// - `transaction` - A transaction argument used for database operations. + /// - `platform_version` - A reference to the platform version, ensuring that the + /// correct version-specific function is used. + /// + /// # Returns + /// + /// Returns a `Result` containing an [`IdentityAndNonUniquePublicKeyHashDoubleProof`], which + /// includes both the proof of the identity and the proof linking the public key hash to + /// an identity ID. If the operation fails or the platform version is unsupported, an `Error` + /// is returned. + /// + /// # Errors + /// + /// This function will return an `Error` if: + /// - The identity retrieval operation fails. + /// - The provided public key hash does not correspond to a known identity. + /// - The requested platform version is unknown or not supported. + /// + /// # Versioning + /// + /// - Currently, only version `0` of `prove_full_identity_by_non_unique_public_key_hash` + /// is implemented. If an unsupported version is provided, an `UnknownVersionMismatch` + /// error is returned. 
+ pub fn prove_full_identity_by_non_unique_public_key_hash( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result { + match platform_version + .drive + .methods + .identity + .prove + .prove_full_identity_by_non_unique_public_key_hash + { + 0 => self.prove_full_identity_by_non_unique_public_key_hash_v0( + public_key_hash, + after, + transaction, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "prove_full_identity_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..93c8d26c519 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,219 @@ +use crate::drive::Drive; + +use crate::error::Error; + +use crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use dpp::version::PlatformVersion; +use grovedb::TransactionArg; + +impl Drive { + /// Fetches an identity with all its information from storage. 
+ pub(super) fn prove_full_identity_by_non_unique_public_key_hash_v0( + &self, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + transaction: TransactionArg, + platform_version: &PlatformVersion, + ) -> Result { + let identity_ids = self.fetch_identity_ids_by_non_unique_public_key_hash_operations( + public_key_hash, + Some(1), + after, + transaction, + &mut vec![], + platform_version, + )?; + // We only prove the absence of the public key hash + let mut path_query = + Self::identity_id_by_non_unique_public_key_hash_query(public_key_hash, after); + path_query.query.limit = Some(1); + let identity_id_public_key_hash_proof = self.grove_get_proved_path_query( + &path_query, + transaction, + &mut vec![], + &platform_version.drive, + )?; + let identity_proof = if let Some(identity_id) = identity_ids.first() { + let full_identity_query = + Self::full_identity_query(&identity_id, &platform_version.drive.grove_version)?; + Some(self.grove_get_proved_path_query( + &full_identity_query, + transaction, + &mut vec![], + &platform_version.drive, + )?) 
+ } else { + None + }; + + Ok(IdentityAndNonUniquePublicKeyHashDoubleProof { + identity_proof, + identity_id_public_key_hash_proof, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::util::test_helpers::setup::setup_drive_with_initial_state_structure; + use dpp::block::block_info::BlockInfo; + use dpp::identity::accessors::IdentityGettersV0; + use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; + use dpp::identity::identity_public_key::methods::hash::IdentityPublicKeyHashMethodsV0; + use dpp::identity::{Identity, IdentityPublicKey, KeyType}; + use dpp::version::PlatformVersion; + use rand::prelude::StdRng; + use rand::SeedableRng; + + #[test] + fn should_prove_a_single_identity_with_non_unique_key() { + let drive = setup_drive_with_initial_state_structure(None); + + let platform_version = PlatformVersion::latest(); + + let identity = Identity::random_identity(3, Some(14), platform_version) + .expect("expected a random identity"); + + drive + .add_new_identity( + identity.clone(), + false, + &BlockInfo::default(), + true, + None, + platform_version, + ) + .expect("expected to add an identity"); + + let first_key_hash = identity + .public_keys() + .values() + .find(|public_key| !public_key.key_type().is_unique_key_type()) + .expect("expected a unique key") + .public_key_hash() + .expect("expected to hash data"); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + first_key_hash, + None, + None, + platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + first_key_hash, + None, + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, Some(identity)); + } + + #[test] + fn should_prove_a_single_identity_with_non_unique_key_when_two_have_same_key() { + let drive = setup_drive_with_initial_state_structure(None); + + let platform_version 
= PlatformVersion::latest(); + + let mut identity_1 = Identity::random_identity(3, Some(14), platform_version) + .expect("expected a random identity"); + + let mut identity_2 = Identity::random_identity(3, Some(15), platform_version) + .expect("expected a random identity"); + + let mut rng = StdRng::seed_from_u64(506); + + let key = IdentityPublicKey::random_voting_key_with_rng(3, &mut rng, platform_version) + .expect("expected key") + .0; + + identity_1.add_public_key(key.clone()); + identity_2.add_public_key(key.clone()); + + drive + .add_new_identity( + identity_1.clone(), + false, + &BlockInfo::default(), + true, + None, + platform_version, + ) + .expect("expected to add an identity"); + + drive + .add_new_identity( + identity_2.clone(), + false, + &BlockInfo::default(), + true, + None, + platform_version, + ) + .expect("expected to add an identity"); + + let key_hash = key.public_key_hash().expect("expected key hash"); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + key_hash, + None, + None, + platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + key_hash, + None, + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, Some(identity_1.clone())); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + key_hash, + Some(identity_1.id().to_buffer()), + None, + platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + key_hash, + Some(identity_1.id().to_buffer()), + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, Some(identity_2.clone())); + + let proof = drive + .prove_full_identity_by_non_unique_public_key_hash( + key_hash, + Some(identity_2.id().to_buffer()), + None, + 
platform_version, + ) + .expect("should not error when proving an identity"); + + let (_, proved_identity) = Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof, + key_hash, + Some(identity_2.id().to_buffer()), + platform_version, + ) + .expect("expect that this be verified"); + + assert_eq!(proved_identity, None); + } +} diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs index f474fa52315..085920640ba 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_unique_public_key_hash/v0/mod.rs @@ -87,7 +87,7 @@ mod tests { .prove_full_identity_by_unique_public_key_hash(first_key_hash, None, platform_version) .expect("should not error when proving an identity"); - let (_, proved_identity) = Drive::verify_full_identity_by_public_key_hash( + let (_, proved_identity) = Drive::verify_full_identity_by_unique_public_key_hash( proof.as_slice(), first_key_hash, platform_version, diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs index 8900c2e2dcc..83af15538f1 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_id_by_unique_public_key_hash/v0/mod.rs @@ -65,7 +65,7 @@ mod tests { ) .expect("should not error when proving an identity"); - let (_, proved_identity_id) = Drive::verify_identity_id_by_public_key_hash( + let (_, proved_identity_id) = Drive::verify_identity_id_by_unique_public_key_hash( proof.as_slice(), false, first_key_hash, diff --git 
a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs index 1b4636e6b5a..d9c54243db5 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_identity_ids_by_unique_public_key_hashes/v0/mod.rs @@ -89,7 +89,7 @@ mod tests { .expect("should not error when proving an identity"); let (_, proved_identity_id): ([u8; 32], BTreeMap<[u8; 20], Option<[u8; 32]>>) = - Drive::verify_identity_ids_by_public_key_hashes( + Drive::verify_identity_ids_by_unique_public_key_hashes( proof.as_slice(), false, &key_hashes, diff --git a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs index f7832d2274f..6e1b1e4b65d 100644 --- a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs @@ -1,6 +1,9 @@ use crate::drive::balances::balance_path_vec; use crate::drive::identity::key::fetch::IdentityKeysRequest; -use crate::drive::{identity_tree_path_vec, unique_key_hashes_tree_path_vec, Drive}; +use crate::drive::{ + identity_tree_path_vec, non_unique_key_hashes_tree_path, non_unique_key_hashes_tree_path_vec, + unique_key_hashes_tree_path_vec, Drive, +}; use std::ops::RangeFull; use crate::error::Error; @@ -90,6 +93,23 @@ impl Drive { PathQuery::new_single_key(unique_key_hashes, public_key_hash.to_vec()) } + /// The query for proving an identity id from a non-unique public key hash. 
+ /// This should be used for absence proofs + pub fn identity_id_by_non_unique_public_key_hash_query( + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + ) -> PathQuery { + let non_unique_key_hashes = non_unique_key_hashes_tree_path_vec(); + let mut query = Query::new_single_key(public_key_hash.to_vec()); + let sub_query = if let Some(after) = after { + Query::new_single_query_item(QueryItem::RangeFrom(after.to_vec()..)) + } else { + Query::new_range_full() + }; + query.set_subquery(sub_query); + PathQuery::new(non_unique_key_hashes, SizedQuery::new(query, None, None)) + } + /// The query for proving identity ids from a vector of public key hashes. pub fn identity_ids_by_unique_public_key_hash_query( public_key_hashes: &[[u8; 20]], @@ -213,6 +233,23 @@ impl Drive { .map_err(Error::GroveDB) } + /// This query gets the full identity and the public key hash + pub fn full_identity_with_non_unique_public_key_hash_query( + public_key_hash: [u8; 20], + identity_id: [u8; 32], + after: Option<[u8; 32]>, + grove_version: &GroveVersion, + ) -> Result { + let full_identity_query = Self::full_identity_query(&identity_id, grove_version)?; + let identity_id_by_public_key_hash_query = + Self::identity_id_by_non_unique_public_key_hash_query(public_key_hash, after); + PathQuery::merge( + vec![&full_identity_query, &identity_id_by_public_key_hash_query], + grove_version, + ) + .map_err(Error::GroveDB) + } + /// The query full identities with key hashes too pub fn full_identities_with_keys_hashes_query( identity_ids: &[[u8; 32]], diff --git a/packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs b/packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs new file mode 100644 index 00000000000..ccd439ca080 --- /dev/null +++ b/packages/rs-drive/src/drive/identity/identity_and_non_unique_public_key_hash_double_proof.rs @@ -0,0 +1,21 @@ +/// Represents a proof containing an optional identity proof and a 
required +/// proof for the identity ID and non-unique public key hash. +/// +/// This struct is used to verify the authenticity and validity of an identity +/// and its associated non-unique public key hash. +/// +/// # Fields +/// +/// * `identity_proof` - An optional proof for the identity, represented as a +/// serialized byte vector. This may be `None` if no additional proof is required. +/// * `identity_id_public_key_hash_proof` - A required proof verifying the +/// association between an identity ID and its non-unique public key hash, +/// stored as a serialized byte vector. +pub struct IdentityAndNonUniquePublicKeyHashDoubleProof { + /// Optional proof of identity, stored as a serialized byte vector. + pub identity_proof: Option>, + + /// Proof linking an identity ID to a non-unique public key hash, + /// stored as a serialized byte vector. + pub identity_id_public_key_hash_proof: Vec, +} diff --git a/packages/rs-drive/src/drive/identity/mod.rs b/packages/rs-drive/src/drive/identity/mod.rs index 1922e74b884..a75be0ea563 100644 --- a/packages/rs-drive/src/drive/identity/mod.rs +++ b/packages/rs-drive/src/drive/identity/mod.rs @@ -40,6 +40,10 @@ pub mod key; #[cfg(feature = "server")] pub mod update; +/// A module for a struct encapsulating an identity and a non-unique public key hash to identity id proof +#[cfg(any(feature = "server", feature = "verify"))] +pub mod identity_and_non_unique_public_key_hash_double_proof; + use crate::drive::identity::contract_info::ContractInfoStructure; use crate::error::drive::DriveError; use crate::error::Error; diff --git a/packages/rs-drive/src/verify/identity/mod.rs b/packages/rs-drive/src/verify/identity/mod.rs index dd24b00f678..a9308182abb 100644 --- a/packages/rs-drive/src/verify/identity/mod.rs +++ b/packages/rs-drive/src/verify/identity/mod.rs @@ -1,13 +1,15 @@ mod verify_full_identities_by_public_key_hashes; mod verify_full_identity_by_identity_id; -mod verify_full_identity_by_public_key_hash; +mod 
verify_full_identity_by_non_unique_public_key_hash; +mod verify_full_identity_by_unique_public_key_hash; mod verify_identities_contract_keys; mod verify_identity_balance_and_revision_for_identity_id; mod verify_identity_balance_for_identity_id; mod verify_identity_balances_for_identity_ids; mod verify_identity_contract_nonce; -mod verify_identity_id_by_public_key_hash; -mod verify_identity_ids_by_public_key_hashes; +mod verify_identity_id_by_non_unique_public_key_hash; +mod verify_identity_id_by_unique_public_key_hash; +mod verify_identity_ids_by_unique_public_key_hashes; mod verify_identity_keys_by_identity_id; mod verify_identity_nonce; mod verify_identity_revision_for_identity_id; diff --git a/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs index 7c1f0545d05..43ce89e6d00 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identities_by_public_key_hashes/v0/mod.rs @@ -48,7 +48,7 @@ impl Drive { platform_version: &PlatformVersion, ) -> Result<(RootHash, T), Error> { let (root_hash, identity_ids_by_key_hashes) = - Self::verify_identity_ids_by_public_key_hashes::>( + Self::verify_identity_ids_by_unique_public_key_hashes::>( proof, true, public_key_hashes, diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..4e593caeb67 --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,79 @@ +mod v0; + +use crate::drive::Drive; + +use crate::error::drive::DriveError; + +use crate::error::Error; + +use crate::verify::RootHash; + +pub use dpp::prelude::Identity; + +use 
crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use dpp::version::PlatformVersion; + +impl Drive { + /// Verifies the full identity of a user using their non-unique public key hash. + /// + /// This function acts as a dispatcher that selects the appropriate version-specific + /// verification method based on the provided platform version. + /// + /// # Parameters + /// + /// - `proof`: A proof containing both the identity proof (if applicable) and the + /// proof linking the public key hash to an identity ID. + /// - `public_key_hash`: A 20-byte array representing the hash of the user's public key. + /// - `after`: An optional 32-byte array specifying an identity after which + /// the search should begin when retrieving the identity. + /// - `platform_version`: A reference to the platform version, ensuring that + /// the correct verification method is used. + /// + /// # Returns + /// + /// Returns a `Result` containing: + /// - `RootHash`: The root hash of GroveDB after verification. + /// - `Option`: The full identity of the user, if it exists. + /// + /// If no identity is found, the returned `Option` will be `None`. + /// + /// # Errors + /// + /// This function returns an `Error` if: + /// - The provided proof is invalid. + /// - The public key hash does not correspond to a valid identity ID. + /// - The identity ID exists but does not correspond to a valid full identity. + /// - The provided platform version is unknown or unsupported. + /// + /// # Versioning + /// + /// - Currently, only version `0` of `verify_full_identity_by_non_unique_public_key_hash` + /// is implemented. If an unsupported version is provided, an `UnknownVersionMismatch` + /// error is returned. 
+ pub fn verify_full_identity_by_non_unique_public_key_hash( + proof: &IdentityAndNonUniquePublicKeyHashDoubleProof, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option), Error> { + match platform_version + .drive + .methods + .verify + .identity + .verify_full_identity_by_non_unique_public_key_hash + { + 0 => Self::verify_full_identity_by_non_unique_public_key_hash_v0( + proof, + public_key_hash, + after, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "verify_full_identity_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..ccb81c1d6fb --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,82 @@ +use crate::drive::Drive; + +use crate::error::Error; + +use crate::verify::RootHash; + +pub use dpp::prelude::Identity; + +use crate::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; +use crate::error::proof::ProofError; +use dpp::version::PlatformVersion; + +impl Drive { + /// Verifies the full identity of a user using their non-unique public key hash. + /// + /// This function performs a two-step verification process: + /// 1. It verifies the identity ID associated with the given public key hash + /// by calling [`verify_identity_id_by_non_unique_public_key_hash()`]. + /// 2. If an identity ID is found, it then verifies the full identity by calling + /// [`verify_full_identity_by_identity_id()`]. 
+ /// + /// # Arguments + /// + /// * `proof` - A proof containing both the identity proof (if applicable) and + /// the proof linking the public key hash to an identity ID. + /// * `public_key_hash` - A 20-byte array representing the hash of the user's public key. + /// * `after` - An optional 32-byte array used to specify a search point in the proof verification process. + /// * `platform_version` - A reference to the platform version, ensuring compatibility. + /// + /// # Returns + /// + /// If verification is successful, returns a `Result` containing: + /// - `RootHash` - The root hash of GroveDB after verification. + /// - `Option` - The full identity of the user, if it exists. + /// + /// If no identity is found, the returned `Option` will be `None`. + /// + /// # Errors + /// + /// This function will return an `Error` if: + /// * The provided proof is invalid. + /// * The public key hash does not correspond to a valid identity ID. + /// * The identity ID exists but the associated identity proof is missing. + /// * The identity verification process fails. + /// + /// # Inline Optimization + /// + /// This function is marked with `#[inline(always)]` to hint the compiler to + /// aggressively inline it for performance optimization. 
+ #[inline(always)] + pub(super) fn verify_full_identity_by_non_unique_public_key_hash_v0( + proof: &IdentityAndNonUniquePublicKeyHashDoubleProof, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option), Error> { + let (root_hash, identity_id) = Self::verify_identity_id_by_non_unique_public_key_hash( + &proof.identity_id_public_key_hash_proof, + false, + public_key_hash, + after, + platform_version, + )?; + let maybe_identity = identity_id + .map(|identity_id| { + let Some(identity_proof) = &proof.identity_proof else { + return Err(Error::Proof(ProofError::IncompleteProof("identity is not in proof even though identity id is set from non unique public key hash"))); + }; + println!("hex {}", hex::encode(&identity_proof)); + Self::verify_full_identity_by_identity_id( + identity_proof.as_slice(), + false, + identity_id, + platform_version, + ) + .map(|(_, maybe_identity)| maybe_identity) + }) + .transpose()? + .flatten(); + Ok((root_hash, maybe_identity)) + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/mod.rs similarity index 89% rename from packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/mod.rs rename to packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/mod.rs index 6ce7cc6f263..17cd33c1995 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/mod.rs @@ -36,7 +36,7 @@ impl Drive { /// - The identity ID does not correspond to a valid full identity. /// - An unknown or unsupported platform version is provided. 
/// - pub fn verify_full_identity_by_public_key_hash( + pub fn verify_full_identity_by_unique_public_key_hash( proof: &[u8], public_key_hash: [u8; 20], platform_version: &PlatformVersion, @@ -48,13 +48,13 @@ impl Drive { .identity .verify_full_identity_by_public_key_hash { - 0 => Self::verify_full_identity_by_public_key_hash_v0( + 0 => Self::verify_full_identity_by_unique_public_key_hash_v0( proof, public_key_hash, platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { - method: "verify_full_identity_by_public_key_hash".to_string(), + method: "verify_full_identity_by_unique_public_key_hash".to_string(), known_versions: vec![0], received: version, })), diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs similarity index 96% rename from packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/v0/mod.rs rename to packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs index e481343470a..88fe2894242 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs @@ -38,12 +38,12 @@ impl Drive { /// * The identity ID does not correspond to a valid full identity. 
/// #[inline(always)] - pub(super) fn verify_full_identity_by_public_key_hash_v0( + pub(super) fn verify_full_identity_by_unique_public_key_hash_v0( proof: &[u8], public_key_hash: [u8; 20], platform_version: &PlatformVersion, ) -> Result<(RootHash, Option), Error> { - let (root_hash, identity_id) = Self::verify_identity_id_by_public_key_hash( + let (root_hash, identity_id) = Self::verify_identity_id_by_unique_public_key_hash( proof, true, public_key_hash, diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs new file mode 100644 index 00000000000..0d1ccae9f99 --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/mod.rs @@ -0,0 +1,65 @@ +mod v0; + +use crate::drive::Drive; + +use crate::error::drive::DriveError; + +use crate::error::Error; + +use crate::verify::RootHash; + +use dpp::version::PlatformVersion; + +impl Drive { + /// Verifies the identity ID of a user by their public key hash. + /// + /// # Parameters + /// + /// - `proof`: A byte slice representing the proof of authentication from the user. + /// - `is_proof_subset`: A boolean indicating whether the proof is a subset. + /// - `public_key_hash`: A 20-byte array representing the hash of the public key of the user. + /// - `after`: A 32 byte array representing an identity after which we want to get the identity id. + /// - `platform_version`: The platform version against which to verify the identity ID. + /// + /// # Returns + /// + /// If the verification is successful, it returns a `Result` with a tuple of `RootHash` and + /// an `Option` of a 32-byte array. The `RootHash` represents the root hash of GroveDB, + /// and the `Option<[u8; 32]>` represents the identity ID of the user if it exists. 
+ /// + /// # Errors + /// + /// Returns an `Error` if: + /// + /// - An unknown or unsupported platform version is provided. + /// - Any other error as documented in the specific versioned function. + /// + pub fn verify_identity_id_by_non_unique_public_key_hash( + proof: &[u8], + is_proof_subset: bool, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option<[u8; 32]>), Error> { + match platform_version + .drive + .methods + .verify + .identity + .verify_identity_id_by_non_unique_public_key_hash + { + 0 => Self::verify_identity_id_by_non_unique_public_key_hash_v0( + proof, + is_proof_subset, + public_key_hash, + after, + platform_version, + ), + version => Err(Error::Drive(DriveError::UnknownVersionMismatch { + method: "verify_identity_id_by_non_unique_public_key_hash".to_string(), + known_versions: vec![0], + received: version, + })), + } + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs new file mode 100644 index 00000000000..1d3730b5562 --- /dev/null +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_non_unique_public_key_hash/v0/mod.rs @@ -0,0 +1,69 @@ +use crate::drive::{non_unique_key_hashes_sub_tree_path_vec, Drive}; + +use crate::error::proof::ProofError; +use crate::error::Error; + +use crate::verify::RootHash; + +use grovedb::GroveDb; +use platform_version::version::PlatformVersion; + +impl Drive { + /// Verifies the identity ID of a user by their public key hash. + /// + /// # Parameters + /// + /// - `proof`: A byte slice representing the proof of authentication from the user. + /// - `is_proof_subset`: A boolean indicating whether the proof is a subset. + /// - `public_key_hash`: A 20-byte array representing the hash of the public key of the user. 
+ /// - `after`: A 32 byte array representing an identity after which we want to get the identity id. + /// + /// # Returns + /// + /// If the verification is successful, it returns a `Result` with a tuple of `RootHash` and + /// an `Option` of a 32-byte array. The `RootHash` represents the root hash of GroveDB, + /// and the `Option<[u8; 32]>` represents the identity ID of the user if it exists. + /// + /// # Errors + /// + /// Returns an `Error` if: + /// + /// - The proof of authentication is not valid. + /// - The public key hash does not correspond to a valid identity ID. + /// - The proved key value is not for the correct path or key in unique key hashes. + /// - More than one identity ID is found. + /// + #[inline(always)] + pub(super) fn verify_identity_id_by_non_unique_public_key_hash_v0( + proof: &[u8], + is_proof_subset: bool, + public_key_hash: [u8; 20], + after: Option<[u8; 32]>, + platform_version: &PlatformVersion, + ) -> Result<(RootHash, Option<[u8; 32]>), Error> { + let mut path_query = + Self::identity_id_by_non_unique_public_key_hash_query(public_key_hash, after); + path_query.query.limit = Some(1); + let (root_hash, mut proved_key_values) = if is_proof_subset { + GroveDb::verify_subset_query(proof, &path_query, &platform_version.drive.grove_version)? + } else { + GroveDb::verify_query(proof, &path_query, &platform_version.drive.grove_version)? 
+ }; + + if proved_key_values.len() == 1 { + let (path, key, _) = proved_key_values.remove(0); + if path != non_unique_key_hashes_sub_tree_path_vec(public_key_hash) { + return Err(Error::Proof(ProofError::CorruptedProof( + "we did not get back an element for the correct path in non unique key hashes" + .to_string(), + ))); + } + let identity_id = key.try_into().map_err(|_| { + Error::Proof(ProofError::IncorrectValueSize("value size is incorrect")) + })?; + Ok((root_hash, Some(identity_id))) + } else { + Ok((root_hash, None)) + } + } +} diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/mod.rs similarity index 86% rename from packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/mod.rs index 75182ffeefc..4422e600e01 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/mod.rs @@ -33,7 +33,7 @@ impl Drive { /// - An unknown or unsupported platform version is provided. /// - Any other error as documented in the specific versioned function. 
/// - pub fn verify_identity_id_by_public_key_hash( + pub fn verify_identity_id_by_unique_public_key_hash( proof: &[u8], is_proof_subset: bool, public_key_hash: [u8; 20], @@ -44,16 +44,16 @@ impl Drive { .methods .verify .identity - .verify_identity_id_by_public_key_hash + .verify_identity_id_by_unique_public_key_hash { - 0 => Self::verify_identity_id_by_public_key_hash_v0( + 0 => Self::verify_identity_id_by_unique_public_key_hash_v0( proof, is_proof_subset, public_key_hash, platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { - method: "verify_identity_id_by_public_key_hash".to_string(), + method: "verify_identity_id_by_unique_public_key_hash".to_string(), known_versions: vec![0], received: version, })), diff --git a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/v0/mod.rs similarity index 98% rename from packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/v0/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/v0/mod.rs index ceb8ae05b07..cccc1aa0f15 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_id_by_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_id_by_unique_public_key_hash/v0/mod.rs @@ -33,7 +33,7 @@ impl Drive { /// - More than one identity ID is found. 
/// #[inline(always)] - pub(super) fn verify_identity_id_by_public_key_hash_v0( + pub(super) fn verify_identity_id_by_unique_public_key_hash_v0( proof: &[u8], is_proof_subset: bool, public_key_hash: [u8; 20], diff --git a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/mod.rs similarity index 87% rename from packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/mod.rs index 20a9d98c3d8..6d1f70f8a04 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/mod.rs @@ -36,7 +36,7 @@ impl Drive { /// - An unknown or unsupported platform version is provided. /// - Any other error as documented in the specific versioned function. 
/// - pub fn verify_identity_ids_by_public_key_hashes< + pub fn verify_identity_ids_by_unique_public_key_hashes< T: FromIterator<([u8; 20], Option<[u8; 32]>)>, >( proof: &[u8], @@ -49,16 +49,16 @@ impl Drive { .methods .verify .identity - .verify_identity_ids_by_public_key_hashes + .verify_identity_ids_by_unique_public_key_hashes { - 0 => Self::verify_identity_ids_by_public_key_hashes_v0( + 0 => Self::verify_identity_ids_by_unique_public_key_hashes_v0( proof, is_proof_subset, public_key_hashes, platform_version, ), version => Err(Error::Drive(DriveError::UnknownVersionMismatch { - method: "verify_identity_ids_by_public_key_hashes".to_string(), + method: "verify_identity_ids_by_unique_public_key_hashes".to_string(), known_versions: vec![0], received: version, })), diff --git a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/v0/mod.rs similarity index 98% rename from packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/v0/mod.rs rename to packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/v0/mod.rs index 6d1c6dc7d85..90beab8eaf3 100644 --- a/packages/rs-drive/src/verify/identity/verify_identity_ids_by_public_key_hashes/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_identity_ids_by_unique_public_key_hashes/v0/mod.rs @@ -37,7 +37,7 @@ impl Drive { /// - The number of proved key values does not match the number of public key hashes provided. /// - The value size of the identity ID is incorrect. 
/// - pub(crate) fn verify_identity_ids_by_public_key_hashes_v0< + pub(crate) fn verify_identity_ids_by_unique_public_key_hashes_v0< T: FromIterator<([u8; 20], Option<[u8; 32]>)>, >( proof: &[u8], diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs index 7a5d65bd3ea..ac3412fd85e 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs @@ -52,7 +52,7 @@ pub struct DriveAbciQueryIdentityVersions { pub balance: FeatureVersionBounds, pub identities_balances: FeatureVersionBounds, pub balance_and_revision: FeatureVersionBounds, - pub identity_by_public_key_hash: FeatureVersionBounds, + pub identity_by_unique_public_key_hash: FeatureVersionBounds, } #[derive(Clone, Debug, Default)] diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs index 494dfa6f097..829c9a218d2 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs @@ -67,7 +67,7 @@ pub const DRIVE_ABCI_QUERY_VERSIONS_V1: DriveAbciQueryVersions = DriveAbciQueryV max_version: 0, default_current_version: 0, }, - identity_by_public_key_hash: FeatureVersionBounds { + identity_by_unique_public_key_hash: FeatureVersionBounds { min_version: 0, max_version: 0, default_current_version: 0, diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs index 1a2526f4fe7..ae6d64f233f 100644 --- 
a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/mod.rs @@ -95,6 +95,7 @@ pub struct DriveIdentityFetchPublicKeyHashesMethodVersions { pub has_non_unique_public_key_hash: FeatureVersion, pub has_non_unique_public_key_hash_already_for_identity: FeatureVersion, pub has_unique_public_key_hash: FeatureVersion, + pub fetch_full_identity_by_non_unique_public_key_hash: FeatureVersion, } #[derive(Clone, Debug, Default)] @@ -133,6 +134,7 @@ pub struct DriveIdentityProveMethodVersions { pub prove_full_identity_by_unique_public_key_hash: FeatureVersion, pub prove_identity_id_by_unique_public_key_hash: FeatureVersion, pub prove_identity_ids_by_unique_public_key_hashes: FeatureVersion, + pub prove_full_identity_by_non_unique_public_key_hash: FeatureVersion, } #[derive(Clone, Debug, Default)] diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs index beb79c65c18..fab61b69d37 100644 --- a/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_identity_method_versions/v1.rs @@ -27,6 +27,7 @@ pub const DRIVE_IDENTITY_METHOD_VERSIONS_V1: DriveIdentityMethodVersions = has_non_unique_public_key_hash: 0, has_non_unique_public_key_hash_already_for_identity: 0, has_unique_public_key_hash: 0, + fetch_full_identity_by_non_unique_public_key_hash: 0, }, attributes: DriveIdentityFetchAttributesMethodVersions { revision: 0, @@ -58,6 +59,7 @@ pub const DRIVE_IDENTITY_METHOD_VERSIONS_V1: DriveIdentityMethodVersions = prove_full_identity_by_unique_public_key_hash: 0, prove_identity_id_by_unique_public_key_hash: 0, prove_identity_ids_by_unique_public_key_hashes: 0, + 
prove_full_identity_by_non_unique_public_key_hash: 0, }, keys: DriveIdentityKeysMethodVersions { fetch: DriveIdentityKeysFetchMethodVersions { diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs index f8082f48198..90cd83d5291 100644 --- a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/mod.rs @@ -35,13 +35,15 @@ pub struct DriveVerifyIdentityMethodVersions { pub verify_full_identity_by_public_key_hash: FeatureVersion, pub verify_identity_balance_for_identity_id: FeatureVersion, pub verify_identity_balances_for_identity_ids: FeatureVersion, - pub verify_identity_id_by_public_key_hash: FeatureVersion, - pub verify_identity_ids_by_public_key_hashes: FeatureVersion, + pub verify_identity_id_by_unique_public_key_hash: FeatureVersion, + pub verify_identity_ids_by_unique_public_key_hashes: FeatureVersion, pub verify_identity_keys_by_identity_id: FeatureVersion, pub verify_identity_nonce: FeatureVersion, pub verify_identity_contract_nonce: FeatureVersion, pub verify_identities_contract_keys: FeatureVersion, pub verify_identity_revision_for_identity_id: FeatureVersion, + pub verify_full_identity_by_non_unique_public_key_hash: FeatureVersion, + pub verify_identity_id_by_non_unique_public_key_hash: FeatureVersion, } #[derive(Clone, Debug, Default)] diff --git a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs index 666ab416ca7..a848e92c7dd 100644 --- a/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_versions/drive_verify_method_versions/v1.rs @@ -21,13 +21,15 @@ pub const 
DRIVE_VERIFY_METHOD_VERSIONS_V1: DriveVerifyMethodVersions = DriveVeri verify_full_identity_by_public_key_hash: 0, verify_identity_balance_for_identity_id: 0, verify_identity_balances_for_identity_ids: 0, - verify_identity_id_by_public_key_hash: 0, - verify_identity_ids_by_public_key_hashes: 0, + verify_identity_id_by_unique_public_key_hash: 0, + verify_identity_ids_by_unique_public_key_hashes: 0, verify_identity_keys_by_identity_id: 0, verify_identity_nonce: 0, verify_identity_contract_nonce: 0, verify_identities_contract_keys: 0, verify_identity_revision_for_identity_id: 0, + verify_full_identity_by_non_unique_public_key_hash: 0, + verify_identity_id_by_non_unique_public_key_hash: 0, }, group: DriveVerifyGroupMethodVersions { verify_group_info: 0, diff --git a/packages/rs-platform-version/src/version/mocks/v2_test.rs b/packages/rs-platform-version/src/version/mocks/v2_test.rs index 3b8d73f2735..f3e5c2c6624 100644 --- a/packages/rs-platform-version/src/version/mocks/v2_test.rs +++ b/packages/rs-platform-version/src/version/mocks/v2_test.rs @@ -200,7 +200,7 @@ pub const TEST_PLATFORM_V2: PlatformVersion = PlatformVersion { max_version: 0, default_current_version: 0, }, - identity_by_public_key_hash: FeatureVersionBounds { + identity_by_unique_public_key_hash: FeatureVersionBounds { min_version: 0, max_version: 0, default_current_version: 0, From 09afcb66df153d8865826765608c99a7a0a3d100 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Mon, 10 Mar 2025 12:48:33 +0100 Subject: [PATCH 04/21] feat(sdk): get identity by non-unique pubkey hashes chore: update to latest dash core 37 (#2483) feat(platform)!: token advanced distribution and updates (#2471) fix: token history contract (#2474) Co-authored-by: Ivan Shumkov Co-authored-by: QuantumExplorer fix(drive): using new rust dash core methods for reversed quorum hash to maintain backwards compatibility (#2489) feat: more granular integer document property types (#2455) 
Co-authored-by: Quantum Explorer docs: update comment for data contract code range (#2476) feat: validate token name localizations (#2468) feat(sdk): get identity by non-unique keys build(deps): update grovedb to current develop test: test identity by non-unique pubkey hashes fix(sdk): dash core client fails to get quorum chore: minor fixes test(drive-abci): identity by non-unique pubkey start after chore: minor changes to verify feat(sdk): token and group queries (#2449) chore: revert limit 1 => limit none chore: add non-unique key to test identities test(sdk): test vectors for test_fetch_identity_by_non_unique_public_keys fix(platform)!: token distribution fixes and tests (#2494) chore(platform): bump to version 2.0.0-dev.1 (#2495) test: update assertion fix(sdk): make some things public (#2496) feat(platform): require token for document actions (#2498) fix: data contract proof doesn't work with new auto fields (#2501) --- packages/dapi-grpc/build.rs | 6 +- packages/rs-dapi-client/src/transport/grpc.rs | 9 ++ .../create_genesis_state/test/tokens.rs | 8 +- .../mod.rs | 4 +- .../v0/mod.rs | 29 +++++- packages/rs-drive-proof-verifier/src/proof.rs | 98 +++++++++++++++++- packages/rs-drive-verify-c-binding/src/lib.rs | 13 ++- packages/rs-drive/Cargo.toml | 1 + .../src/drive/identity/fetch/queries/mod.rs | 9 +- packages/rs-drive/src/drive/mod.rs | 4 +- .../v0/mod.rs | 2 +- .../v0/mod.rs | 2 +- .../rs-sdk/src/platform/types/identity.rs | 34 +++++- packages/rs-sdk/tests/fetch/identity.rs | 65 +++++++++++- .../.gitkeep | 0 ...6e167d0327209295b4a98e14c6eb0d2b7e631.json | Bin 0 -> 72399 bytes ...5f90edeec5e32ae9d35ca2f654e5a8b47ef3d.json | Bin 0 -> 45144 bytes ...d441005468aa9ae346220fcc77830f30f06c7.json | Bin 0 -> 106415 bytes ...9355939311eca86a05923cfb53d11b746428a.json | Bin 0 -> 114110 bytes ...839f231c080d4ffacee80457ecc1db6a4bbbb.json | Bin 0 -> 104070 bytes ...4f02b4d71142b0d84600aa6fd73436efb869b.json | 1 + 21 files changed, 263 insertions(+), 22 deletions(-) 
create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/.gitkeep create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_28e9fda4d74589e1756f49026696e167d0327209295b4a98e14c6eb0d2b7e631.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3606368b19c21647e14e80f609e5f90edeec5e32ae9d35ca2f654e5a8b47ef3d.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3796e6d3ed7346055d82e39618ad441005468aa9ae346220fcc77830f30f06c7.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_5f9f951aa5d5af07c588813f31e9355939311eca86a05923cfb53d11b746428a.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_a6d61bfcc12549cc29bf4e9abe2839f231c080d4ffacee80457ecc1db6a4bbbb.json create mode 100644 packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/quorum_pubkey-106-15957be6ce59202ce3df8dbd0604f02b4d71142b0d84600aa6fd73436efb869b.json diff --git a/packages/dapi-grpc/build.rs b/packages/dapi-grpc/build.rs index 95c6a3682b6..4423d8f394c 100644 --- a/packages/dapi-grpc/build.rs +++ b/packages/dapi-grpc/build.rs @@ -63,7 +63,7 @@ fn configure_platform(mut platform: MappingConfig) -> MappingConfig { // Derive features for versioned messages // // "GetConsensusParamsRequest" is excluded as this message does not support proofs - const VERSIONED_REQUESTS: [&str; 40] = [ + const VERSIONED_REQUESTS: [&str; 41] = [ "GetDataContractHistoryRequest", "GetDataContractRequest", "GetDataContractsRequest", @@ -75,6 +75,7 @@ fn configure_platform(mut platform: MappingConfig) -> MappingConfig { "GetIdentityContractNonceRequest", "GetIdentityBalanceAndRevisionRequest", "GetIdentityBalanceRequest", + 
"GetIdentityByNonUniquePublicKeyHashRequest", "GetIdentityByPublicKeyHashRequest", "GetIdentityKeysRequest", "GetIdentityRequest", @@ -110,6 +111,9 @@ fn configure_platform(mut platform: MappingConfig) -> MappingConfig { // - "GetConsensusParamsResponse" // - "GetStatusResponse" // + // The following responses are excluded as they need custom proof handling: + // - "GetIdentityByNonUniquePublicKeyHashResponse" + // // "GetEvonodesProposedEpochBlocksResponse" is used for 2 Requests const VERSIONED_RESPONSES: [&str; 39] = [ "GetDataContractHistoryResponse", diff --git a/packages/rs-dapi-client/src/transport/grpc.rs b/packages/rs-dapi-client/src/transport/grpc.rs index d1337142bdc..57d8c28866c 100644 --- a/packages/rs-dapi-client/src/transport/grpc.rs +++ b/packages/rs-dapi-client/src/transport/grpc.rs @@ -490,6 +490,15 @@ impl_transport_request_grpc!( get_status ); +// rpc getIdentityByNonUniquePublicKeyHash(GetIdentityByNonUniquePublicKeyHashRequest) returns (GetIdentityByNonUniquePublicKeyHashResponse); +impl_transport_request_grpc!( + platform_proto::GetIdentityByNonUniquePublicKeyHashRequest, + platform_proto::GetIdentityByNonUniquePublicKeyHashResponse, + PlatformGrpcClient, + RequestSettings::default(), + get_identity_by_non_unique_public_key_hash +); + // rpc getIdentityTokenBalances(GetIdentityTokenBalancesRequest) returns (GetIdentityTokenBalancesResponse); impl_transport_request_grpc!( platform_proto::GetIdentityTokenBalancesRequest, diff --git a/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs b/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs index 28147c64bea..637638faddc 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/initialization/create_genesis_state/test/tokens.rs @@ -110,6 +110,10 @@ impl Platform { transaction: 
TransactionArg, platform_version: &PlatformVersion, ) -> Result<(), Error> { + let mut rng = StdRng::seed_from_u64(0u64); + let non_unique_key = + IdentityPublicKey::random_voting_key_with_rng(11, &mut rng, platform_version)?; + for id in [IDENTITY_ID_1, IDENTITY_ID_2, IDENTITY_ID_3] { // Create identity without keys let mut identity = Identity::create_basic_identity(id, platform_version)?; @@ -117,7 +121,9 @@ impl Platform { // Generate keys let seed = id.to_buffer()[0]; let mut rng = StdRng::seed_from_u64(seed as u64); - let keys = IdentityPublicKey::main_keys_with_random_authentication_keys_with_private_keys_with_rng(3, &mut rng, platform_version)?; + let mut keys = IdentityPublicKey::main_keys_with_random_authentication_keys_with_private_keys_with_rng(3, &mut rng, platform_version)?; + // every identity has the same non-unique key + keys.push(non_unique_key.clone()); for (key, private_key) in keys.iter() { let private_key = hex::encode(private_key); diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs index 33403fad6a5..9a1c7c104d5 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs @@ -7,7 +7,6 @@ use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request: use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_response::Version as ResponseVersion; use dapi_grpc::platform::v0::{ GetIdentityByNonUniquePublicKeyHashRequest, GetIdentityByNonUniquePublicKeyHashResponse, - GetIdentityByPublicKeyHashResponse, }; use dpp::version::PlatformVersion; @@ -28,7 +27,8 @@ impl Platform { ), )); }; - + // TODO why `identity_by_unique_public_key_hash`? 
+ // Shouldn't we rename or add new field like `identity_by_non_unique_public_key_hash`? let feature_version_bounds = &platform_version .drive_abci .query diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs index a9a3ac7dd42..f31423a2aea 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/v0/mod.rs @@ -38,7 +38,7 @@ impl Platform { ) .map(|bytes| bytes.0) .map_err(|_| QueryError::InvalidArgument( - "public key hash must be 20 bytes long".to_string() + "start_after must be 32 bytes long identity ID".to_string() )))) } else { None @@ -129,6 +129,33 @@ mod tests { )); } + #[test] + fn test_invalid_start_after() { + let (platform, state, version) = setup_platform(None, Network::Testnet, None); + + let negative_tests: Vec<&[u8]> = vec![&[0u8; 4], &[0u8; 20], &[0u8; 64]]; + + for test in negative_tests { + let request = GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: vec![0; 20], + start_after: Some(test.to_vec()), + prove: false, + }; + + let result = platform + .query_identity_by_non_unique_public_key_hash_v0(request, &state, version) + .expect("expected query to succeed"); + + assert!( + matches!( + result.errors.as_slice(), + [QueryError::InvalidArgument(msg)] if msg == &"start_after must be 32 bytes long identity ID".to_string()), + "errors: {:?}", + result.errors, + ); + } + } + #[test] fn test_identity_not_found() { let (platform, state, version) = setup_platform(None, Network::Testnet, None); diff --git a/packages/rs-drive-proof-verifier/src/proof.rs b/packages/rs-drive-proof-verifier/src/proof.rs index 6be02632ec1..a89f747e3af 100644 --- a/packages/rs-drive-proof-verifier/src/proof.rs +++ 
b/packages/rs-drive-proof-verifier/src/proof.rs @@ -15,7 +15,10 @@ use dapi_grpc::platform::v0::get_protocol_version_upgrade_vote_status_request::{ self, GetProtocolVersionUpgradeVoteStatusRequestV0, }; use dapi_grpc::platform::v0::security_level_map::KeyKindRequestType as GrpcKeyKind; -use dapi_grpc::platform::v0::{get_contested_resource_identity_votes_request, get_data_contract_history_request, get_data_contract_request, get_data_contracts_request, get_epochs_info_request, get_evonodes_proposed_epoch_blocks_by_ids_request, get_evonodes_proposed_epoch_blocks_by_range_request, get_group_actions_request, get_group_info_request, get_group_infos_request, get_identities_balances_request, get_identities_contract_keys_request, get_identity_balance_and_revision_request, get_identity_balance_request, get_identity_by_public_key_hash_request, get_identity_contract_nonce_request, get_identity_keys_request, get_identity_nonce_request, get_identity_request, get_path_elements_request, get_prefunded_specialized_balance_request, GetContestedResourceVotersForIdentityRequest, GetContestedResourceVotersForIdentityResponse, GetGroupActionSignersRequest, GetGroupActionSignersResponse, GetGroupActionsRequest, GetGroupActionsResponse, GetGroupInfoRequest, GetGroupInfoResponse, GetGroupInfosRequest, GetGroupInfosResponse, GetPathElementsRequest, GetPathElementsResponse, GetProtocolVersionUpgradeStateRequest, GetProtocolVersionUpgradeStateResponse, GetProtocolVersionUpgradeVoteStatusRequest, GetProtocolVersionUpgradeVoteStatusResponse, Proof, ResponseMetadata}; +use dapi_grpc::platform::v0::{ + get_contested_resource_identity_votes_request, get_data_contract_history_request, get_data_contract_request, get_data_contracts_request, get_epochs_info_request, get_evonodes_proposed_epoch_blocks_by_ids_request, get_evonodes_proposed_epoch_blocks_by_range_request, get_identities_balances_request, get_identities_contract_keys_request, get_identity_balance_and_revision_request, 
get_identity_balance_request, get_identity_by_non_unique_public_key_hash_request, + get_identity_by_public_key_hash_request, get_identity_contract_nonce_request, get_identity_keys_request, get_identity_nonce_request, get_identity_request, get_path_elements_request, get_prefunded_specialized_balance_request, GetContestedResourceVotersForIdentityRequest, GetContestedResourceVotersForIdentityResponse, GetPathElementsRequest, GetPathElementsResponse, GetProtocolVersionUpgradeStateRequest, GetProtocolVersionUpgradeStateResponse, GetProtocolVersionUpgradeVoteStatusRequest, GetProtocolVersionUpgradeVoteStatusResponse, Proof, ResponseMetadata +}; use dapi_grpc::platform::{ v0::{self as platform, key_request_type, KeyRequestType as GrpcKeyType}, VersionedGrpcResponse, @@ -36,6 +39,7 @@ use dpp::state_transition::proof_result::StateTransitionProofResult; use dpp::state_transition::StateTransition; use dpp::version::PlatformVersion; use dpp::voting::votes::Vote; +use drive::drive::identity::identity_and_non_unique_public_key_hash_double_proof::IdentityAndNonUniquePublicKeyHashDoubleProof; use drive::drive::identity::key::fetch::{ IdentityKeysRequest, KeyKindRequestType, KeyRequestType, PurposeU8, SecurityLevelU8, }; @@ -339,6 +343,98 @@ impl FromProof for Identity { } } +impl FromProof for Identity { + type Request = platform::GetIdentityByNonUniquePublicKeyHashRequest; + type Response = platform::GetIdentityByNonUniquePublicKeyHashResponse; + fn maybe_from_proof_with_metadata<'a, I: Into, O: Into>( + request: I, + response: O, + _network: Network, + platform_version: &PlatformVersion, + provider: &'a dyn ContextProvider, + ) -> Result<(Option, ResponseMetadata, Proof), Error> + where + Self: Sized + 'a, + { + let request = request.into(); + let response = response.into(); + // Parse response to read proof and metadata + // note that proof in this case is different + // let proof = response.proof().or(Err(Error::NoProofInResult))?; + use 
platform::get_identity_by_non_unique_public_key_hash_response::{ + get_identity_by_non_unique_public_key_hash_response_v0::Result as V0Result, Version::V0, + }; + + let (proved_response, mtd) = match response.version { + Some(V0(v0)) => { + let proof = if let V0Result::Proof(p) = v0.result.ok_or(Error::NoProofInResult)? { + p + } else { + return Err(Error::NoProofInResult); + }; + + (proof, v0.metadata.ok_or(Error::EmptyResponseMetadata)?) + } + _ => return Err(Error::EmptyResponseMetadata), + }; + + // let mtd = response.metadata().or(Err(Error::EmptyResponseMetadata))?; + + let (public_key_hash, after_identity) = match request.version.ok_or(Error::EmptyVersion)? { + get_identity_by_non_unique_public_key_hash_request::Version::V0(v0) => { + let public_key_hash = + v0.public_key_hash + .try_into() + .map_err(|_| Error::RequestError { + error: "Invalid public key hash length".to_string(), + })?; + + let after = v0 + .start_after + .map(|a| { + a.try_into().map_err(|_| Error::RequestError { + error: "Invalid start_after length".to_string(), + }) + }) + .transpose()?; + (public_key_hash, after) + } + }; + + // we need to convert some data to handle non-default proof structure for this response + let proof = proved_response + .grovedb_identity_public_key_hash_proof + .ok_or(Error::NoProofInResult)?; + + let proof_tuple = IdentityAndNonUniquePublicKeyHashDoubleProof { + identity_proof: proved_response.identity_proof_bytes, + identity_id_public_key_hash_proof: proof.grovedb_proof.clone(), + }; + + // Extract content from proof and verify Drive/GroveDB proofs + let (root_hash, maybe_identity) = + Drive::verify_full_identity_by_non_unique_public_key_hash( + &proof_tuple, + public_key_hash, + after_identity, + platform_version, + ) + .map_err(|e| match e { + drive::error::Error::GroveDB(e) => Error::GroveDBError { + proof_bytes: proof.grovedb_proof.clone(), + height: mtd.height, + time_ms: mtd.time_ms, + error: e.to_string(), + }, + _ => e.into(), + })?; + + 
verify_tenderdash_proof(&proof, &mtd, &root_hash, provider)?; + + Ok((maybe_identity, mtd.clone(), proof)) + } +} + impl FromProof for IdentityPublicKeys { type Request = platform::GetIdentityKeysRequest; type Response = platform::GetIdentityKeysResponse; diff --git a/packages/rs-drive-verify-c-binding/src/lib.rs b/packages/rs-drive-verify-c-binding/src/lib.rs index 220bb70cc13..cb32ff6624b 100644 --- a/packages/rs-drive-verify-c-binding/src/lib.rs +++ b/packages/rs-drive-verify-c-binding/src/lib.rs @@ -100,7 +100,7 @@ pub unsafe extern "C" fn verify_full_identity_by_identity_id( } #[no_mangle] -pub unsafe extern "C" fn verify_identity_id_by_public_key_hash( +pub unsafe extern "C" fn verify_identity_id_by_unique_public_key_hash( proof_array: *const u8, proof_len: usize, is_proof_subset: bool, @@ -109,8 +109,11 @@ pub unsafe extern "C" fn verify_identity_id_by_public_key_hash( let proof = unsafe { slice::from_raw_parts(proof_array, proof_len) }; let public_key_hash = unsafe { std::ptr::read(public_key_hash) }; - let verification_result = - Drive::verify_identity_id_by_public_key_hash(proof, is_proof_subset, public_key_hash); + let verification_result = Drive::verify_identity_id_by_unique_public_key_hash( + proof, + is_proof_subset, + public_key_hash, + ); match verification_result { Ok((root_hash, maybe_identity_id)) => { @@ -680,13 +683,13 @@ mod tests { } #[test] - fn verify_identity_id_by_public_key_hash() { + fn verify_identity_id_by_unique_public_key_hash() { let proof = multiple_identity_proof(); let public_key_hash: PublicKeyHash = [ 31, 8, 21, 38, 154, 252, 1, 45, 228, 66, 96, 206, 178, 138, 68, 150, 211, 24, 65, 132, ]; let (_root_hash, maybe_identity_id) = - Drive::verify_identity_id_by_public_key_hash(proof, true, public_key_hash) + Drive::verify_identity_id_by_unique_public_key_hash(proof, true, public_key_hash) .expect("should verify"); let expected_identity_id: [u8; 32] = [ 15, 126, 159, 152, 150, 254, 206, 186, 180, 193, 157, 65, 233, 215, 241, 108, 
23, 39, diff --git a/packages/rs-drive/Cargo.toml b/packages/rs-drive/Cargo.toml index b6908320de0..8af99c2a4b3 100644 --- a/packages/rs-drive/Cargo.toml +++ b/packages/rs-drive/Cargo.toml @@ -85,6 +85,7 @@ name = "benchmarks" harness = false [features] + default = ["full", "verify", "fixtures-and-mocks", "cbor_query"] grovedbg = ["grovedb/grovedbg"] fee-distribution = ["dpp/fee-distribution"] diff --git a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs index 6e1b1e4b65d..846a260eee1 100644 --- a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs @@ -1,9 +1,7 @@ use crate::drive::balances::balance_path_vec; use crate::drive::identity::key::fetch::IdentityKeysRequest; -use crate::drive::{ - identity_tree_path_vec, non_unique_key_hashes_tree_path, non_unique_key_hashes_tree_path_vec, - unique_key_hashes_tree_path_vec, Drive, -}; +use crate::drive::non_unique_key_hashes_tree_path_vec; +use crate::drive::{identity_tree_path_vec, unique_key_hashes_tree_path_vec, Drive}; use std::ops::RangeFull; use crate::error::Error; @@ -102,8 +100,9 @@ impl Drive { let non_unique_key_hashes = non_unique_key_hashes_tree_path_vec(); let mut query = Query::new_single_key(public_key_hash.to_vec()); let sub_query = if let Some(after) = after { - Query::new_single_query_item(QueryItem::RangeFrom(after.to_vec()..)) + Query::new_single_query_item(QueryItem::RangeAfter(after.to_vec()..)) } else { + // TODO: why not limit 1? Query::new_range_full() }; query.set_subquery(sub_query); diff --git a/packages/rs-drive/src/drive/mod.rs b/packages/rs-drive/src/drive/mod.rs index ce1fb9a74e5..9e11bddcd25 100644 --- a/packages/rs-drive/src/drive/mod.rs +++ b/packages/rs-drive/src/drive/mod.rs @@ -261,7 +261,7 @@ pub(crate) fn non_unique_key_hashes_tree_path() -> [&'static [u8]; 1] { } /// Returns the path to the masternode key hashes. 
-#[cfg(feature = "server")] +#[cfg(any(feature = "server", feature = "verify"))] pub(crate) fn non_unique_key_hashes_tree_path_vec() -> Vec> { vec![vec![ RootTree::NonUniquePublicKeyKeyHashesToIdentities as u8, @@ -278,7 +278,7 @@ pub(crate) fn non_unique_key_hashes_sub_tree_path(public_key_hash: &[u8]) -> [&[ } /// Returns the path to the masternode key hashes sub tree. -#[cfg(feature = "server")] +#[cfg(any(feature = "server", feature = "verify"))] pub(crate) fn non_unique_key_hashes_sub_tree_path_vec(public_key_hash: [u8; 20]) -> Vec> { vec![ vec![RootTree::NonUniquePublicKeyKeyHashesToIdentities as u8], diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs index ccb81c1d6fb..7f500fc7c8d 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -66,7 +66,7 @@ impl Drive { let Some(identity_proof) = &proof.identity_proof else { return Err(Error::Proof(ProofError::IncompleteProof("identity is not in proof even though identity id is set from non unique public key hash"))); }; - println!("hex {}", hex::encode(&identity_proof)); + Self::verify_full_identity_by_identity_id( identity_proof.as_slice(), false, diff --git a/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs index 88fe2894242..342df383e39 100644 --- a/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/verify/identity/verify_full_identity_by_unique_public_key_hash/v0/mod.rs @@ -18,7 +18,7 @@ impl Drive { /// The `public_key_hash` should contain the hash of the public key of the user. 
/// /// The function first verifies the identity ID associated with the given public key hash - /// by calling `verify_identity_id_by_public_key_hash()`. It then uses this identity ID to verify + /// by calling `verify_identity_id_by_unique_public_key_hash()`. It then uses this identity ID to verify /// the full identity by calling `verify_full_identity_by_identity_id()`. /// /// # Returns diff --git a/packages/rs-sdk/src/platform/types/identity.rs b/packages/rs-sdk/src/platform/types/identity.rs index 4b7b7754d0f..963dcaa45f9 100644 --- a/packages/rs-sdk/src/platform/types/identity.rs +++ b/packages/rs-sdk/src/platform/types/identity.rs @@ -9,6 +9,7 @@ use crate::{ use dapi_grpc::platform::v0::get_identities_balances_request::GetIdentitiesBalancesRequestV0; use dapi_grpc::platform::v0::get_identity_balance_and_revision_request::GetIdentityBalanceAndRevisionRequestV0; use dapi_grpc::platform::v0::get_identity_balance_request::GetIdentityBalanceRequestV0; +use dapi_grpc::platform::v0::get_identity_by_non_unique_public_key_hash_request::GetIdentityByNonUniquePublicKeyHashRequestV0; use dapi_grpc::platform::v0::get_identity_by_public_key_hash_request::GetIdentityByPublicKeyHashRequestV0; use dapi_grpc::platform::v0::get_identity_contract_nonce_request::GetIdentityContractNonceRequestV0; use dapi_grpc::platform::v0::get_identity_nonce_request::GetIdentityNonceRequestV0; @@ -31,7 +32,8 @@ delegate_enum! 
{ IdentityResponse, Identity, (GetIdentity,proto::GetIdentityRequest,proto::GetIdentityResponse), - (GetIdentityByPublicKeyHash, proto::GetIdentityByPublicKeyHashRequest, proto::GetIdentityByPublicKeyHashResponse) + (GetIdentityByPublicKeyHash, proto::GetIdentityByPublicKeyHashRequest, proto::GetIdentityByPublicKeyHashResponse), + (GetIdentityByNonUniquePublicKeyHash, proto::GetIdentityByNonUniquePublicKeyHashRequest, proto::GetIdentityByNonUniquePublicKeyHashResponse) } impl Query for dpp::prelude::Identifier { @@ -74,6 +76,36 @@ impl Query for PublicKeyHash { } } +/// Non-unique public key hash that can be used as a [Query] to find an identity. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NonUniquePublicKeyHashQuery { + pub key_hash: [u8; 20], + pub after: Option<[u8; 32]>, +} + +impl Query for NonUniquePublicKeyHashQuery { + fn query(self, prove: bool) -> Result { + if !prove { + unimplemented!("queries without proofs are not supported yet"); + } + + let request = proto::GetIdentityByNonUniquePublicKeyHashRequest { + version: Some( + proto::get_identity_by_non_unique_public_key_hash_request::Version::V0( + GetIdentityByNonUniquePublicKeyHashRequestV0 { + public_key_hash: self.key_hash.to_vec(), + start_after: self.after.map(|a| a.to_vec()), + prove, + }, + ), + ), + } + .into(); + + Ok(request) + } +} + impl Query for dpp::prelude::Identifier { fn query(self, prove: bool) -> Result { if !prove { diff --git a/packages/rs-sdk/tests/fetch/identity.rs b/packages/rs-sdk/tests/fetch/identity.rs index ad5a458960f..dd564623f4a 100644 --- a/packages/rs-sdk/tests/fetch/identity.rs +++ b/packages/rs-sdk/tests/fetch/identity.rs @@ -1,4 +1,4 @@ -use dash_sdk::platform::types::identity::PublicKeyHash; +use dash_sdk::platform::types::identity::{NonUniquePublicKeyHashQuery, PublicKeyHash}; use dash_sdk::platform::{Fetch, FetchMany}; use dpp::identity::accessors::IdentityGettersV0; use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; @@ 
-116,3 +116,66 @@ async fn test_identity_public_keys_all_read() { assert_eq!(id, pubkey.id()); } } + +/// Given some non-unique public key, when I fetch identity that uses this key, I get associated identities containing this key. +#[tokio::test(flavor = "multi_thread", worker_threads = 1)] +async fn test_fetch_identity_by_non_unique_public_keys() { + setup_logs(); + + let cfg = Config::new(); + let id: dpp::prelude::Identifier = cfg.existing_identity_id; + + let sdk = cfg + .setup_api("test_fetch_identity_by_non_unique_public_keys") + .await; + + // First, fetch an identity to get a non-unique public key + let identity = Identity::fetch(&sdk, id) + .await + .expect("fetch identity") + .expect("found identity"); + + let pubkeys: Vec<_> = identity + .public_keys() + .iter() + .filter(|public_key| !public_key.1.key_type().is_unique_key_type()) + .collect(); + + assert_ne!( + pubkeys.len(), + 0, + "identity must have at least one non-unique public key" + ); + + for non_unique_key in pubkeys.iter() { + let key_hash = non_unique_key.1.public_key_hash().expect("public key hash"); + let mut query = NonUniquePublicKeyHashQuery { + key_hash, + after: None, + }; + + // Now fetch identities by this non-unique public key hash + let mut count = 0; + while let Some(found) = Identity::fetch(&sdk, query) + .await + .expect("fetch identities by non-unique key hash") + { + count += 1; + tracing::debug!( + ?found, + ?key_hash, + ?count, + "fetched identities by non-unique public key hash" + ); + + query = NonUniquePublicKeyHashQuery { + key_hash, + after: Some(*found.id().as_bytes()), + }; + } + assert_eq!( + count, 3, + "expected exactly 3 identities with this non-unique public key" + ); + } +} diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/.gitkeep b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git 
a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_28e9fda4d74589e1756f49026696e167d0327209295b4a98e14c6eb0d2b7e631.json b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_28e9fda4d74589e1756f49026696e167d0327209295b4a98e14c6eb0d2b7e631.json new file mode 100644 index 0000000000000000000000000000000000000000..fee35a717a9328a88eca8b555e2eeaf385b6de42 GIT binary patch literal 72399 zcmeI4(Qcd95rli~r!aiY3sMp-GcQo&st*w4F1j!ZB!%k)C=9=QzB%VerfRF^d}@fm zAwb8L?B(v6nVsG9Q|*TrFJHd;``!ERf4ckm{oD8ddG+1P4}6Pn{&x57-*12U`QaP+ z*7oQtzJB`?U;i-vc|u;k+-)vj{Mj&gVC2V7^7ZC$%OAJnH|plAli%ml zyHR+cNa5RHk%PSc^hSBd?PkB<>4bm|NKqncTx|n^g!0~h$wpzbxh@dB|7BmClOf|c9i*4{U3#-p|7L9laz22C_)gs zs8A##1Z|%x@5hIlT5QpXXN=~q1L^*q0YAi@XjDfBTy^#t6NtN>Xs|~a1IvlC;&BwY zu*Me8qyw2ga3OP|C4OG&SKa9-WOi=qWM;)7iXKl?pKHO=6LIErsdJ1(0}Eg(AIlj9 zGE8qrpVepT6fNJYB@?0V^wc)!bKHreC7trmOb2BVc(t%=H;fkDf{0$0qHzL6uSTOQ z5{dQ{we{}dK!|Aju3T#Gb(_Fvc#fp3pQI_0dtASq(Iz%<}9j6#r@bo0?o z$^IZ-tyN*cSp;?VjgWLa9PA}6z|6%{xHM5}my4uJ7mApN5w(F6Mj_?|_N+O|R<0hF z1ft2l3moGUOalu=3Xdg9wQ6n|J$E@BlJ3DIu$AT1v%n-psyo!wN%FR>r+Qq@M^4x4 zMBpD7ao1iX0bi}bSdfrba`l5ar$fnt>oLK_ESPu-B6K5Aq#z=s_X0yFCTh+sVGfWk zqO@Diz`o4`i-;nfT5O^*#)0a-s6{lp+HjJn@7egXXjmnc*{u>u3{WhkoQ@4sz{0go zG%QhSj)#Rl-BPB21z8HxpVL9AVcVS{fT-t1vkM#rnXo;Wt5gRIdUi$)?@nfPUw>1} z8Py^YhwuHC5Yc@ZCA~ic|2`@*!ygoPiRuFhiEX+-Sa8B-ehE_o9OkU*2v=u1x0hKU z1s2D?yH|pY%9v8YvuB@lqTgqcA~1D=Og}mk*Et%pbeOp9QHdmVs#|aZ8PNTlja0-fLt!#WH?2A>MbsHs6cJ%TjG&|^!4dtDZQivNIxHmik^E(CmpAA z-p-oZduB2!ak^afc;!%{ReR)}JkA6HbFCL4DiW87=z&0|Kh6r~+ys^LOBxJtY&!+}=@_>$aoLsa6U^@TTzN+X;1L_gb`~PY^C78tafkfri zJIo5~Nlc*IG)AfI%nF0Fe9uL|WW(&QFv-|Q1(SY{N-nj4C6LL|J7Gec%-rW(y~CTK z;6@1m_PRp<(Uo|jsG2ln2@I$B`_s8x0>%iKz807;2>Np^*Xr$VHJUJlK4j(U$8iFO zEgN^me-tc9+B;tAOC#I_kxM4)$*2feX}pYI3jwrB}+)>}4q$2S5xacI60|jsmz7 zdM2G)1E3rG5*by)rpR7ki4uO^hon_OvafIk)>i2#qu2Vuk#$IHHK2hY`@r}lV7j?a zy543dae})9D#OeH`yO{u;#>wpaOlOK5TFp66!K6N3Meiit(#pCyC7EmKMP$1a}mrp 
zA{g;Ntlv(>Xg*tuQJc6#i1NgmAs%*}CnPstvFNIsEQ#_2Ph9lWC1s`Scftg6RndEO zk7qLSd`3KlK=XhjmRJ^M;TR*G354_>mUJ{PVN+nz(}yyJIKv%9I+>ACoNmI>XIyyp z4VW|P$7{4wFKnZ+c?T4{<7R?|XFMWkm#*}H72;`WL5*NYso=AkymbOdw#YRhD$7 zRWWB8f^rEjvCV@ceByIQ1BrRJnk{+dsK(90>>`|(KG!lh<#|4i}T_BF#kH1NW z48&*pQiF`LwTWsgo7EIbsLAL`#WZ>@QYwLBSR?P=5@#E_KzKhJ^`Ss1EqFRuT2I42 zg}_w9dXcD&PPafP>%rF-XVGrSB`TalpXfP5vfR5M=#RU(>1q=Ri@SteK>kL4WVD~}7x7=j|5p}2Q|UYtFpW0W{#I`po&8Fd zbFsAahLAx0Se0G6-+2Oyy|wvLfz*Mc9_HkXd<53-TDgCTE5jgBq+47#65S?0kZGGD zb#j9ictT%)nFA25dI(haX@PZuJ%j=4*mh|(58 zph!1Jm&Qv*(L|!tm8BS>L8C8EJQ;9Ii4&b6M6|F#EN#d*6ZQ+I{SZt#RM-2AdsvaE zUU;%X08!&FnDqE3%J~vIk7|7RU{Dl^AjSLwe{%KG3X;T%jA}F_o$ffe6L`8lWd^Bd zvo0zW^s)?VY&#;5hq_|O(x<(^NF>x1owx;QVj}LR$Wo|F?~SNS2~5yEo9;YQ+qqEO zwHM?RiO^&Mm3^&_a|6ptC5(%{n~fkA0K5ordfSYNsTq`s&^K%_!TsC;*;RTH!#>LC zg&JfOq)FGCQld!bkfBtL+5Tt>qggQ+WcoTo{d|>mE@Z?3Cxb#W5YZzOSoEgqerSq} zLf8~3Fd1OB8Vh%{t>BK~B6cK-B%KKC0g0G}k|DZB!)+a=!hXFmR?ufL2wbwX`V@%K z;JsDG7JB0{dUj2Uji>>a1+2$VxUqqvFU*_;`xVPiUz-~%;zXB_cFbju?dPcpp-;!e zuPdB^eUULPx25+ciLVNi3?Nk4_n0i9DW~&%J|NPm)1^r9 z#Ed9sLA~7>4GZ){VZAE~BNfMYZ736onbNa!9^B59MI65m$tKfF+E_6H=o=vh-YX|j z;<=XI4H((|6*%V-fQ)5HU_!3!6#`5Y;ym1qi~)twOkn^*#$)8;Ot`v3@Y=({xs zq+xr3i8aZDP@XV7rI5O#`~DILNcR?-5;{N6iN2vMf>05X8P`i%gv3Bx$1ynY5bZzW zbYoO8RqM7@d(=e6a7UdgW1{-;F_#nJ=jnYOuc=Ss;!!SeByAatMB%*`+d5;vE+!)f z>@&_%9gmuhBz0#L&OCCNHl#0vyfF$f+JwEZgNgEH6ZP9vn!WF!^j&|n$bxI zK3yD%)Ag97PIse+IMJ%Tu7nilp<2fiup0yb?JT%O554>Z+R>hHLZJi&ow3cn0Ud6w zL}iv9>ToKqgzk1TtOsC@8rTsl<_LI4)vfZlI~Bp1fx!ubUp893K@DngU1@oS$+FP3sdU; zC^kd6KlJhw=v_i;RbP1N2loZ!zw-ab)P?;&Bm4cbk=3;9eGzh=l_qmsATVzPV&70t zfSJy@xu|_Ifuhd27xhG^RLhbdvU^@BE4;dJR9#5Ma;^P639z{Nu5H>>SL@_*kiIRMVoDZx?~)9 z)}+rd)Hf{Y>Ri}tPj{Sw)AfM?L?Nlu;b5Z9q{EH`1--zMsE=9{^o@{o?wP0i{EMi5 zQCax+Qb`M_CqK!^L){+F1iDd9FEE#TF4h@)RMUMN&~0%J8H#l2RQF9T_-}24r0dNy z=@@)H5eZZuJBlLH@4Zn^Bwb(_?@7S00L5zT<|MKITl;`$0z!xI`d+hyc71^gtAfrl&WOCPb&1AvRCpQfasB9tN^rp8U z?MaNx`Z_aDMXu>xy`|)IZlP5&OF9M1tka0I^ar63LNsOKM2N~6tg!`-)|)wGtyc+t 
zSVsT7?*)Gzf%d+ajA~~N=?a<9Cv1TRcWVaS-NOQ&c?lx)MX3iH>H;>4d8J5+*axa_X^ zb<3|Ezw$MI-X1oGJ-^*-4*8q;$7|Ssv$?+79Qc>p{Ncn`CQE<03;XJGt^|%#VQ1x%@R}2WZEZOJ7fu$FDq7)00cPSW$XK%d5JV67?73zYx-gR?_?O6-#w2w$P zTA=PYbu?Guj7qDP8+F#{ULirU=Q>5YyCmvT&ty_3Nvo+boIiD1feB<{CKT!B2!`2j zI};X?o;d9K$(y;FUzxz$Ici_e0h~d+-EH*9P3>s5M=LPvsf+uHYZ*EcO=;z@4k+|a zMs;CLMgsd@2`um^76cd+MBpE;OGwunWOkpPg#a?CZ(Uri21%#PBd1efW|0)_z_jm& zc24J}4VZ``!9;WcvJ_F2$N23haER;E1xfWZH&K17t}f^qPoD}*P_h^$An9zuHjXIL zBMJCDCNo{TfhLM2RKel{ds)UzgeK>F>}NsJ^_UQ1igY8Dqm9!U#>LTC6-0NBB88YU z0z^9eyytqgfbbJe*Kbfs_l&t?WZ4%PX$g_Q8phRoSLhxKLn~%CJ2k^64>t>DEyC)F>YF{hyW6WXC3h=?ZUG1t64(%4WC@nOyh&hj!ZrbO_}`S{SF8IcsoBO z%_CeM-_B1-|1Teya&zv@I#BX*I%6a6H^M2G4#n}=K>~Ts<*osvo$9>@mlu$t6fufa z@4;Pq&$&ED>*bN05rw<V}1ZcbIDhLd>Wf3ERoyc+k!|;FaIq$rZcuP^C zpA)zRLV%9UkKxRmIdf*-OUC;6;_B-9kNdkn|7-u|?)BY=uRr|d?VG>9dHwVI{omgI zb>^82#=w%OYL z>T0nW4(_cMhxewN)$HziJ->T%cz15O+#WtRt!9>sX*YXzJBwjjj)%{#XE7|snbT=G z9lmy2&0aihW>IWruf3VwTTFAyX?#HCyGIY#@9tjR-o1SF@4Nl2Kt9BA`S_}ecen5N{RN-ePyhGRi%&29@Z#h3Uw^p%>f33t9HwztF4}E1Y$x5- zpJ_8}{mj_!>Zcx>#&!K%Ov6TZ^<#UZM{F<-H-6Fv{wd>W+l+5)yB&6Vp@{8gce`P$ z*VvEWVKMCV%E^zchUIEl-#D0gWcTq+li$MrR^7zT%Kx*bhi|ZwgXi4TYacAeU3a-@ zfeMorZ@YK)`#ipz0(SyNX*9%6Wb+u1-*FryLu;9=26g*meGUUV{QJs~77X?7y9Tn~ z+o86CC~AAGa{1+BmB%%1&-_YUtDPg#P3WB&R2534RDqhugYqiJbKm8jc)boSSf{(5 zf#&!cX#UIQ{r; z)n8i49$RJ-FrAS`d|u{O{}%0fX<{|=`+h&#!iH6^y3g~xH>tOEJQf_krQ_-vVP;pMTC<(jvQ|r1`@dzXVcGjcQq}HOPj$I^ zu)KF2WN~IL&w;kH`;*n}Dt}apb36LT&_cCzSDnT1BXewcZciO~w0xN?pAQkuQNjZ~ zo+-1(IjSf$&2v@x$SE(S=0K9^nG^CTV9&BMN6?Y2lJz{HAlqD8tj3{Kv^nKfl=b9U z%EK1sG;4<~+|C@PPnJwu0A-6^PXSR?FUl4te#6V%Bd79Z^V<3_SKI!@IOBTJ$lV~= zw#Y_HX1}^oS;K~+D3=9w2ky;47Sz$$uR!iAKO$%EELvrm`<6(G%&Ck!cPSEOxXpz; z6>~F<+IH6zcAHC5Fjq@XlR1)zlKf&>sFG2_V5XVm)TxUlbBeZY#k8cwL>212GfiE_ zq(-erR4fA%G!vjSc$oE$2<|o64%L|7B+P-1v=&d&&?T%o6DU@3igXhB9Co?Foqi_ znPUYOPm<9S*us1%ucA3sAY*RGkRF<>y$PR#E0*GwsTGtM4`_EWoj5(_WSLY`P|O7v z&sxO_vUcLsT!uL10z5LOVcWGp3mVh221`n?*z{siS+IH7BVu^qy~mHXvNUpW6%!^% z**u}D;{Kv6wNsTFk9hF}pH^f`yCaG@z{lZ@Vu 
zJlmh$BUp=)4JQy`15RDbHd1e0aiV}1!yNzvyoiczuoA}%kNirDXvz*-lZ$GR&6DGUs6r$6+(}j=6*@7eG8jQEe$v+PIu|sx zu)-8gE2ZlRbh)x&JUDR4sDh-{PX zNRd<5o#7ZX(Hm1G?@intG-ku4Vo{1#XC=k*%ngH2KvQ=lLqu#zh^p?U#~&9?Rm0t# zT#ZrKw7aeUG-g+d+M1|}IjFU5sksa-`fc=VH z5y3Qrk}6YMqn(V-WFZ$h@yV;HfoBjq1gWQ}m>97{$J83MST<~ez_NCC=!VCKBFFVg z5DWHs!zHRDT9nM4q&ot?RcL-`0#7<=AZ|(JD|!dy zS~C09jj{zB`V>qVp2z~6Q{lk`jdGmk#qiWWucXH>;9S6gB0zVr3*|19yHM^F2k;7^ zy~W2sx@Oy!ZcXZFH#OawY_5$WI2PGFIG&`z2WxiEo$h`vdUir_(P`oAJv3M81J-dF z36^}9EMt4MDhytSGTh^JI0_m~RB`Tff#*%C7Nc1U?M_r2Hbtzv4z8W5J9vXg(gW89 zjp|aDwE(wp9AHhuh?OQ_hMOvspFCMf87^^fcr>-bFMwSx!yf%pqNi5MdffaJ?1742 zF2k~apE=R+5@CGl{&#vARy@~J<%#l_Y*G|;n71M zhtY$Nz2id5HXL3Fw`s~2qT~ZaWlgDKw1pTwcetJFhW0BbuE_CvrebB0C+3l@+LMY> zfrbeqz_DvDmK1OqnrWFR;knbj)3KFo)VNYEv`%@rI|MjJTf>VUFfl`%Z*m1Dyviza zQhGS@VC3060KA*y#-v%hSeRfUXSid?9Jhw3Lg6a3v1^-G?#aZYn?^Eid*pOcG2qD+ zPVEUaYZptdY2;2k(4{g@g7@@E)R?x3BbQ$ayThhSj(a5OP+DvI4F-XELJu!STMKPs z9>|)}Zf(M@ly;IMP2(g9_J2OP@W{Iup1!PlDaSKcm@nX5z+tj{Va|m)j5!|Y*~+bb zJK9>=F46WkzOvRK1z%0YhQZ$G^@(oyJ`+(}GbAW+nd3^?Jh+_LgU9s}=JZ%3*Q_Od zjVL)C1RrhKb#j!GYkVRLt}?mG+PX2|?GVamsE%WJ=a(%$@~bZro~Ws_qc^U#>lG-W z3mtT&^#orFEviu5oHeyIXtcQ!xIXI(8#|yLS^y^hf_MMYd;fxW-}kR&*Dt=;@1P%N zzmomoH^{SJhTgA!pq0b;OCIScvAM{Dg`#XJ?VWh=zmWS9A5B~-W6m4!uvKZxxL85G ziY@vt+W~BzU{2>kgm&;jj|+`kLxYS~*##2R*mY|wR^lY(B<8Z^Evb!#vdd^w1#{TJ z&fs3o&Jhj(Ot*F|)RAWt#kf`CMjlVS;6PEJ3AZn@MR;BCA8!-mu=7H(sb#`T#W)S& z`cNFiSh^J929bP180?-QiF)F=d)gevs$=iT!BiTr*2n9QDxRG1==$TB<(f*hw+~t` zyKZvJBqv#X>^hJuyAA;NicODqnh$slan{LfOg3~OMkkvGGPbSFi*6GxG<9vXRV>OA z+fqU3orTv`sR2ez`O+oAXeUBMp?1K^mxi69tuke zbu`d4OEFes)hlrwG`NYXS|>cuu7jrD75x7QIxmM*oz&nFVYH;-VI*5BEG5XvEaQ@x zoXtfN43~OS=Csg8OrxJFgUv;6YVJ9ry_$|8dk+ryHV*_@=6(l?ymJo=&oRMX(>%y z=KY4gT40(ffMf!;v+EbxR8D_E(O)rGc!^nP$vF6mfuU(%xjQqyY4KS|81hM1kh`jDNsP=_QzeIn=g|{GYNiAd>_?h+N-c|49?_c&+N;}!<|DR7!IqqjtJo)q! 
OPs|?bcl6`CPyYc(FHNuj literal 0 HcmV?d00001 diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3796e6d3ed7346055d82e39618ad441005468aa9ae346220fcc77830f30f06c7.json b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_3796e6d3ed7346055d82e39618ad441005468aa9ae346220fcc77830f30f06c7.json new file mode 100644 index 0000000000000000000000000000000000000000..ac0b4a4182283cad0f1a0d444703a2dde6f1369c GIT binary patch literal 106415 zcmeI5O>dl6a)q<@uMoWEg3S+6oMkqXAV88$R)c}zBnrlufjzP10Sv?cJ?DAvl|@F9 z&GS@-HYN?UB{j+FsycP*RNdQc>HAM!y!iCDuiyORf4=_m%@=RJ`}^5KpPr`O;8!{^`p;nUx|_&z_HAO6ehufO@?t1o~20$;j(@GQ^&d{}<= z$JgI|_J?KryTcay7cZ_}UOaeqbM^4q>DA4nr?)r|Zjy-@Sbl!`0;@r>E=F!=F9fJo@3&%STbXeDt%g9zDA{J+?eu{)o!&-+B1- zn>U|-{pPdJ|Le``uLJT!93Q>f`N@kHzXA+CSi(Pl{ncMyr+9C^{`U3$fgg_F`SpiS ze)#0?KKcIBfBEC5zxmzi>iXhzd2xMpJZ>)TPI;RDPA@O+_|D~in&0!K)8*~@{pxh_ zGEei{-K%^BgUgFod=mqHkMZg5Fn)#Y-Nk)=FvRA&_xBff`5FB13Rf5R`NkC9%_7i=V#)&xhJALg5DE@T7^m}Q=r4+gYqWFr@qQt;_W&d!IJLnBhZNN0F6Jq zeEYb&h_>?eeC6&U-+DTvrxTG=9q1Ld?<{y47zyJkWW?>$YxseRQMGjFTf*iKN6Fr` z%qhTZMuPa`G9UFnqP@L_Scm!Bet*cJ64%xUn7s;hG~36u?5HKx{@*fnV7dDwQgu9CJ+OGIh7i8%nvN=Y0q{k=9 z?E5*UC^pTfs`7hIeJL{sCYe5QLcSL;v+Ri@=snvc%RI3lV;(I|<5((UZh1Az`ot{t zVFz=YHDL$0Cl1pOmMmLw#qT*F_Dy*TNx+!D3WD3=0VXOsR9GQqx{AyWPC9{OlOoQasDaDGpMH|~NZPH?)iuJykmM(Mh zGKhR9u3fMBWvdqND|eZAs&?EpNb|w6MA(BthIw?!iYir`vE3oIi)F)JSu;%>&zGWF zvf(`yx6=f!W<8!r#PBE=^Fi=z*V;S^W2*-26JgBqU?(_|uE+Nv=G`Wi0^BC%s7vLjv8#b;3+MqF+HCj>zi_I>Ul?BX)JrafwyzltcRyK`1T-Agb zqzq52s?cASr8rHw^@tBo^l2rwwmU{K53u66q3N;oWMEF#8tt4HtZQ^^FzYd&@MK1B zBA@Ng^oZ7?$%Zo!VF0(TV>?ps+&Edl7sCm_0ltW;c|N;p(^;hROH2J76Q zse_fK=(JL~Pq51shU3AZOSU@8Pt5eSEm?}?{bs4N^xk4(^@M!2?OF&2ku$mUnI)iU z+eAJXTZnKXw^YGAOw+=W_SA{TsTJWc4YIk7Za_2OO0gzrM~d~-OV(MgG-w&tx-EEIQ6fRZM z-95RDN!Yf#(SI8=m9n-FH8Brrjjc70Au>KOPrW8>GI*m6=qB4v%m}C|f9kQVP zhFv3qWd=>EY;8e%GCC&yu4(H<3*l^Q4sKSsu+0&EA|TN&JuFgg%a+w3x~WM#k=s)+=EH-=U|LZx8##z+ 
z+eJ28GW_OY*`f_O1zUzsWI@iU;lV_Wa-HVc@MC|yQoeo$=M0V%0lR~pDR-vanQ|ZF zfLXD1YjZVQL^p?OL_ zV4atd!IDqOW^7-rDud6VO!xRY91R*RR3Ue}!22dmi`lG$_9m(h+ak7I5AIIW9lk*# z=>yk*W_4-H+5orVIMAA=5nGy|84gt}KYg;1GF{@q;Sp*LKLB>VhJEj^68+dpxxNlR z1N%Tl&)2Z-?`NK9_!41#b^rhL8g{mqY7OTYVz(o$|7XLW(7iJ_XK>tFb!N_)IcMg4 zj03+&cd)xouB!4W0(&6UV?Bnf6E4G310|OT5f= zr7+K#ba3B7@Vd|gj%mO-*I1GOSGHw+_Z!!f3;(#bpN2}xqbd(R(u0`fVL29djcjda z(H9e*q%Q`iD%#4{8kP=}kj}EokL%$(I|p0b4nx)!bI-BC@!3bK(>xn~{08z|j*o55 z`T7~0GdNO~&zf`A9Q7`t-b3^>*B*ZX!(+{JEy<4ODiWvJ(CkvZS5tck+U9mK4lDFZ z&-e>y7B^N?du$}@!B$Mnxs9=);eE2=<{)De2lGU3$+pcc*;d81l5Fqx*bD4gx~%z- zZ(hw-*!JMOF!$0lyxR#;?a4QhPlDfUnKWr~A9M;#)8aOm2xyBXIRQ^%9vo_D&q3r| z1M0}f7gMsoY-=Rb>LV!kLb0>VocJf}^N-6P|71PS`G@B3Kk$#tUuPD-;-4&c|GfL8 z&7n~@nl7u#Jp?~1a*q!Jsry~+QBLqVGrIJp_gaxmJ)!NZpZZz zky{}Gv;l4_ab7`{o2(>2NuOzkH=>=tnwH~hyoad~miLh*7f6DN{CT@2R; zvI+MsmXu<+L1ZTAgF-l{oBrsYY2vzjjybGV2iQYl+kb$Ty4$^vitCaM-j5~bxVBQw z_F?OV>qQtPIg`Z)*8{o2^+4*l@zO1pEyk`PWIdUUlMOwH>COthwQX?DDq*(?3T<73 z_T*+XwpI{!XIY-01{$&Sg&urCdm==$megTS(N38xV#~E(*#`e_J+6o5mfnb%=JuNM zc4pz)(u_H)!=9`MN%_dy^F*hdbx1485w>m(CaiXSoJBlbgR?J{vO6R#Ii8iC-ZpGf z)&x7Y?h#3jrO4Hq`L0Fm;HMS@QcmPnPlo1{EDxJIk;mL_R@-|3bO(pH9?slhmmRf} z!A}%*VjHUK-E2v*R^!xb;(E~F5H+=)@O&@}{tvt-_+JP*mP4yfX>ew@CQOzzJ&a;& zg@t-eZWShyI60e#BpR;79CKS}5X(x%57{HFLf7xx67|MIH$e01S%eTINH1@eXfzvXA} zw*L5|H~Ddb+-uko0?>I(IGVHW;C=yH+hEH!JafAa{61RLnKOYj?BSOloP(}TzAI~& zGPX(CmK(G)NOa-{SH1UWv7JLCKA7BOZKwW(qJ(X`V^}F%+f(v|pCp%b6b8_+U??5==$wA3aZ^Zl^Lbpn`7z~k)tESt*5KcL7*46aUDDXy|zJotzK z(B%EVCqr|(s*vDy*II+uV|^kLzP2M`-61GG`*3Vo`LWAbcdULW__arPpGVQQz! 
zCw84A7+(UEc?)U#uH0fxf>;&`Fkh!wq+-yGSspsR@Nu zZwNDwQ%}ok612*LVY`$D%>LXa>2e}?6wH_9- z>v~0ArXeYJKbsVuIi?+4U7CNL6&Ee_sgXt0!2*#8@4v z8BCIwLQ<;blG%Qg1B`6Wat;6%+- zN4PlY+@5AY8d!SHP0P{6`r`unCQ8k2qj^9nn9J` zYNaMrqBQ|Mt$|5RuSpIQw>>J7Py%%d#xMg~^j+}m@0}Qzy-Kg5i}Ec=5pe>GDg;&N z6LhH&WKU_rb8{s6+&2O+s`ZaSDY8qDYTH?0icmMl`l92})x!C5ZJzS=?vRweb^y}P z$5_GVp3=BWc0Hvr%$nM3CWeZf&R0ENlay%H9(hNPX96K}ttTOKtv^J-hZs5rIyH_} zvJ)k9<38>p1-*dmP7!ft{t6DK5H7)_)}M4RS&#B$y=o^DbahYpL^}jbr~g`4Rr#Qq z9s#}n@77aM$!c3|%TA;M6rXEKJa1+sYb?snriYU#sr66|Sd zreub)z{IW`vT#8owG(`fp<4q|?#P$$s2Y|adxBvl1hzNX9oI*=3~Q-$B}=dMgM)QQ zY&DCJyYYkZx)rvMPETdZ1$*r)1z<-5Uk=+sVIS1s8W`aE$&%N*c{Y` z(}rgjFmZ%=785{I2LYylsw{h&SkCRzoRRAbol4UhfJgD79B5Vi=p01$^{+$nz2O1(!HClLxt ztGi`N5L!Bt1WemOq6%WIH6_aoazdov2@UwpV+tn*U#jNjZ0=5bLlz0uap9zJhjv9A zdRQ>jOP#9VR=>W3cF*<_ZQ4O&k}C9Up$b}6oQOpLU7Txy6o(E~adEm~X7#>y2lbdR zM|w`KinrO(0(tcAXn}sVZdy8mDV2^#%BPf*X#4VE6}BjFh_-Bmln*Oq=$WQOX@x2v zg`%mxt3}ocXBfrq$KTG;8HT8Rs37Am3Jz3TzCrH<>G*c2jq#XS){N(y zM=;$6ax_j$W;{l_1fMgx+}f#yJsw2`(+G;v5T;L1N@XbRW~*8qA(*PTU>C~J^O-|> zKT_B1XoCcKwGR`dNG-!uh@J){4c^ljSx{Cr_+ftEo@bL9=QGIS=z2E!D}R#b+{ouf zuKpGF=OGN^FKg+e(^l;eNm8|m)}N5SpW}O~ia1j*;m-Ao>QH*KzlZ>kK1ka6 znP<0qTPlTiaRwHl26np9p&^(MNe7EMp)VORU2t$rWQWHdlWiq>L20sEN15F{!B%Qw z;>1KCxmVx51nd~SS|&R-ZQ*XM==~sOU-9wF^xGkx?on$VRAoGX#qP@VV7v1xKo!WG(zO7hjjBNZtZ#OgAXa^l zZjW++Y?_I!AV{jaN8*fO56h}ueytbUXW0{kGMHSRCH+)K2#8qWa3r`V3i>R#nSzR3 zp2Y<162nfIWbWAUfszX}B0;sk4x+c_a=Yg*kNtLp$CRVrA27`Q zs%YmJsw+MqRij+=DOGUX*269yT@~CLXNS8$YMR2mYoj{CwLG;GBOuI737o?8Y$3v< ziA)JXGbb;R2$x`J*0W{xk!V|xvG1pFNOaZDp{6&a5WSVDf-77<6om8%1kCK{5nyTv zU3HGcG!%~hhP*sa7qH><`fN<>RE>r>Jr96_-T=~H;5a~tP=K%3p$sXZ#>XHwnDpAh z_S;(}DWt6L{XK&FZGrW^FC1~I{Sd=Ud@-%mxnRX<^e61fL+Lhk@5xCa1T3jS-{Zhy zIo9s6Bqh*@RQBxy6n)7+L(L8hC#PePU<5Ob&^IjQjzssny}{F!K#;%3K~NEsp1=Y( zNZ5C!aTFHycPzq@gU&R*U_#H*XRD5YIG}IagZmI@P zrRqQ`S~6B@o5EdMMznys`P|c|FW-(+$Zrx%EgVHN)BEb{JtsNVJ5^YHB-k3_E04bJ z&t!7&xlT%?BQVdVEmoAZclC5KtO##=!ZSFsc=U!aP%RQzSwox(flclD1d&!ZK8~Wd 
zG-jl_5fGtM1mg(r5{zLl=J;|KS&O25j?m0<+c6C*y;JE?o}t_`M|1?)1yA8C5eo4P zAbqnFWXLHLe(91RwiDi@)=w@d-D9#E!MJm~Pzu+d$S@IA6T+?z+Q?V`fsM1tuki1t zJD2iI%QG#nuP&bWIQPpl^oyC!ZpfBXOavxOSVz@5#J(qX%Pdj-vVkvstrSBM=Q=d? zg(g(BVXhoo!ef=S%EcLE#MYfnIEG>wj+Kiu$6n#i9Yzvlf8b3OL=*XntIK?*N$vqu z#eJx$oho|U2+2!03MNpiS0oJc5Mb(e*jDU>q6t^?1y=%I9MRl|y*V0aQh z97)8nS6vz`0ENI}7$NEj2J-#NfxGzX4WX8%WfFAkQsO|42~}LJtVZY!@feAAIfidy zvq}-2l&Pslf|1<5n=n*F1<$?^uv9H_xD(sap||ErId_cWB6{_pf`vl#*K^Qh3MM>!*pLCDr^z3I7+LY-KmwwUXc{&WiIrife2GIF~U~2iplP(7I(cbvOD@t ztW~h9?*fEKFu9*TGQ49-3Z4i~ViNp&K@e;Nb4Px+zl9rO1fwN+Hdh}uf|9jm7fA@bJ9C$jOIAkcFkcBk3TQkz#b{v>O z=z~&EGl^sf*6Spy)z6Mi%U;iE^TgSoa52;Z8+ZLewR7ytN2snBVkIb@M(8U@kV&nF vMOV;}Gk`db^^M@5c;e?D?yoO?_9stQw?FkmAN}?RzVYZw`$>NL{SW^G1qGz% literal 0 HcmV?d00001 diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_5f9f951aa5d5af07c588813f31e9355939311eca86a05923cfb53d11b746428a.json b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_5f9f951aa5d5af07c588813f31e9355939311eca86a05923cfb53d11b746428a.json new file mode 100644 index 0000000000000000000000000000000000000000..03a98a10115aaa66a917d100a2cceab02420d83a GIT binary patch literal 114110 zcmeI5&u^XQafQ3~UtxI71uuVyVwT-BMS-@Pu7bdD8x=7U*r_cCFbx0iJPvdBub zd7c3nqq)GfWl|i@%$YN1X1+^HKYa4(r=R`Po45b`hc{on{qpVi|M>mCe*M*deD&r3 zeD~(xzx&TWfB9ek^5*;Be(~)eKl{z6KjcgE#eaSC&9`5E{nbyO;8T|mKFY^`IV?Z_ z)0^)<|Kqa#-C>LUr=MQEx_I!>&DFz?POonseRzBO_`}x^KYVO?efRM9PB)J%uTJ-m ze)sNC45#bMhrfIKD2A)cM@~=Ir-xsAx_R`)r&o`nc=hOOUqAZj>h#$1bomo1zkl!H zvv1#i@y*-MzxeOBZ@vl0k8phdRRQ(>kCyN+-+cYoH!0rRZ@zo8zu?E?SN{Fu zCqI7j51;(-*}whiv)}ygbaj1ky1cl)I$k#ycc*-qe@?G1?)aU{{b7F1r%so*>({H( z#jAXnU+!M#GZBDbu69+%%W_|5X7MJ(i<*OsmU~KyhHhhWbCGU;U!RA8J^b$*AYCx5?|y%P%=aeLqacYI zkE>jK`Q<7PH9mgl7vj3PC!)N9-ZO(*g-R(?pu^*X@+QaUewTN|+jTgCCEeRcpb_5z z8vpU?-Rtfm+REqinY)Yp*7G4fors+3K(DZUXTgVokuaV^M%+HXh99XIRZEAyBW(WT zDB1g#IR%)_NDzNq=A-^cw71s~>o9-U?+-b2{P*eV_Lrgs5JCVHAb>@X>H&p6*18|K 
zB9KQ218ctOU7pYTChOZ04}!zDB(ALyFnbm1Xtt-d?5HKx{=YJHV7dDwQgwW|dTPtn zPnPdo53+b>EI09+7i8%nvN=Y0 zq{nBw#qT*HIbBXOsR9GQqx{AyWPC9{OlOoQasDaDGpMH|~NZPH?)iuJyk zmM(MhGKhR9u3fMBWvdqND|eZAs&+hUkmiGBiLeKQ4D;xc6;-M>W4l9a7t4mdvSyk% zo-akUWW#$ZZl?)c&3ZhMh~X(0^Fi=z*V;S^W2*-26JgBqU?(_|uE+Nv=G`Wic7MooxD+`zpdn61Wc;E4>t!x^3 zxT*;=NEx13RiVEuOL3ZV>k%KG=xZgmwmU{K53u6g(DYb(GB77=jdtz@>lz&!%zDfx zJekp($Y=XAJ)*T}vf&Iw7{IOT*pAdYPn;~^i{S*|0AEDaJfA&k(^;$p*GJ_^nwzi->8J&}bJml1u*P#Z?Aa?|%r>vNO*hI(D8n##%9)jSqraSV| z<0FyldQA`;?DN1Cs!X(KGIu83G4Q(z?H^6(Nf!;&Etz~n?*TczZCu->2!e&5p|=tu z%%l9etrGdfwJjC|%N{!cZIxStHpp66${A|$VJ_=QVW-=u2M2w``ec*cO=MHXJgbNm zy=`ts%qZ(jxFdv*J)8Ged_RihzeiVInp-^;NIJ^(d zQ~Ckxyo?N%d`dQB`)XAge2y~RZoZl9rs=Y5%po@AA^`w0ui z@bt*rVfOIByDoHWhr=rsw?~ss86ldyftaiw<0)kF`L#3-FBHEL3}z+k#Nd{Utv`}J7uY9>8tt|}6GY7l%EpuHd%m>%O4qn%5z_GZNnw)`dNkYDZClx$3wdZGK z7T5N9MN-yR$;#RTp2#OR(cmpVj&9po^u>fHX=*FK03LH&Ys?2qNTIFr<2tzMdw)^I z;zriic7Y}(Mc@0A(|h3b<S7vV1mt3etT349*#xv*tX-fxn0D&^wFj z2xZ&Mkg|s}=980Z+1e$a#0UTXdoD7aczRQB_y9gUOzcHQu?F>8Y#EVlg1NndM`))~ znxB*PZ+3T6DSek6ZRIo>Zd<>Tn3t_@$vmI2?4lhkLkN5DgoIw6JvAi&G2M=zk0o*| zM1VHHZ6(fk&_8!Hczn7*3vSiVtlBN&Hg5QbZQ}Cco`vGoS|&~;f4Ug14`dVWTP!KX zaD&K9&<{rkPuhkg+KKD#Ip(ld9lTE-!yh}NUw=|@U9!P*S-2Y4R;t{7Bg7~CbMy}p$9QL*%-*$c5PmEo1oCvH5^N^EKjhtg0MTw z@&q-|h$UY=_=5ICh%D5rC3VmHFL zGN_9+n9+-$S`bM2SYmZkPs}M)4GvLL>j}?i&q1f&6a3!@I+jCyfD8$fB~1^b*jiy(iaMEPUJ@s_c}Sw+ zN^i#878=Ad`t6E{dFXA;83fs@?HI!Q;K0XxAjmQ2J6Pn&eNeasU66l~2ll+xf7(yz zFXQs^{S$gq{F{q=0?$ABuJ`=t!rl5A0^StJ8v^{2ui<@x^`FD{19*yLM+iXYHQ{J3 znZfe~Y;A)r+wjcoI`I2wQD@Es(y(U?=)pPY>g2n!b}3_^0n6Bm7jMw ze%OJ}?TA=+2#TM5IJT^O*=2rm^7h!0j*t`jVSM|3?`nUrQtS{2YdF^rGhqU~`YlH| ziV3~c>lcPu*!wAkNo;$;b&`RvwFi{lzT@mg3d7V+l~3$CNieUlqL227=6*a~E_3cK>fo#H2P3Tf)u!bIpho!E}@ zwPzK1w9b_ej~V)|0sA<23HrM4^7V%C$g59q8}>!SFfFgZiEYa?kWud@#gJC&TONS@dSx3JIM7osr9gsUC$Hg$T2zK_gf+nW{K*lg|GfG z2r!1pqSnXGASMVNlU-kNf>cFk`uBxEzIuX1OpMi$n!zM_DI}#@E}88|Ij{(;V_?Z$ zJ>#G+Is{WY7)>M+%;@QR7JU{CtEd7XX#|I41hBue$S$X@fF)<0XxLcwVhb#Zvv-GK 
zm?>ZhEk)5M9VGbNM+hM5c@g;A5-?$Ps16o&Uz1?!?r4_o>mM0Wjx@XIz9@XJ5-}0o zhtl>DCN*J22?py5PRlKt64+NY1DMH;G>x+&@R}1IMwRy_dyF*g?+5t#EA7cfddrIRj+4YphFl%bBnHVZ^ zI$!m8O;Vy&d*mHGUI>KDwVs5`wf=~J4>5EMbZQ){WG71I#(msH3VH$Aog(7Q{1qHd zAzXq+bwE)a)nEeTZ$KJ}zpjyBPGLxlOLQEXZ@|Z&Ce6R3kc!kEjBLM7q z75>Akei?90x@auAh9LXw1dLEoI9h!zge!xfcehomm%H6)LJ)eB?W-Th7#^}Lc*1;C zu;8>81mes~)LlRRN}G!~GvkE1#;R9_jigX8yPc+kT+v~Rs9h?a@zK+aGrX_DZUOxIOvcc)Ko*bQ-EJI5EqyE}!Jd|8N@gevOzg@b3l}s}JHh7|x-}r> zj(iD^s$mJTCm2>jV0)9@aeai#u$D?!vh-R%I9P|oRhH|Z2d|};S?ZP#iLSPG|h$T7T_aP(YfT9cIepxGQHZHqC3e=Mq==# zYCRMX%&x)M_Ps-DN~PUGMb(ZR>xK=x7BkCE$6eoduIYAmy*q*oVVsWjG-IZVj*C@4 zdUk3e$X@goL(JR}Xjs*PjeWV4SY##PnN0u0^zWvQ>a-XNQmGz;NNV*zxB`v) za?@*3*WMj4#$&Gun-o{z|n>(M?AVWMAU`@*<9&U2(@hE(HW}J z?+S{ie2Z+(FgdDC0ERnoV(99aPVn2074C4(NRbsr&yN+Q@Hdwn3a!(&_VU4p&^ zaF;45WZ+mYWq9nn05FvnnJ(y)N=AZ~NlK_z00em@T$8;8zx^>qx%;t&uk|WDLS|9E zOml%}e7V$=I4?j%?_j7r!p|PexN||`uGdW5^^?w736f~=SA>FK%;H{Pao5*6fbiAxSYDA-llu%>XFV)-{y(GpR9+hG0{$i!41d3@o zyOv|FuzW@VEkoGeUS;s zCKh*MCCKO$`*8Ad>)+bj-KL}ty|skM9D}C|HIRzkIN+oleG$nqZ%p(|aAf-|xLXyVO1I1hIo;Z-q(jzCJ;k-MLHM`=ZAe z2YXL5X{9A1h9bzL=u(v8vBy(926y4LYXgN28odySA>t|}GI{|M(ZU8X{bWxlw&%kVbE3U&nj9Px`#&DjdLqGi6em@#o4$5C$le`>eH9>BdZBN243}-GEp%TWC)tH& zLR(VqsB|yX@I*lhmO#3`%- zXc_Lg<>ex1!(sj@Uu=cmhk0$u4>yr&O~Pu5Th&GBIRj$$G)0%6*3# zW$w`yUd|>p*#u{VNi@L;Y_BAS&!PpE_zPCY(Cs$8@*5~baeEUkGBH`4qkg8GW#k29 zggv&Wv@yx7SA1h@Qtvv^cRg2Q#ire3g1gviL(w2{H-NNd7iUY4=Uyp$oxw2KDQ%|^ z{gD+h6y;m|r26G!ocI#%NKIh^t3?%ZAUk*N`G#sA7Il$Y2&PJ)d4fAZ%fxx~LQ62H z5=!^W8}5`x+&Q)cncgs|nYss%UdNWCn1&U;dJ_{^FIht61k}iW-_FvJ7`uISUIw=p z@*>jjO{6mS)nt5^gBvLjq8fb@I7Ell%`V8mY*L!wp8nxW021U~kSTQUZUrey0Izbk zh@qB>3#$y!p2^9}Fj>EyB)Z1(pT|38U&H^NA&jb?kSVdKR~)hP^z^_myP3cw_-7{< zuqZHzQyqN)lUIK}pj~1oPrLPCl3m}V8F<8jK%(%W-}C4ZWqc3?uTiRmwb-9z7g0HRFHIQuP2LIrUa^Ya2EyZn+`)IER-*s zUhi=k3P>?LdbZsnb}f3&#Bc%IdPD$4_Z^T5WC$|cPPFArS%y;N6>Us*uKNTt-8Anp zH+6f|>7}vn zcREo`A0}*{-I|i7CD$_0D<^oSWdzMqg}&}2sGiO4INhL3E&5RLXdlwXw$%q7Eefk+ zt#aAnuCMzGnH+p>F*LCW%yW!~!=#_c 
zNv&4eaJa8Bu`Ozqmy0Xe#2qrL5~NRs%!=8n4wj9HpjO=VsztfOe?hQVX(d{T2O`L*rQ7KshgS{d!{YU2&KhPb>FH*{$0yp-1XeX-Q>NM-lGT- zB%J`N%@*)8%GER%g`)&%Ti5TRgM_?i!iqxo?It?(Y*Guwxi0O>cSh`t*!fRNf8L)H zew~HzMf$}I=wEAKCYQ`{yP}xIJJ&(Kl(O6CcZgyr)|+)%^o1tWfJtkXH`ulE?L}Z8IfoEOxP&&4K!^U0fYX{lm$yMIFOX17|v9z0b zjFzO_z!GR!UWo`j75lzkJk0oLd6nfvlx)*pXo6!%ta=U;qH^4^UocK~r(6n6D30ER+lqhM;Wq;M|Iz|t>5xXS<%RX7q9g+SZ$>O;k<=QFAG z<-;W8>}dv=A~h!|nXP@R0*DhLy#u7_Yzb;r3HW_>zEx9hF$)5S2=eGSha^F&63FSl zw7nPG1c^s+AO{@P4tR9k*9W<>Yboulgm%RdwCtHlgb@cfMBIT8D{r=U1=FyKX1G(ZtT@FiZcPM3Kwi+} zC|MgGIH;f8BStFuL)79S}R literal 0 HcmV?d00001 diff --git a/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_a6d61bfcc12549cc29bf4e9abe2839f231c080d4ffacee80457ecc1db6a4bbbb.json b/packages/rs-sdk/tests/vectors/test_fetch_identity_by_non_unique_public_keys/msg_IdentityRequest_a6d61bfcc12549cc29bf4e9abe2839f231c080d4ffacee80457ecc1db6a4bbbb.json new file mode 100644 index 0000000000000000000000000000000000000000..4af3de76387412e9987a01137e9938b648ed4dbe GIT binary patch literal 104070 zcmeI5O>dl6a)q<@uQ0skg3S+6oMkpi5Fp7WtHHo0OZ=#=s#B*<)xF&&fB59nPe1#IS8x95kFQ?8`SQ*8fB*fzeEs_0Uw`@E z-@W>`@BZVfFaPtOUw!|NUwr$g&wlsm5Bb)7^Iukzy9eHeCqPSNBQ_Khvnyg ze)aw5e_FP`KWwr8^wX;s7Y{zVxqA4~>E+F%4{vWDfB5p@hmS3U96; zt9OrLI9*>p{Oawa7_KfKIXzvU9)9oX=FvBwUObB8#iQ?i`RJpo(__oiH*Y@w;=kU!`X(U1{_d-<9(?gH-+cYoSNZgtZ@zoAt$#fJ=D$CF^5Z9e_sI{R z{p+7U``zzPSJxM(%Zux)<8gCwcglzPb9!-c$5$@*hxt37I$hqbzpqXgFY;mjx_g<= zU~qZyk}qPw-!VSj9mX%Qy}P*2HwH4kdVha$m+!$3&v12dpYJ^JnVXC2n~U3*1oJqu zxAC1$JO%!4wh7Lb|BsqJe1e-e_?VmZy+2u8-fx#LjzELS5%2NQlb`3^vn6m60HsSH zCXpBK0^%7jFGBF!nA}{1NAK$MFtCUJz5&t&!+Q4n17x0?P>+HnYCNuT@$HwZJkTZ>PuM|)1==Tf}s%yIHNeq9_TpK>Mr{433d_k5TBAa7`M|yms%)Xyviel4z zsw%(d)R!`IV3O$*C**qpGs~Vhg5I-Dvdj|;GUn0ZG>)Yr=9X8ZtWV5RA9gUeSrc|} zd*U$t*^*@oux!EgX+YG}%d&;UU;1+I$gO-~zP3Ki)nos1oS|MeavDV2F0$E@;WrN} zYuYdr^|Fxe(7id3g>SI;B`Kw`gMOD{H2SR!W>)4LeJ2y@i@WpTfaDXqOYM#%o+H}^dL1fA{S)dL{Y^8`fkBNW>ZC&Wt 
zS|wbGYle^fMvH99gq_J{wFvXc@gY>Pku!HDt5FI)F}E@}f;#-Pt-(4sXzF04DLSoG z?i1{Ch2eN`=#s6@@)I+CZA+G7dB0ieEWNjwSUn+MZMzo2LF7y>eP#)0+BT67#ug%+ z$SqYc57V@;q&;=wacV_4OoME0qZ`l+xKgYM+L2;C^^$d#D-BwPwQi8uPPU^&Ze4GN zYtTY(O_h0X>h7>H16PV=DY`mqQY>cfF!%{*>5j<|30o#aO?TVl4~0wBbazi~V-mLQ zZuH;AOr@+XL`}?tT4QU?V~C7T%u}yPn+#rP1G>q!6Z1jj=z~=!V2|qwXOg&WaBv%V zC-^jrgJ%pi9NN>5oa0kMgWr8R7^FgygoWleYFp~pue*Y%npHrVHZ zD^!_i(PZvSx?|vX7201-=t&n1)Ge8OL+=4Oy=`3Er3iwBpP{!BBFv-wx~&rV#I-FJ z1j`;f0d19AgEq)oSIQY`@nJ6ONnxkks0Rmq#QJ2D-A!au#yqQt6}@e4N6aYeOyof$ zfVs_@u!EZwE^Kpzp9n~_OAm{b+p=Xfh;C{UPvrI#jQQ}OF_>1=%SH}j+IEr6mJGjn zShi?GPQjMp6IqaRYIrbFqg4(QV=IJ~U712dwilGFb8{ z*^KS0Rb}uwl<6K{hoeEGg(~E37kJ;KX)&91(B4GVVOzwu>%rY=y2Cd}Bz@o-(5x

SB4^LR)nypSf@fKQc@D6X3yQ({*MpW~>FspgwayUJ zjSp1@yD^B_8xm^0a#t5DnCH}Nal>{PxqagnfKM)L2U(MHT-(UOe4tCqT$c>-EqO>Yhc0X(sHm=ro^_KEpG);3RUtvAVSz4UTRy=-f&m^DSW zxn|m+*1K4?uVJZup_8nG`CxiZrFd|!l@Xv5C*=$gOSWx3&_j?PtXl+c&2gO#)0z+Y zrU%dt+bSw0vn@7sz07suiF^|L=Bib;o%^6uV46jq+_mdyizPV$PhuXN-~fCOITKJv zo@L4Q7h!9T4HlK^g|mC1*x7>K{$}sv7wz&fzaBTgfS6xX{ODKX{`8xC-7lq`v^n^P zb(0ri(`&Y0=+oHRI&O`!+>1o$#OXD={{XmMy+AAv+cMkS)@!jv3kBvA%ypqBXvbFb zbFwcDem)xoI>v!JQbFv$Maq+ymo1U2<7aeWGQu=mggv+}pq(>(2klG%V!9n~^&xUA zMDfc*)9gr`XHexP>j=1&1+Xs7a8S>*Wp4Wq&{B81_fc_uz#81G5_4Q@&<3~BxL$-& zk~3L+a6OPKTo0r!BJEpj@U!m7dNLa)8+s7aofUd(+qHS=sW=E5v?n*Cv9*G*JIih{ zsDVZ-eW3?m(4GjdBTbeOvb=Z^j zASoYNd!Fc&2k$Q+=3}RFVPmUB>^O@!zq>EAjwNfmL*jZoD?Pn!*c4h5Y%#>@=OUON zI)fR#_^AaUwWi!wPlo1{EDtN-L>}|OQnvzd9g6L6<_^2;sGSUcqNo$wP<^0jNU>Jq z)NA58WV7ZFHMO4be0C2y^`78u?r9U!*XBvwsuQj!gvpYohf!?fUEN3k*W^}VB8ii; zc}Sw+O3X30g$A*Veya?dpVpi~kiFWDA-oR`_?Qm_Ip#crMV{OTgGc>d&B@9*=^yY(IdUKGd+0{`c4C&;~q9U%an$AqIf>)QQz zk<4Ixyq<-5?mF#Z8-5=x>f|`xmxJA*_gesI-$j~3fG zMB;EN5~lPmD_iFSNq0FF%c2gaIPO_!UTHt z#|YslCiGITUl?X#@2Ax6pI&gCWDw`t1D;Ij^}nF+ID3)8P~8>~YQO8miiYj++{pBX z8yUqJLahX2y%8-H?3V-T-uv3H^o4k4cf_gg`-wAPDjmaCAd^+tl`rlTKZ#RFQ_mJA zLNCEPI?(qQ2|CGAFSy|@br%UHH8r8I>J4G$aq4MVO@dZ= zFl_ZIVD{%;f{d;zt#jqWV}`zKz&_4hg1+v%e7#{j^5_%XhJ6t+Ov@{9V%steWYnuk zF{G9Hmd9gn{1fIPStp z0ZW1u8XGIss+=JhCdTSW&0vze6p~Ucm(2E~ z99V?aF|g#Wo^j9@9fGMHj3yEZX7uzui$05nRa60xG=f7i0@$Ck$Sy}+0ZWc`qG4mz zi!HDu&fXn{VWxm3v=l|3bdcb4A0dFK`y%kSC1AqpP#rAld`*I>yQ5jUufH;)9BFpZ zeNp&cC1N7F52fuROlrc45)9TAoR(WOC9rqk%vne5%r7BI0VisvI>NWek-^LJgihZ3wmR_Z6ftTr4%stMJ5K#zfEUL?{W<(+sNgRx35360Hg7X$?$j zdQEbexb0Dqgc7JzFoqe>qVIxdfA7S&>{WUdU6gN0iii_fR3WHBpP);PAbUy^o|_}l z=e`kuQLTRrN|9ZHRNKx1Q-r!X))yU*t`^RhYx9(^cZa0(wF8iTKE?_@_msw6vg;|0 zVb;`MGci=;biV5GnxsUl_Q*SWJQE0+Ydr~>YyBYtKE%*5(5Z2(lAS1-8~1S+Dd+`c zcZ!HJ^H*>VTbwf>}o$$FG0>s33MpsRbzC)y!kI{nwWs>%n=^a$wnf480@47R;t zta$Vavkm(qV(2!FBh+?gQNgr)Ukkv@hS?us@Yq{<8B_}xL1wb_N{ET0S*|IB&i4v$ zhF56ZI|9I-SK&Xr>X!l6q>IL)YY4K>PQVBig`?HiLbx&rdUsp3db!(;CIq23*}nR5 
zjNu{6f+x&J1q)7lK_JdNMBVlESK3^}nHeY4HCDYcY$Sz(+3hqP~?U4NP@jbDD7rY5_J6@Je>?C)E=4Lw!NlN_nt7RAoI()>zjQO#ucJx3)!^? zL)Ujo`Uo%D-d9Z?u>*S#2oZ)_J(DqXEs({dcefkIQA;0|lVDFvGbJ;W1txaokcA5x zsh!|+4BZ-#a!0;|N7b+d*%J&aA+Wv4?zld}Wmrq4D_MH29~`VhVyjt%>8%JDGii4gyq-=NnPKaTSDtfQ( z@x){9XT&YUI&_OAHVbo4Fj_3Wgkg%_!-Bg!giM7nefh#nK~8FyARW!(QDnDZ={;^d z`vxROeZ7`e>WOVQw$6Z}?ol&Qg~1g;yL55;B=U0iWA1K8O#}!T-RsZ$yXksE;ftG@ zzGdWK4fvv2*1+no$Uq~uJ+)#bIuu{FYq0H+fna&`^AUGKVBLDNs?fV5$aoZ8pcX1x zwMP=Go|!u}QE}0#2SdyV5NHH>VU-9&53zS0hQ_1o(*gfQc92BPI1EFr9#0HYKDElm zooUsGvyD>-=0>J`!QBDEU=`YxO{4E1@`^j9u@tWNn5k;4T$@i0k4~Herdk_Z!e%Q zweuL4CQkM8VyGfjJ%s7^YAKZ&1a7vf)lr^g`}ToXQHGw+RIMMWt_48e9TpO#NG-!u z41Enql%c0FdZMh_zGekdOIPbiNZi@v4Dzq!Eu;N>Ka>AV{@*hBGnJla0@7&5+TZex z(b=zLMz!aJOsWis&}^ z1DUodh%+Z>foICsAEqcl&z2pe+IwYosrzx2(c%e`DnZVDeS!#@b9ZP(n;nJ7am*d! zgq4$I9tab{^IE@?uZ~8A6Kr75w9?rxr-YR(MpS0e8CNQagdC z>(k62bvNs*C_zt4s+Mg>80Jz}4B7N)Pp~)=?1Cq5L0XuI^J!=)*roTzuuBn4lzTPZ z^Gt20La1v`$VC#tNeq>I?T&K;%Tk4ki@sZpAQpgVQNZbC6A@E0h>6fQESN<7T!PtE z`XW;OFsCOf$S6p0*NYNXap#aps7bSZYf475svyYpaZ>vADC6A7hy+&!1!q!-9vNWK zo2q-$R6Gh|CrE+G0cOiFYL~VZwaai3J5qv-J7L%Z0+|JqQ|KNIw{@5Z`}M|1LGQ&N zaQ4#jQy`WG@1+u1=!MJh*)?foL=89=upUF9#s-Q$Fq2E zUI2dQH6&hekQW!)aSVD4M$W zWZOCR>mnYLVDE8->guS=fm4^GQ0BvzX~Xn|pm&5qj5Z-J*}+73tBLw;ie~RSh`#Gn zgC1GG`&x5^>D{47$*#wQINgapg%hpX^GZ;0CDkgPfZZSv(4GZ1_|Vf&pbhtg5(*|L z=!|Uk4d`%dDJs46P*YQJBy^{nC6!LQqL%%3iax~9jh^-`Uh$>o4nYRbK0CTTwxT6t zm1~N=?zl_K2%5P&_O$@?<=b%z`CYYe6wP=QTZg)6g3^6Ad3qspxv#~}u}78eOM-5TbI2q}=T3Fs zszv@=%V6B~;)%NqzP=C`s+S!_ko0>m)C);xNX7f2lUiRm;!NB2LXDv+TurU?c#l84 z>Is>QBox807h5n4yLx2+`JN%TQ}Pug-QjL0=-3sfeckaG!u0#VY9C>U$ILI6qZ;lM zvF~(>7_vE*s%`=qMO5%q6=W!gZqn;VAQkIJv_e>|9#pVU2qA{)zMf|I3eEaBF-#%U zOriVwP+^OJ#Zg-IESU7dB9a0(vgjcZXp{l_JTiUE4LQ3a)%| z(@26!767Lgy$xw!L}=E>$vhRlmUi`$LUwMURW!q$f@Q^N$XV)xU`R}K$wWpWl@nMa z3rVdP^Au}8O7N+2_}}Xu_~#0=*F8L{otdI5WGJ7Ig*3QZGt%8XEWwk9D1<&J^a<<78Z+3Wdk1b6V)0vmU3OHAwaaJa_Xy|z7ZT0 jPkgu7{q@B!e&^}x_G91l!7u;J7ao0Te~`ca@Z Date: Mon, 17 Mar 2025 15:51:42 +0100 Subject: [PATCH 
05/21] test: initial version of framework and some first tests --- .../distribution_function/mod.rs | 2 + .../distribution/perpetual/block_based.rs | 763 ++++++++++++++++++ 2 files changed, 765 insertions(+) diff --git a/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/mod.rs b/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/mod.rs index 2397d93f18a..c04a5b3521d 100644 --- a/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/mod.rs +++ b/packages/rs-dpp/src/data_contract/associated_token/token_perpetual_distribution/distribution_function/mod.rs @@ -85,6 +85,8 @@ pub enum DistributionFunction { /// f(x) = n * (1 - (decrease_per_interval_numerator / decrease_per_interval_denominator))^((x - s) / step_count) /// ``` /// + /// For `x <= s`, `f(x) = n` + /// /// # Parameters /// - `step_count`: The number of periods between each step. /// - `decrease_per_interval_numerator` and `decrease_per_interval_denominator`: Define the reduction factor per step. 
diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 72d9ea6b700..6604c55d5db 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -18,6 +18,7 @@ mod perpetual_distribution_block { use dpp::data_contract::associated_token::token_perpetual_distribution::v0::TokenPerpetualDistributionV0; use crate::test::helpers::fast_forward_to_block::fast_forward_to_block; use super::*; + #[test] fn test_token_perpetual_distribution_block_claim_linear_and_claim_again() { let platform_version = PlatformVersion::latest(); @@ -494,3 +495,765 @@ mod perpetual_distribution_block { assert_eq!(token_balance, Some(200)); } } + +#[cfg(test)] +mod block_based_test_suite_tests { + use dpp::consensus::state::state_error::StateError; + use dpp::consensus::ConsensusError; + use dpp::data_contract::associated_token::token_configuration::accessors::v0::TokenConfigurationV0Getters; + use dpp::data_contract::associated_token::token_distribution_key::TokenDistributionType; + use dpp::data_contract::associated_token::token_distribution_rules::accessors::v0::TokenDistributionRulesV0Setters; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_recipient::TokenDistributionRecipient; + use dpp::data_contract::associated_token::token_perpetual_distribution::reward_distribution_type::RewardDistributionType; + use 
dpp::data_contract::associated_token::token_perpetual_distribution::TokenPerpetualDistribution; + use dpp::data_contract::associated_token::token_perpetual_distribution::v0::TokenPerpetualDistributionV0; + use dpp::data_contract::TokenConfiguration; + use rust_decimal::prelude::ToPrimitive; + use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; + + use super::test_suite::*; + + // Given some token configuration, + // When a claim is made at block 42, + // Then the claim should be successful. + #[test] + + fn test_block_based_perpetual_fixed_amount_50() { + check_heights_odd_no_current_rewards( + DistributionFunction::FixedAmount { amount: 50 }, + &[41, 46, 50, 1000], + &[100200, 100200, 100250], + 10, + ) + .expect("\n-> fixed amount should pass"); + } + + /// Test case for overflow error. + /// + /// claim at height 1000000000000: claim failed: assertion 0 failed: expected SuccessfulExecution, + /// got [InternalError(\"storage: protocol: overflow error: Overflow in FixedAmount evaluation\")]" + #[test] + fn test_block_based_perpetual_fixed_amount_1_000_000_000() { + check_heights_odd_no_current_rewards( + DistributionFunction::FixedAmount { + amount: 1_000_000_000, + }, + &[41, 46, 50, 51, 1_000_000_000_000], + &[ + 100_000 + 4 * 1_000_000_000, + 100_000 + 4 * 1_000_000_000, + 100_000 + 5 * 1_000_000_000, + 100_000 + 5 * 1_000_000_000, + 1, // 100_000 + (1_000_000_000_000 / 10) * 1_000_000_000, -- this will overflow + ], + 10, + ) + .expect("\n-> fixed amount should pass"); + } + + #[test] + /// With a fixed amount of 0, we expect first claim to fetch 100_000 units (which are in the contract defintion), + /// and fail for the rest of the claims. 
+ /// + /// FAILS + fn test_block_based_perpetual_fixed_amount_0() { + check_heights( + DistributionFunction::FixedAmount { amount: 0 }, + &[41, 46, 50, 100000], + &[100000, 100000, 100000, 100000], + &[true, false, false, false], + None, + 10, + ) + .expect("\nfixed amount zero increase\n"); + } + + #[test] + fn test_block_based_perpetual_fixed_amount_u64_max() { + check_heights_odd_no_current_rewards( + DistributionFunction::FixedAmount { amount: u64::MAX }, + &[41, 46, 50, 1000], + &[100200, 100200, 100250, 100250], + 10, + ) + .expect("\nfixed amount u64::MAX should pass\n"); + } + + #[test] + fn test_block_based_perpetual_random() { + check_heights_odd_no_current_rewards( + DistributionFunction::Random { min: 0, max: 100 }, + &[41, 46, 50, 59, 60], + &[100192, 100192, 100263, 100263, 100310], + 10, + ) + .expect("correct case 1"); + + check_heights_odd_no_current_rewards( + DistributionFunction::Random { min: 0, max: 0 }, + &[41], + &[100192], + 10, + ) + .expect("no rewards"); + } + + /// Test [DistributionFunction::StepDecreasingAmount]. 
+ #[test] + fn test_block_based_perpetual_step_decreasing() { + struct Case<'a> { + name: String, + dist_function: DistributionFunction, + claim_heights: &'a [u64], + distribution_interval: u64, + } + let claim_heights = [1, 2, 3, 4, 5, 10, 20, 30, 50, 100, 1000, 1000000]; + + let test_cases = [ + Case { + name: "claim height u64::MAX".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 1, + decrease_per_interval_denominator: 1, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &[u64::MAX], + distribution_interval: 10, + }, + Case { + name: "no change".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 1, + decrease_per_interval_denominator: 1, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + }, + Case { + name: "increase by u16::MAX".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: u16::MAX, + decrease_per_interval_denominator: 1, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + }, + Case { + name: "zero decrease".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 0, + decrease_per_interval_denominator: 1, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + }, + Case { + name: "divide by 0".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 0, + decrease_per_interval_denominator: 1, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + }, + Case { + name: "decrease by 10%".to_string(), + 
dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 1, + decrease_per_interval_denominator: 10, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + }, + Case { + name: "decrease by 50%".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 1, + decrease_per_interval_denominator: 2, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + }, + Case { + name: "increase by 50%".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 2, + decrease_per_interval_denominator: 1, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + }, + { + Case { + name: "decrease by 90%".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 9, + decrease_per_interval_denominator: 10, + s: Some(1), + n: 100_000, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + } + }, + { + Case { + name: "decrease by 99%".to_string(), + dist_function: DistributionFunction::StepDecreasingAmount { + step_count: 10, + decrease_per_interval_numerator: 99, + decrease_per_interval_denominator: 100, + s: Some(1), + n: 100, + min_value: Some(1), + }, + claim_heights: &claim_heights, + distribution_interval: 10, + } + }, + ]; + + // f(x) = n * (1 - (decrease_per_interval_numerator / decrease_per_interval_denominator))^((x - s) / step_count) + fn expected_emission(x: u64, dist: &DistributionFunction) -> u64 { + let ( + step_count, + decrease_per_interval_numerator, + decrease_per_interval_denominator, + s, + n, + min_value, + ) = match dist { + DistributionFunction::StepDecreasingAmount { + step_count, + 
decrease_per_interval_numerator, + decrease_per_interval_denominator, + s, + n, + min_value, + } => ( + *step_count, + *decrease_per_interval_numerator, + *decrease_per_interval_denominator, + s.unwrap_or(1), + *n, + min_value.unwrap_or_default(), + ), + _ => panic!("expected StepDecreasingAmount"), + }; + + if x <= s { + return n; + } + + // let's simplify it to a form like: + // f(x) = N * a ^ b + let a = 1f64 + - (decrease_per_interval_numerator as f64 + / decrease_per_interval_denominator as f64); + let b = (x as f64 - s as f64) as i32 / step_count as i32; // integer by purpose, we want to round down + let f_x = n as f64 * a.powi(b); + + // println!("expected_emission({}) = {}", x, f_x); + // f_x.to_u64().expect("expected to convert to u64") + f_x.to_u64().unwrap_or(min_value).max(min_value) + } + + let mut fails = String::new(); + + for case in test_cases { + println!("TEST CASE '{}'", case.name); + let dist = case.dist_function; + let claim_heights = case.claim_heights; + let expected_balances = claim_heights + .iter() + .map(|&h| { + // initial balance, defined in contract js + let mut expected_balance = 100_000; + // loop over blocks, starting with S, with step PERPETUAL_DISTRIBUTION_INTERVAL + for i in (1..=h).step_by(case.distribution_interval as usize) { + expected_balance += expected_emission(i, &dist); + } + println!("expected balance at height {}: {}", h, expected_balance); + expected_balance + }) + .collect::>(); + // we expect all tests to pass + let expect_pass = claim_heights.iter().map(|&_h| true).collect::>(); + + if let Err(e) = check_heights( + dist, + claim_heights, + &expected_balances, + &expect_pass, + None, //Some(S), + case.distribution_interval, + ) { + fails.push_str(format!("-> Test '{}':\n{}\n", case.name, &e).as_str()); + } + } + + if !fails.is_empty() { + panic!("failed tests:\n{}", fails); + } + } + + /// Check that claim results at provided heights are as expected, and that balances match expectations. 
+ fn check_heights( + distribution_function: DistributionFunction, + claim_heights: &[u64], + expected_balances: &[u64], + expect_pass: &[bool], + contract_start_height: Option, + distribution_interval: u64, + ) -> Result<(), String> { + let mut suite = TestSuite::new( + 10_200_000_000, + 0, + TokenDistributionType::Perpetual, + Some(|token_configuration: &mut TokenConfiguration| { + token_configuration + .distribution_rules_mut() + .set_perpetual_distribution(Some(TokenPerpetualDistribution::V0( + TokenPerpetualDistributionV0 { + distribution_type: RewardDistributionType::BlockBasedDistribution { + interval: distribution_interval, + function: distribution_function, + }, + distribution_recipient: TokenDistributionRecipient::ContractOwner, + }, + ))); + }), + ); + if let Some(start) = contract_start_height { + suite = suite.with_contract_start_time(start); + } + + let mut tests = Vec::new(); + for (i, height) in claim_heights.iter().enumerate() { + let assertions: Vec = if expect_pass[i] { + vec![|processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), + _ => Err(format!( + "expected SuccessfulExecution, got {:?}", + processing_results + )), + }] + } else { + vec![|processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => { + Err("expected error, got SuccessfulExecution".into()) + } + _ => Ok(()), + }] + }; + + tests.push(TestCase { + name: format!("claim at height {}", height), + base_height: *height - 1, + base_time_ms: 10_200_000_000, + expected_balance: expected_balances[i], + assertions, + }); + } + + suite.execute(&tests) + } + /// This test checks claims at provided heights, where every second height does not have any rewards to claim. 
+ /// + /// # Arguments + /// + /// * `distribution_function` - configured distribution function to test + /// * `claim_heights` - heights at which claims will be made; they will see balance from previous height + /// * `expected_balances` - expected balances after claims were made and block from `heights` was committed + /// + fn check_heights_odd_no_current_rewards( + distribution_function: DistributionFunction, + claim_heights: &[u64], + expected_balances: &[u64], + distribution_interval: u64, + ) -> Result<(), String> { + let mut suite = TestSuite::new( + 10_200_000_000, + 0, + TokenDistributionType::Perpetual, + Some(|token_configuration: &mut TokenConfiguration| { + token_configuration + .distribution_rules_mut() + .set_perpetual_distribution(Some(TokenPerpetualDistribution::V0( + TokenPerpetualDistributionV0 { + distribution_type: RewardDistributionType::BlockBasedDistribution { + interval: distribution_interval, + function: distribution_function, + }, + distribution_recipient: TokenDistributionRecipient::ContractOwner, + }, + ))); + }), + ); + + let mut tests = Vec::new(); + for (i, height) in claim_heights.iter().enumerate() { + let assertions: Vec = if i % 2 == 0 { + vec![|processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), + _ => Err(format!( + "expected SuccessfulExecution, got {:?}", + processing_results + )), + }] + } else { + vec![|processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::PaidConsensusError( + ConsensusError::StateError(StateError::InvalidTokenClaimNoCurrentRewards( + _, + )), + _, + )] => Ok(()), + _ => Err(format!( + "expected InvalidTokenClaimNoCurrentRewards, got {:?}", + processing_results + )), + }] + }; + + tests.push(TestCase { + name: format!("claim at height {}", height), + base_height: *height - 1, + base_time_ms: 10_200_000_000, + expected_balance: expected_balances[i], + assertions, + }); + } + + suite.execute(&tests) 
+ } +} + +mod test_suite { + use super::*; + use crate::rpc::core::MockCoreRPCLike; + use crate::test::helpers::fast_forward_to_block::fast_forward_to_block; + use crate::test::helpers::setup::TempPlatform; + use dpp::block::extended_block_info::v0::ExtendedBlockInfoV0Getters; + use dpp::data_contract::associated_token::token_distribution_key::TokenDistributionType; + use dpp::prelude::{DataContract, IdentityPublicKey}; + use simple_signer::signer::SimpleSigner; + + pub(crate) struct TestSuite { + platform: TempPlatform, + platform_version: &'static PlatformVersion, + identity: dpp::prelude::Identity, + signer: SimpleSigner, + identity_public_key: IdentityPublicKey, + token_id: Option, + contract: Option, + start_time: Option, + token_distribution_type: TokenDistributionType, + token_configuration_modification: Option, + epoch_index: u16, + nonce: u64, + time_between_blocks: u64, + } + + impl TestSuite { + pub(crate) fn new( + genesis_time_ms: u64, + time_between_blocks: u64, + + token_distribution_type: TokenDistributionType, + token_configuration_modification: Option, + ) -> Self { + let platform_version = PlatformVersion::latest(); + let mut platform = TestPlatformBuilder::new() + .with_latest_protocol_version() + .build_with_mock_rpc() + .set_genesis_state(); + + let mut rng = StdRng::seed_from_u64(49853); + + let (identity, signer, identity_public_key) = + setup_identity(&mut platform, rng.gen(), dash_to_credits!(0.5)); + + Self { + platform, + platform_version, + identity, + signer, + identity_public_key, + token_id: None, // lazy initialization in get_contract/get_token_id + contract: None, // lazy initialization in get_contract/get_token_id + start_time: None, // optional, configured using with_contract_start_time + token_distribution_type, + epoch_index: 1, + nonce: 1, + time_between_blocks, + token_configuration_modification, + } + .with_genesis(1, genesis_time_ms) + } + /// Lazily initialize and return token contract. Also sets token id. 
+ fn get_contract(&mut self) -> DataContract { + if let Some(ref contract) = self.contract { + return contract.clone(); + } + // we `take()` to avoid moving from reference; this means subsequent calls will fail, but we will already have + // the contract and token id initialized so it should never happen + let token_config_fn = if let Some(tc) = self.token_configuration_modification.take() { + let closure = |token_configuration: &mut TokenConfiguration| { + tc(token_configuration); + }; + Some(closure) + } else { + None + }; + + let (contract, token_id) = create_token_contract_with_owner_identity( + &mut self.platform, + self.identity.id(), + token_config_fn, + self.start_time, + None, + self.platform_version, + ); + self.token_id = Some(token_id); + self.contract = Some(contract.clone()); + + contract + } + /// Get token ID or create if needed. + fn get_token_id(&mut self) -> Identifier { + if self.token_id.is_none() { + self.get_contract(); // lazy initialization of token id and contract + } + + self.token_id + .expect("expected token id to be initialized in get_contract") + } + + fn next_identity_nonce(&mut self) -> u64 { + self.nonce += 1; + + self.nonce + } + + // submit a claim transition and assert the results + pub(crate) fn assert_claim(&mut self, assertions: Vec) -> Result<(), String> { + let committed_block_info = self.block_info(); + let nonce = self.next_identity_nonce(); + // next block config + let new_block_info = BlockInfo { + time_ms: committed_block_info.time_ms + self.time_between_blocks, + height: committed_block_info.height + 1, + // no change here + core_height: committed_block_info.core_height, + ..committed_block_info + }; + + let claim_transition = BatchTransition::new_token_claim_transition( + self.get_token_id(), + self.identity.id(), + self.get_contract().id(), + 0, + self.token_distribution_type, + None, + &self.identity_public_key, + nonce, + 0, + &self.signer, + self.platform_version, + None, + None, + None, + ) + .expect("expect to 
create documents batch transition"); + + let claim_serialized_transition = claim_transition + .serialize_to_bytes() + .expect("expected documents batch serialized state transition"); + + let transaction = self.platform.drive.grove.start_transaction(); + let platform_state = self.platform.state.load(); + + let processing_result = self + .platform + .platform + .process_raw_state_transitions( + &vec![claim_serialized_transition.clone()], + &platform_state, + &new_block_info, + &transaction, + self.platform_version, + false, + None, + ) + .expect("expected to process state transition"); + + for (i, assertion) in assertions.iter().enumerate() { + if let Err(e) = assertion(processing_result.execution_results().as_slice()) { + return Err(format!("assertion {} failed: {}", i, e)); + } + } + + self.platform + .drive + .grove + .commit_transaction(transaction) + .unwrap() + .expect("expected to commit transaction"); + + Ok(()) + } + + pub(crate) fn assert_balance( + &mut self, + expected_balance: Option, + ) -> Result<(), String> { + let token_id = self.get_token_id().to_buffer(); + let token_balance = self + .platform + .drive + .fetch_identity_token_balance( + token_id, + self.identity.id().to_buffer(), + None, + self.platform_version, + ) + .expect("expected to fetch token balance"); + + if token_balance != expected_balance { + return Err(format!( + "expected balance {:?} but got {:?}", + expected_balance, token_balance + )); + } + + Ok(()) + } + + fn block_info(&self) -> BlockInfo { + *self + .platform + .state + .load() + .last_committed_block_info() + .as_ref() + .expect("expected last committed block info") + .basic_info() + } + /// initialize genesis state + fn with_genesis(self, genesis_core_height: u32, genesis_time_ms: u64) -> Self { + fast_forward_to_block( + &self.platform, + genesis_time_ms, + 1, + genesis_core_height, + self.epoch_index, + false, + ); + + self + } + + /// Configure custom contract start time; must be called before contract is + /// 
initialized. + pub(super) fn with_contract_start_time(mut self, start_time: u64) -> Self { + if self.contract.is_some() { + panic!("with_contract_start_time must be called before contract is initialized"); + } + self.start_time = Some(start_time); + self + } + /// execute test cases + pub(super) fn execute(&mut self, tests: &[TestCase]) -> Result<(), String> { + let mut errors = String::new(); + for test_case in tests { + let result = self.execute_test_case(test_case); + if let Err(e) = result { + errors += format!("\n--> {}: {}", test_case.name, e).as_str(); + } + } + + if errors.is_empty() { + Ok(()) + } else { + Err(errors) + } + } + + pub(super) fn execute_test_case(&mut self, test_case: &TestCase) -> Result<(), String> { + let current_height = self.block_info().height; + let current_core_height = self.block_info().core_height; + + let block_time = if test_case.base_height >= current_height { + test_case.base_time_ms + + self.time_between_blocks * (test_case.base_height - current_height) + } else { + // workaround for fast_forward_to_block not allowing to go back in time + test_case.base_time_ms + }; + + fast_forward_to_block( + &self.platform, + block_time, + test_case.base_height, + current_core_height, + self.epoch_index, + false, + ); + self.assert_claim(test_case.assertions.clone()) + .map_err(|e| format!("claim failed: {}", e))?; + self.assert_balance(Some(test_case.expected_balance)) + .map_err(|e| format!("invalid balance: {}", e))?; + + Ok(()) + } + } + + pub(crate) type AssertionFn = fn(&[StateTransitionExecutionResult]) -> Result<(), String>; + pub(crate) struct TestCase { + pub(crate) name: String, + /// height of block just before the claim + pub(crate) base_height: u64, + /// time of block before the claim + pub(crate) base_time_ms: u64, + /// expected balance is a function that should return the expected balance after committing block + /// at provided height and time + pub(crate) expected_balance: u64, + /// assertion functions that will be 
executed on the claim + pub(crate) assertions: Vec, + } +} From 2c338d62a0643d039faa0ff1dcba8a023a66c427 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Tue, 18 Mar 2025 15:07:18 +0100 Subject: [PATCH 06/21] WIP --- Cargo.lock | 1 + packages/rs-drive-abci/Cargo.toml | 1 + .../distribution/perpetual/block_based.rs | 111 +++++++++++------- 3 files changed, 68 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0fb070fb49d..a1eb8fe3328 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1611,6 +1611,7 @@ dependencies = [ "strategy-tests", "tempfile", "tenderdash-abci", + "test-case", "thiserror 1.0.64", "tokio", "tokio-util", diff --git a/packages/rs-drive-abci/Cargo.toml b/packages/rs-drive-abci/Cargo.toml index 33ade76e797..6c315df9677 100644 --- a/packages/rs-drive-abci/Cargo.toml +++ b/packages/rs-drive-abci/Cargo.toml @@ -103,6 +103,7 @@ assert_matches = "1.5.0" drive-abci = { path = ".", features = ["testing-config", "mocks"] } bls-signatures = { git = "https://github.com/dashpay/bls-signatures", tag = "1.3.3" } mockall = { version = "0.13" } +test-case = { version = "3.3.1" } # For tests of grovedb verify rocksdb = { version = "0.23.0" } diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 6604c55d5db..3b33624de85 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -498,6 +498,7 @@ mod perpetual_distribution_block { #[cfg(test)] mod block_based_test_suite_tests { + use dpp::balances::credits::TokenAmount; use 
dpp::consensus::state::state_error::StateError; use dpp::consensus::ConsensusError; use dpp::data_contract::associated_token::token_configuration::accessors::v0::TokenConfigurationV0Getters; @@ -510,6 +511,7 @@ mod block_based_test_suite_tests { use dpp::data_contract::associated_token::token_perpetual_distribution::v0::TokenPerpetualDistributionV0; use dpp::data_contract::TokenConfiguration; use rust_decimal::prelude::ToPrimitive; + use test_case::test_matrix; use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; use super::test_suite::*; @@ -599,6 +601,26 @@ mod block_based_test_suite_tests { .expect("no rewards"); } + #[test_case::test_matrix( + [1,10], // step_count + [0,1,u16::MAX,999], // decrease_per_interval_numerator + [0,1,2,10,100,u16::MAX], // decrease_per_interval_denominator + [None,Some(1),Some(10),Some(u64::MAX)], // s + [0,1,100,100_000, 1_000_000, 10_000_000, 100_000_000, u64::MAX], // n + [None,Some(1),Some(10),Some(u64::MAX)], // min_value + [0,110, 100, 1000] // distribution_interval + )] + fn test_block_based_perpetual_step_decreasing_matrix( + step_count: u32, + decrease_per_interval_numerator: u16, + decrease_per_interval_denominator: u16, + s: Option, + n: TokenAmount, + min_value: Option, + + distribution_interval: u64, + ) { + } /// Test [DistributionFunction::StepDecreasingAmount]. 
#[test] fn test_block_based_perpetual_step_decreasing() { @@ -747,51 +769,6 @@ mod block_based_test_suite_tests { }, ]; - // f(x) = n * (1 - (decrease_per_interval_numerator / decrease_per_interval_denominator))^((x - s) / step_count) - fn expected_emission(x: u64, dist: &DistributionFunction) -> u64 { - let ( - step_count, - decrease_per_interval_numerator, - decrease_per_interval_denominator, - s, - n, - min_value, - ) = match dist { - DistributionFunction::StepDecreasingAmount { - step_count, - decrease_per_interval_numerator, - decrease_per_interval_denominator, - s, - n, - min_value, - } => ( - *step_count, - *decrease_per_interval_numerator, - *decrease_per_interval_denominator, - s.unwrap_or(1), - *n, - min_value.unwrap_or_default(), - ), - _ => panic!("expected StepDecreasingAmount"), - }; - - if x <= s { - return n; - } - - // let's simplify it to a form like: - // f(x) = N * a ^ b - let a = 1f64 - - (decrease_per_interval_numerator as f64 - / decrease_per_interval_denominator as f64); - let b = (x as f64 - s as f64) as i32 / step_count as i32; // integer by purpose, we want to round down - let f_x = n as f64 * a.powi(b); - - // println!("expected_emission({}) = {}", x, f_x); - // f_x.to_u64().expect("expected to convert to u64") - f_x.to_u64().unwrap_or(min_value).max(min_value) - } - let mut fails = String::new(); for case in test_cases { @@ -830,7 +807,51 @@ mod block_based_test_suite_tests { panic!("failed tests:\n{}", fails); } } + // HELPER FUNCTIONS // + + // f(x) = n * (1 - (decrease_per_interval_numerator / decrease_per_interval_denominator))^((x - s) / step_count) + fn expected_emission(x: u64, dist: &DistributionFunction) -> u64 { + let ( + step_count, + decrease_per_interval_numerator, + decrease_per_interval_denominator, + s, + n, + min_value, + ) = match dist { + DistributionFunction::StepDecreasingAmount { + step_count, + decrease_per_interval_numerator, + decrease_per_interval_denominator, + s, + n, + min_value, + } => ( + *step_count, + 
*decrease_per_interval_numerator, + *decrease_per_interval_denominator, + s.unwrap_or(1), + *n, + min_value.unwrap_or_default(), + ), + _ => panic!("expected StepDecreasingAmount"), + }; + + if x <= s { + return n; + } + + // let's simplify it to a form like: + // f(x) = N * a ^ b + let a = 1f64 + - (decrease_per_interval_numerator as f64 / decrease_per_interval_denominator as f64); + let b = (x as f64 - s as f64) as i32 / step_count as i32; // integer by purpose, we want to round down + let f_x = n as f64 * a.powi(b); + // println!("expected_emission({}) = {}", x, f_x); + // f_x.to_u64().expect("expected to convert to u64") + f_x.to_u64().unwrap_or(min_value).max(min_value) + } /// Check that claim results at provided heights are as expected, and that balances match expectations. fn check_heights( distribution_function: DistributionFunction, From 938fea9791bd2dd3d1be82474179a0960eb36113 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Thu, 20 Mar 2025 16:43:58 +0100 Subject: [PATCH 07/21] chore: test decreasing distribution --- .../distribution/perpetual/block_based.rs | 536 ++++++++++-------- 1 file changed, 300 insertions(+), 236 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 3b33624de85..2cc16b5d1e7 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -497,24 +497,9 @@ mod perpetual_distribution_block { } #[cfg(test)] -mod block_based_test_suite_tests { - use dpp::balances::credits::TokenAmount; - use 
dpp::consensus::state::state_error::StateError; - use dpp::consensus::ConsensusError; - use dpp::data_contract::associated_token::token_configuration::accessors::v0::TokenConfigurationV0Getters; - use dpp::data_contract::associated_token::token_distribution_key::TokenDistributionType; - use dpp::data_contract::associated_token::token_distribution_rules::accessors::v0::TokenDistributionRulesV0Setters; - use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; - use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_recipient::TokenDistributionRecipient; - use dpp::data_contract::associated_token::token_perpetual_distribution::reward_distribution_type::RewardDistributionType; - use dpp::data_contract::associated_token::token_perpetual_distribution::TokenPerpetualDistribution; - use dpp::data_contract::associated_token::token_perpetual_distribution::v0::TokenPerpetualDistributionV0; - use dpp::data_contract::TokenConfiguration; - use rust_decimal::prelude::ToPrimitive; - use test_case::test_matrix; - use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; - +mod block_based_perpetual_fixed_amount { use super::test_suite::*; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; // Given some token configuration, // When a claim is made at block 42, @@ -522,7 +507,7 @@ mod block_based_test_suite_tests { #[test] fn test_block_based_perpetual_fixed_amount_50() { - check_heights_odd_no_current_rewards( + super::test_suite::check_heights_odd_no_current_rewards( DistributionFunction::FixedAmount { amount: 50 }, &[41, 46, 50, 1000], &[100200, 100200, 100250], @@ -581,6 +566,11 @@ mod block_based_test_suite_tests { ) .expect("\nfixed amount u64::MAX should pass\n"); } +} +mod block_based_perpetual_random { + use 
dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; + + use super::test_suite::check_heights_odd_no_current_rewards; #[test] fn test_block_based_perpetual_random() { @@ -600,14 +590,32 @@ mod block_based_test_suite_tests { ) .expect("no rewards"); } +} +mod matrix { + use dpp::{ + balances::credits::TokenAmount, + data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction, + }; + use rust_decimal::prelude::ToPrimitive; + use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::{block_based_perpetual_step_decreasing::expected_emission, test_suite::check_heights}; + + // #[test_case::test_matrix( + // [1,10], // step_count + // [0,1,u16::MAX,999], // decrease_per_interval_numerator + // [0,1,2,10,100,u16::MAX], // decrease_per_interval_denominator + // [None,Some(1),Some(10),Some(u64::MAX)], // s + // [0,1,100,100_000, 1_000_000, 10_000_000, 100_000_000, u64::MAX], // n + // [None,Some(1),Some(10),Some(u64::MAX)], // min_value + // [0,110, 100, 1000] // distribution_interval + // )] #[test_case::test_matrix( - [1,10], // step_count - [0,1,u16::MAX,999], // decrease_per_interval_numerator - [0,1,2,10,100,u16::MAX], // decrease_per_interval_denominator - [None,Some(1),Some(10),Some(u64::MAX)], // s - [0,1,100,100_000, 1_000_000, 10_000_000, 100_000_000, u64::MAX], // n - [None,Some(1),Some(10),Some(u64::MAX)], // min_value + [0,1,10], // step_count + [0,1,10,u16::MAX], // decrease_per_interval_numerator + [1], // decrease_per_interval_denominator + [None], // s + [0,1,100000], // n + [None], // min_value [0,110, 100, 1000] // distribution_interval )] fn test_block_based_perpetual_step_decreasing_matrix( @@ -617,200 +625,227 @@ mod block_based_test_suite_tests { s: Option, n: TokenAmount, min_value: Option, - distribution_interval: u64, ) { - } - /// Test [DistributionFunction::StepDecreasingAmount]. 
- #[test] - fn test_block_based_perpetual_step_decreasing() { - struct Case<'a> { - name: String, - dist_function: DistributionFunction, - claim_heights: &'a [u64], - distribution_interval: u64, - } - let claim_heights = [1, 2, 3, 4, 5, 10, 20, 30, 50, 100, 1000, 1000000]; - - let test_cases = [ - Case { - name: "claim height u64::MAX".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 1, - decrease_per_interval_denominator: 1, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &[u64::MAX], - distribution_interval: 10, - }, - Case { - name: "no change".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 1, - decrease_per_interval_denominator: 1, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - }, - Case { - name: "increase by u16::MAX".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: u16::MAX, - decrease_per_interval_denominator: 1, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - }, - Case { - name: "zero decrease".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 0, - decrease_per_interval_denominator: 1, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - }, - Case { - name: "divide by 0".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 0, - decrease_per_interval_denominator: 1, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - }, - Case { - name: "decrease by 10%".to_string(), - 
dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 1, - decrease_per_interval_denominator: 10, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - }, - Case { - name: "decrease by 50%".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 1, - decrease_per_interval_denominator: 2, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - }, - Case { - name: "increase by 50%".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 2, - decrease_per_interval_denominator: 1, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - }, - { - Case { - name: "decrease by 90%".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 9, - decrease_per_interval_denominator: 10, - s: Some(1), - n: 100_000, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - } - }, - { - Case { - name: "decrease by 99%".to_string(), - dist_function: DistributionFunction::StepDecreasingAmount { - step_count: 10, - decrease_per_interval_numerator: 99, - decrease_per_interval_denominator: 100, - s: Some(1), - n: 100, - min_value: Some(1), - }, - claim_heights: &claim_heights, - distribution_interval: 10, - } - }, - ]; - - let mut fails = String::new(); - - for case in test_cases { - println!("TEST CASE '{}'", case.name); - let dist = case.dist_function; - let claim_heights = case.claim_heights; - let expected_balances = claim_heights - .iter() - .map(|&h| { - // initial balance, defined in contract js - let mut expected_balance = 100_000; - // loop over blocks, starting with S, with step 
PERPETUAL_DISTRIBUTION_INTERVAL - for i in (1..=h).step_by(case.distribution_interval as usize) { + // + let dist = DistributionFunction::StepDecreasingAmount { + step_count, + decrease_per_interval_numerator, + decrease_per_interval_denominator, + s, + n, + min_value, + }; + + const VERY_HIGH_HEIGHT: u64 = 1_000_000; + let claim_heights = if distribution_interval > 0 { + let mut heights = (1..10) + .map(|i| i * distribution_interval) + .collect::>(); + heights.push(VERY_HIGH_HEIGHT); + + heights + } else { + vec![1, 2, 3, 10, 100, VERY_HIGH_HEIGHT] + }; + let expected_balances = claim_heights + .iter() + .map(|&h| { + // initial balance, defined in contract js + let mut expected_balance = 100_000; + // loop over blocks, starting with S, with step PERPETUAL_DISTRIBUTION_INTERVAL + if distribution_interval > 0 { + for i in (1..=h).step_by(distribution_interval as usize) { expected_balance += expected_emission(i, &dist); } - println!("expected balance at height {}: {}", h, expected_balance); - expected_balance - }) - .collect::>(); - // we expect all tests to pass - let expect_pass = claim_heights.iter().map(|&_h| true).collect::>(); + } + println!("expected balance at height {}: {}", h, expected_balance); + expected_balance.to_u64().unwrap_or(0) // to handle tests that overflow + }) + .collect::>(); + // we expect all tests to pass + let expect_pass = claim_heights.iter().map(|&_h| true).collect::>(); + + if let Err(e) = check_heights( + dist.clone(), + &claim_heights, + &expected_balances, + &expect_pass, + None, //Some(S), + distribution_interval, + ) { + // print dist to stderr + panic!("test failed for distribution function {:?}: {}", dist, e); + } else { + println!("test passed for distribution function {:?}", dist); + } + } +} + +mod block_based_perpetual_step_decreasing { + use dpp::balances::credits::TokenAmount; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; + use 
rust_decimal::prelude::ToPrimitive; + use test_case::test_case; + use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::test_suite::check_heights; + use super::test_suite::with_timeout; + + const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(1); + + #[test_case( + 1,// step_count + 1,// decrease_per_interval_numerator + 100,// decrease_per_interval_denominator + None,// s + 100_000,// n + Some(1),// min_value + Some((1..1000).step_by(100).collect()),// claim_heights + 1; // distribution_interval + "claim every 100 blocks" + )] + #[test_case( + 1,// step_count + 1,// decrease_per_interval_numerator + 100,// decrease_per_interval_denominator + None,// s + 100_000,// n + Some(1),// min_value + Some((1..1000).step_by(500).collect()),// claim_heights + 1; // distribution_interval + "claim every 500 blocks" + )] + #[test_matrix( + 1,// step_count + 101,// decrease_per_interval_numerator + 100,// decrease_per_interval_denominator + None,// s + 100_000,// n + Some(1),// min_value + [Some((1..1000).step_by(100).collect()),Some((1..1000).step_by(500).collect())],// claim_heights + 1; // distribution_interval + "1% increase, varying claim heights" + )] + #[test_case( + 1,// step_count + 1000,// decrease_per_interval_numerator + 1,// decrease_per_interval_denominator + None,// s + 100_000,// n + Some(1),// min_value + Some(vec![1,7]), // claim_heights + 1; // distribution_interval + "1000x increase, overflow" + )] + #[test_case( + 1,// step_count + 1,// decrease_per_interval_numerator + 1,// decrease_per_interval_denominator + None,// s + 100_000,// n + Some(1),// min_value + Some(vec![1,2,3,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + 1; // distribution_interval + "100% decrease, various min values" + )] + #[test_matrix( + 1,// step_count + 0,// decrease_per_interval_numerator + 1,// decrease_per_interval_denominator + None,// s + 100_000,// n + [None,Some(0),Some(1),Some(100)],// 
min_value + Some(vec![1,2,3,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + 1; // distribution_interval + "no decrease, irrelevant min values" + )] + #[test_matrix( + [5,10],// step_count + 1,// decrease_per_interval_numerator + 2,// decrease_per_interval_denominator + None,// s + 100_000,// n + None,// min_value + Some(vec![5,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + [1,5]; // distribution_interval + "1/2 decrease, changing step" + )] + #[test_matrix( + [1,10],// step_count + 1,// decrease_per_interval_numerator + 2,// decrease_per_interval_denominator + [None,Some(1),Some(5)],// s + 100_000,// n + None,// min_value + Some(vec![5,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + [1,5]; // distribution_interval + "1/2 decrease, changing S" + )] - if let Err(e) = check_heights( + /// Test various combinations of [DistributionFunction::StepDecreasingAmount] distribution. + fn run_test( + step_count: u32, + decrease_per_interval_numerator: u16, + decrease_per_interval_denominator: u16, + s: Option, + n: TokenAmount, + min_value: Option, + claim_heights: Option>, + distribution_interval: u64, + ) -> Result<(), String> { + let dist = DistributionFunction::StepDecreasingAmount { + step_count, + decrease_per_interval_numerator, + decrease_per_interval_denominator, + s, + n, + min_value, + }; + let claim_heights = + claim_heights.unwrap_or(vec![1, 2, 3, 4, 5, 10, 20, 30, 50, 100, 1_000_000]); + + let expected_balances = claim_heights + .iter() + .map(|&h| { + // initial balance, defined in contract js + let mut expected_balance: i128 = 100_000; + // loop over blocks, starting with S, with step PERPETUAL_DISTRIBUTION_INTERVAL + for i in (1..=h).step_by(distribution_interval as usize) { + expected_balance += expected_emission(i, &dist); + } + println!("expected balance at height {}: {}", h, expected_balance); + expected_balance.to_u64().unwrap_or_else(|| { + println!("ERR: overflow in expected balance at height {}", h); + 0 + }) // 
to handle tests that overflow + }) + .collect::>(); + // we expect all tests to pass + let expect_pass = claim_heights.iter().map(|&_h| true).collect::>(); + + with_timeout(TIMEOUT, move || { + check_heights( dist, - claim_heights, + &claim_heights, &expected_balances, &expect_pass, None, //Some(S), - case.distribution_interval, - ) { - fails.push_str(format!("-> Test '{}':\n{}\n", case.name, &e).as_str()); - } - } - - if !fails.is_empty() { - panic!("failed tests:\n{}", fails); - } + distribution_interval, + ) + }) + .inspect_err(|e| { + println!("{}", e); + }) } - // HELPER FUNCTIONS // + // ===== HELPER FUNCTIONS ===== // + + /// Calculate expected emission at provided height. + /// + /// We use [i128] to ensure we handle overflows better than the original code. + /// // f(x) = n * (1 - (decrease_per_interval_numerator / decrease_per_interval_denominator))^((x - s) / step_count) - fn expected_emission(x: u64, dist: &DistributionFunction) -> u64 { + pub(super) fn expected_emission(x: u64, dist: &DistributionFunction) -> i128 { + let x = x as i128; let ( step_count, decrease_per_interval_numerator, @@ -827,33 +862,73 @@ mod block_based_test_suite_tests { n, min_value, } => ( - *step_count, - *decrease_per_interval_numerator, - *decrease_per_interval_denominator, - s.unwrap_or(1), - *n, - min_value.unwrap_or_default(), + *step_count as i128, + *decrease_per_interval_numerator as i128, + *decrease_per_interval_denominator as i128, + s.unwrap_or_default() as i128, + *n as i128, + min_value.unwrap_or(1) as i128, ), _ => panic!("expected StepDecreasingAmount"), }; - if x <= s { - return n; + if x < s { + n + } else { + // let's simplify it to a form like: + // f(x) = N * a ^ b + let a = 1f64 + - (decrease_per_interval_numerator as f64 + / decrease_per_interval_denominator as f64); + let b = (x - s) / step_count; // integer by purpose, we want to round down + let f_x = n as f64 * a.powi(b.to_i32().expect("overflow")); + f_x.to_i128() + .unwrap_or_else(|| { + 
println!("ERR: overflow in expected_emission({})", f_x); + 0 + }) + .max(min_value) } + } +} - // let's simplify it to a form like: - // f(x) = N * a ^ b - let a = 1f64 - - (decrease_per_interval_numerator as f64 / decrease_per_interval_denominator as f64); - let b = (x as f64 - s as f64) as i32 / step_count as i32; // integer by purpose, we want to round down - let f_x = n as f64 * a.powi(b); +mod test_suite { + use super::*; + use crate::rpc::core::MockCoreRPCLike; + use crate::test::helpers::fast_forward_to_block::fast_forward_to_block; + use crate::test::helpers::setup::TempPlatform; + use dpp::block::extended_block_info::v0::ExtendedBlockInfoV0Getters; + use dpp::data_contract::associated_token::token_distribution_key::TokenDistributionType; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_recipient::TokenDistributionRecipient; + use dpp::data_contract::associated_token::token_perpetual_distribution::reward_distribution_type::RewardDistributionType; + use dpp::data_contract::associated_token::token_perpetual_distribution::v0::TokenPerpetualDistributionV0; + use dpp::data_contract::associated_token::token_perpetual_distribution::TokenPerpetualDistribution; + use dpp::prelude::{DataContract, IdentityPublicKey}; + use simple_signer::signer::SimpleSigner; - // println!("expected_emission({}) = {}", x, f_x); - // f_x.to_u64().expect("expected to convert to u64") - f_x.to_u64().unwrap_or(min_value).max(min_value) + /// Run provided closure with timeout. 
+ pub(super) fn with_timeout( + duration: tokio::time::Duration, + f: impl FnOnce() -> Result<(), String> + Send + 'static, + ) -> Result<(), String> { + let rt = tokio::runtime::Builder::new_multi_thread() + .worker_threads(2) + .enable_all() + .build() + .unwrap(); + // thread executing our code + let worker = rt.spawn_blocking(f); + + rt.block_on(async move { tokio::time::timeout(duration, worker).await }) + .map_err(|e| format!("timeout after {:?}", e))? + .map_err(|e| format!("join error: {:?}", e))? } + /// Check that claim results at provided heights are as expected, and that balances match expectations. - fn check_heights( + /// + /// Note we take i128 into expected_balances, as we want to be able to detect overflows. + pub(super) fn check_heights( distribution_function: DistributionFunction, claim_heights: &[u64], expected_balances: &[u64], @@ -921,7 +996,7 @@ mod block_based_test_suite_tests { /// * `claim_heights` - heights at which claims will be made; they will see balance from previous height /// * `expected_balances` - expected balances after claims were made and block from `heights` was committed /// - fn check_heights_odd_no_current_rewards( + pub(super) fn check_heights_odd_no_current_rewards( distribution_function: DistributionFunction, claim_heights: &[u64], expected_balances: &[u64], @@ -982,17 +1057,6 @@ mod block_based_test_suite_tests { suite.execute(&tests) } -} - -mod test_suite { - use super::*; - use crate::rpc::core::MockCoreRPCLike; - use crate::test::helpers::fast_forward_to_block::fast_forward_to_block; - use crate::test::helpers::setup::TempPlatform; - use dpp::block::extended_block_info::v0::ExtendedBlockInfoV0Getters; - use dpp::data_contract::associated_token::token_distribution_key::TokenDistributionType; - use dpp::prelude::{DataContract, IdentityPublicKey}; - use simple_signer::signer::SimpleSigner; pub(crate) struct TestSuite { platform: TempPlatform, From 4a42235fc7109dee795ae4bd3bb24fc111a995c9 Mon Sep 17 00:00:00 2001 
From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Thu, 20 Mar 2025 16:46:42 +0100 Subject: [PATCH 08/21] chore: remove duplicate code --- .../distribution/perpetual/block_based.rs | 90 ------------------- 1 file changed, 90 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 2cc16b5d1e7..bbd62d0cf67 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -591,96 +591,6 @@ mod block_based_perpetual_random { .expect("no rewards"); } } -mod matrix { - use dpp::{ - balances::credits::TokenAmount, - data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction, - }; - use rust_decimal::prelude::ToPrimitive; - - use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::{block_based_perpetual_step_decreasing::expected_emission, test_suite::check_heights}; - - // #[test_case::test_matrix( - // [1,10], // step_count - // [0,1,u16::MAX,999], // decrease_per_interval_numerator - // [0,1,2,10,100,u16::MAX], // decrease_per_interval_denominator - // [None,Some(1),Some(10),Some(u64::MAX)], // s - // [0,1,100,100_000, 1_000_000, 10_000_000, 100_000_000, u64::MAX], // n - // [None,Some(1),Some(10),Some(u64::MAX)], // min_value - // [0,110, 100, 1000] // distribution_interval - // )] - #[test_case::test_matrix( - [0,1,10], // step_count - [0,1,10,u16::MAX], // decrease_per_interval_numerator - [1], // decrease_per_interval_denominator - [None], // s - [0,1,100000], // n - 
[None], // min_value - [0,110, 100, 1000] // distribution_interval - )] - fn test_block_based_perpetual_step_decreasing_matrix( - step_count: u32, - decrease_per_interval_numerator: u16, - decrease_per_interval_denominator: u16, - s: Option, - n: TokenAmount, - min_value: Option, - distribution_interval: u64, - ) { - // - let dist = DistributionFunction::StepDecreasingAmount { - step_count, - decrease_per_interval_numerator, - decrease_per_interval_denominator, - s, - n, - min_value, - }; - - const VERY_HIGH_HEIGHT: u64 = 1_000_000; - let claim_heights = if distribution_interval > 0 { - let mut heights = (1..10) - .map(|i| i * distribution_interval) - .collect::>(); - heights.push(VERY_HIGH_HEIGHT); - - heights - } else { - vec![1, 2, 3, 10, 100, VERY_HIGH_HEIGHT] - }; - let expected_balances = claim_heights - .iter() - .map(|&h| { - // initial balance, defined in contract js - let mut expected_balance = 100_000; - // loop over blocks, starting with S, with step PERPETUAL_DISTRIBUTION_INTERVAL - if distribution_interval > 0 { - for i in (1..=h).step_by(distribution_interval as usize) { - expected_balance += expected_emission(i, &dist); - } - } - println!("expected balance at height {}: {}", h, expected_balance); - expected_balance.to_u64().unwrap_or(0) // to handle tests that overflow - }) - .collect::>(); - // we expect all tests to pass - let expect_pass = claim_heights.iter().map(|&_h| true).collect::>(); - - if let Err(e) = check_heights( - dist.clone(), - &claim_heights, - &expected_balances, - &expect_pass, - None, //Some(S), - distribution_interval, - ) { - // print dist to stderr - panic!("test failed for distribution function {:?}: {}", dist, e); - } else { - println!("test passed for distribution function {:?}", dist); - } - } -} mod block_based_perpetual_step_decreasing { use dpp::balances::credits::TokenAmount; From 27bad8da58ffcc6955123c4c2992cc6be769900b Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Thu, 
20 Mar 2025 16:52:11 +0100 Subject: [PATCH 09/21] chore: self-review --- .../distribution/perpetual/block_based.rs | 31 ++++++++++++------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index bbd62d0cf67..cb8966a4dbc 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -887,7 +887,7 @@ mod test_suite { }] }; - tests.push(TestCase { + tests.push(TestStep { name: format!("claim at height {}", height), base_height: *height - 1, base_time_ms: 10_200_000_000, @@ -956,7 +956,7 @@ mod test_suite { }] }; - tests.push(TestCase { + tests.push(TestStep { name: format!("claim at height {}", height), base_height: *height - 1, base_time_ms: 10_200_000_000, @@ -968,6 +968,7 @@ mod test_suite { suite.execute(&tests) } + /// Test engine to run tests for different token distribution functions. pub(crate) struct TestSuite { platform: TempPlatform, platform_version: &'static PlatformVersion, @@ -985,10 +986,11 @@ mod test_suite { } impl TestSuite { + /// Create new test suite that will start at provided genesis time and create token contract with provided + /// configuration. pub(crate) fn new( genesis_time_ms: u64, time_between_blocks: u64, - token_distribution_type: TokenDistributionType, token_configuration_modification: Option, ) -> Self { @@ -1020,6 +1022,7 @@ mod test_suite { } .with_genesis(1, genesis_time_ms) } + /// Lazily initialize and return token contract. Also sets token id. 
fn get_contract(&mut self) -> DataContract { if let Some(ref contract) = self.contract { @@ -1049,6 +1052,7 @@ mod test_suite { contract } + /// Get token ID or create if needed. fn get_token_id(&mut self) -> Identifier { if self.token_id.is_none() { @@ -1065,8 +1069,8 @@ mod test_suite { self.nonce } - // submit a claim transition and assert the results - pub(crate) fn assert_claim(&mut self, assertions: Vec) -> Result<(), String> { + /// Submit a claim transition and assert the results + pub(crate) fn claim(&mut self, assertions: Vec) -> Result<(), String> { let committed_block_info = self.block_info(); let nonce = self.next_identity_nonce(); // next block config @@ -1133,6 +1137,7 @@ mod test_suite { Ok(()) } + /// Retrieve token balance for the identity and assert it matches expected value. pub(crate) fn assert_balance( &mut self, expected_balance: Option, @@ -1192,11 +1197,11 @@ mod test_suite { self.start_time = Some(start_time); self } - /// execute test cases - pub(super) fn execute(&mut self, tests: &[TestCase]) -> Result<(), String> { + /// execute test steps, one by one + pub(super) fn execute(&mut self, tests: &[TestStep]) -> Result<(), String> { let mut errors = String::new(); for test_case in tests { - let result = self.execute_test_case(test_case); + let result = self.execute_step(test_case); if let Err(e) = result { errors += format!("\n--> {}: {}", test_case.name, e).as_str(); } @@ -1209,7 +1214,9 @@ mod test_suite { } } - pub(super) fn execute_test_case(&mut self, test_case: &TestCase) -> Result<(), String> { + /// Execute a single test step. It fasts forwards to the block height of the test case, + /// executes the claim and checks the balance. 
+ pub(super) fn execute_step(&mut self, test_case: &TestStep) -> Result<(), String> { let current_height = self.block_info().height; let current_core_height = self.block_info().core_height; @@ -1229,7 +1236,7 @@ mod test_suite { self.epoch_index, false, ); - self.assert_claim(test_case.assertions.clone()) + self.claim(test_case.assertions.clone()) .map_err(|e| format!("claim failed: {}", e))?; self.assert_balance(Some(test_case.expected_balance)) .map_err(|e| format!("invalid balance: {}", e))?; @@ -1239,7 +1246,9 @@ mod test_suite { } pub(crate) type AssertionFn = fn(&[StateTransitionExecutionResult]) -> Result<(), String>; - pub(crate) struct TestCase { + + /// Individual step of a test case. + pub(crate) struct TestStep { pub(crate) name: String, /// height of block just before the claim pub(crate) base_height: u64, From 53576a189fe1c2122d2edf51292ae7110bb4b351 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Fri, 21 Mar 2025 14:42:13 +0100 Subject: [PATCH 10/21] WIP --- .../distribution/perpetual/block_based.rs | 273 +++++++++++++++--- 1 file changed, 228 insertions(+), 45 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index cb8966a4dbc..5bd824362b3 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -547,9 +547,11 @@ mod block_based_perpetual_fixed_amount { fn test_block_based_perpetual_fixed_amount_0() { check_heights( DistributionFunction::FixedAmount { amount: 0 }, - &[41, 46, 50, 100000], - &[100000, 100000, 100000, 
100000], - &[true, false, false, false], + &Claim::from_claims(( + &[41, 46, 50, 100000], + &[100000, 100000, 100000, 100000], + &[true, false, false, false], + )), None, 10, ) @@ -598,7 +600,7 @@ mod block_based_perpetual_step_decreasing { use rust_decimal::prelude::ToPrimitive; use test_case::test_case; use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::test_suite::check_heights; - use super::test_suite::with_timeout; + use super::test_suite::{with_timeout, Claim}; const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(1); @@ -735,9 +737,7 @@ mod block_based_perpetual_step_decreasing { with_timeout(TIMEOUT, move || { check_heights( dist, - &claim_heights, - &expected_balances, - &expect_pass, + &Claim::from_claims((&claim_heights, &expected_balances, &expect_pass)), None, //Some(S), distribution_interval, ) @@ -802,6 +802,144 @@ mod block_based_perpetual_step_decreasing { } } +mod block_based_perpetual_stepwise { + use std::collections::BTreeMap; + + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; + + use super::test_suite::{check_heights, with_timeout, Claim}; + + const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(1); + + #[test] + fn stepwise_correct() { + let periods = BTreeMap::from([ + (0, 10_000), + (20, 20_000), + (45, 30_000), + (50, 40_000), + (70, 50_000), + ]); + + let dist = DistributionFunction::Stepwise(periods); + let distribution_interval = 10; + + // claims: height, balance, expect_pass + let claims = [ + (10, 110_000, true), + (11, 110_000, false), + (20, 120_000, true), + (24, 120_000, false), + (35, 140_000, true), + (39, 140_000, false), + (46, 160_000, true), + (49, 160_000, false), + (50, 180_000, true), + (51, 180_000, false), + (70, 270_000, true), + ( + 1_000_000, + 270_000 + 50_000 * (1_000_000 - 70_000) / distribution_interval, + true, + ), + ]; + + let claim_heights = 
claims.map(|x| x.0); + let expected_balances = claims.map(|x| x.1); + let expect_pass = claims.map(|x| x.2); + + with_timeout(TIMEOUT, move || { + check_heights( + dist, + &claim_heights, + &expected_balances, + &expect_pass, + None, //Some(S), + distribution_interval, + ) + }) + .inspect_err(|e| { + println!("{}", e); + }) + .expect("stepwise should pass"); + } + + // ===== HELPER FUNCTIONS ===== // + + #[test] + fn stepwise_u64_max() { + let periods = BTreeMap::from([(0, u64::MAX)]); + let dist = DistributionFunction::Stepwise(periods); + + check_heights( + dist, + &[100], + &[0], // doesn't matter, we expect overflow + &[false], + None, //Some(S), + 10, + ) + .inspect_err(|e| { + println!("{}", e); + }) + .expect("stepwise should pass"); + } + #[test] + /// We check what happens if we start distribution before the first period. + fn stepwise_before_first_period() { + let periods = BTreeMap::from([(100, 10_000)]); + let dist = DistributionFunction::Stepwise(periods); + + // claims: height, balance, expect_pass + let claims = [ + (1, 100_000, true), // IMO we should be able to claim first 100_000 here so expect_pass == true + (9, 100_000, false), // TODO: claim should succeed here? To transfer this 100k? 
+ // (10, 0, false), + // (11, 0, false), + // (20, 0, false), + // (99, 0, false), + (100, 100_000, false), + (101, 110_000, true), + (102, 110_000, false), + (111, 120_000, true), + (200, 200_000, true), + (209, 200_000, false), + ]; + + check_heights( + dist, + &claims.map(|x| x.0), + &claims.map(|x| x.1), + &claims.map(|x| x.2), + None, + 10, + ) + .inspect_err(|e| { + println!("{}", e); + }) + .expect("stepwise should pass"); + } + + #[test] + /// This test will overflow within 6 distributions + fn stepwise_overflow() { + let periods = BTreeMap::from([(10, u64::MAX / 5)]); + let dist = DistributionFunction::Stepwise(periods); + + check_heights( + dist, + &[10, 11], + &[100_000], // doesn't matter, we expect overflow + &[false], + None, //Some(S), + 10, + ) + .inspect_err(|e| { + println!("{}", e); + }) + .expect("stepwise should pass"); + } +} mod test_suite { use super::*; use crate::rpc::core::MockCoreRPCLike; @@ -814,9 +952,10 @@ mod test_suite { use dpp::data_contract::associated_token::token_perpetual_distribution::reward_distribution_type::RewardDistributionType; use dpp::data_contract::associated_token::token_perpetual_distribution::v0::TokenPerpetualDistributionV0; use dpp::data_contract::associated_token::token_perpetual_distribution::TokenPerpetualDistribution; - use dpp::prelude::{DataContract, IdentityPublicKey}; + use dpp::prelude::{DataContract, IdentityPublicKey, TimestampMillis}; use simple_signer::signer::SimpleSigner; + const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(1); /// Run provided closure with timeout. pub(super) fn with_timeout( duration: tokio::time::Duration, @@ -838,19 +977,17 @@ mod test_suite { /// Check that claim results at provided heights are as expected, and that balances match expectations. /// /// Note we take i128 into expected_balances, as we want to be able to detect overflows. 
- pub(super) fn check_heights( + pub(super) fn check_heights + Clone>( distribution_function: DistributionFunction, - claim_heights: &[u64], - expected_balances: &[u64], - expect_pass: &[bool], - contract_start_height: Option, + claims: &[C], + contract_start_time: Option, distribution_interval: u64, ) -> Result<(), String> { let mut suite = TestSuite::new( 10_200_000_000, 0, TokenDistributionType::Perpetual, - Some(|token_configuration: &mut TokenConfiguration| { + Some(move |token_configuration: &mut TokenConfiguration| { token_configuration .distribution_rules_mut() .set_perpetual_distribution(Some(TokenPerpetualDistribution::V0( @@ -864,39 +1001,28 @@ mod test_suite { ))); }), ); - if let Some(start) = contract_start_height { + if let Some(start) = contract_start_time { suite = suite.with_contract_start_time(start); } let mut tests = Vec::new(); - for (i, height) in claim_heights.iter().enumerate() { - let assertions: Vec = if expect_pass[i] { - vec![|processing_results: &[_]| match processing_results { - [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), - _ => Err(format!( - "expected SuccessfulExecution, got {:?}", - processing_results - )), - }] - } else { - vec![|processing_results: &[_]| match processing_results { - [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => { - Err("expected error, got SuccessfulExecution".into()) - } - _ => Ok(()), - }] - }; + + let final_claims = claims + .iter() + .map(|item| item.clone().into()) + .collect::>(); + for item in claims { + let claim: Claim = item.clone().into(); tests.push(TestStep { - name: format!("claim at height {}", height), - base_height: *height - 1, + name: format!("claim at height {}", claim.claim_height), + base_height: claim.claim_height - 1, base_time_ms: 10_200_000_000, - expected_balance: expected_balances[i], - assertions, + expected_balance: claim.expected_balance, + claim_transition_assertions: claim.assertions(), }); } - - suite.execute(&tests) + 
with_timeout(TIMEOUT, move || suite.execute(&tests)) } /// This test checks claims at provided heights, where every second height does not have any rewards to claim. /// @@ -961,7 +1087,7 @@ mod test_suite { base_height: *height - 1, base_time_ms: 10_200_000_000, expected_balance: expected_balances[i], - assertions, + claim_transition_assertions: assertions, }); } @@ -977,7 +1103,7 @@ mod test_suite { identity_public_key: IdentityPublicKey, token_id: Option, contract: Option, - start_time: Option, + start_time: Option, token_distribution_type: TokenDistributionType, token_configuration_modification: Option, epoch_index: u16, @@ -1190,7 +1316,7 @@ mod test_suite { /// Configure custom contract start time; must be called before contract is /// initialized. - pub(super) fn with_contract_start_time(mut self, start_time: u64) -> Self { + pub(super) fn with_contract_start_time(mut self, start_time: TimestampMillis) -> Self { if self.contract.is_some() { panic!("with_contract_start_time must be called before contract is initialized"); } @@ -1203,7 +1329,7 @@ mod test_suite { for test_case in tests { let result = self.execute_step(test_case); if let Err(e) = result { - errors += format!("\n--> {}: {}", test_case.name, e).as_str(); + errors += format!("\n--> {}: {}\n", test_case.name, e).as_str(); } } @@ -1236,7 +1362,7 @@ mod test_suite { self.epoch_index, false, ); - self.claim(test_case.assertions.clone()) + self.claim(test_case.claim_transition_assertions.clone()) .map_err(|e| format!("claim failed: {}", e))?; self.assert_balance(Some(test_case.expected_balance)) .map_err(|e| format!("invalid balance: {}", e))?; @@ -1257,7 +1383,64 @@ mod test_suite { /// expected balance is a function that should return the expected balance after committing block /// at provided height and time pub(crate) expected_balance: u64, - /// assertion functions that will be executed on the claim - pub(crate) assertions: Vec, + /// assertion functions that must be met after executing the claim 
state transition + pub(crate) claim_transition_assertions: Vec, + } + + impl TestStep { + pub(super) fn new( + claim_height: u64, + expected_balance: u64, + expect_claim_successful: bool, + ) -> Self { + let assertions: Vec = if expect_claim_successful { + vec![|processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), + _ => Err(format!( + "expected SuccessfulExecution, got {:?}", + processing_results + )), + }] + } else { + vec![|processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => { + Err("expected error, got SuccessfulExecution".into()) + } + [StateTransitionExecutionResult::InternalError(e)] => { + Err(format!("expected normal error, got InternalError: {}", e)) + } + _ => Ok(()), + }] + }; + Self { + name: format!("claim at height {}", claim_height), + base_height: claim_height - 1, + base_time_ms: 10_200_000_000, + expected_balance, + claim_transition_assertions: assertions, + } + } + + // just a helper to faster update existing code + pub(super) fn from_claims( + (claim_heights, expected_balances, expect_pass): (&[u64], &[u64], &[bool]), + ) -> Vec { + assert_eq!(claim_heights.len(), expected_balances.len()); + assert_eq!(claim_heights.len(), expect_pass.len()); + claim_heights + .iter() + .zip(expected_balances.iter()) + .zip(expect_pass.iter()) + .map(|((&h, &balance), &expect)| Claim::new(h, balance, expect)) + .collect() + } + } + + impl From<(u64, u64, bool)> for TestStep { + fn from( + (claim_height, expected_balance, expect_claim_successful): (u64, u64, bool), + ) -> Self { + Self::new(claim_height, expected_balance, expect_claim_successful) + } } } From 91de94c86c772e46a5a6b21c13886de09a24bb3a Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Mon, 24 Mar 2025 18:35:42 +0100 Subject: [PATCH 11/21] chore: check entropy of random distribution --- 
.../distribution/perpetual/block_based.rs | 489 +++++++++++------- 1 file changed, 290 insertions(+), 199 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 5bd824362b3..3aa327651a3 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -505,12 +505,16 @@ mod block_based_perpetual_fixed_amount { // When a claim is made at block 42, // Then the claim should be successful. #[test] - fn test_block_based_perpetual_fixed_amount_50() { - super::test_suite::check_heights_odd_no_current_rewards( + super::test_suite::check_heights( DistributionFunction::FixedAmount { amount: 50 }, - &[41, 46, 50, 1000], - &[100200, 100200, 100250], + &[ + TestStep::new(41, 100_200, true), + TestStep::new(46, 100_200, false), + TestStep::new(50, 100_250, true), + TestStep::new(51, 100_250, false), + ], + None, 10, ) .expect("\n-> fixed amount should pass"); @@ -522,76 +526,222 @@ mod block_based_perpetual_fixed_amount { /// got [InternalError(\"storage: protocol: overflow error: Overflow in FixedAmount evaluation\")]" #[test] fn test_block_based_perpetual_fixed_amount_1_000_000_000() { - check_heights_odd_no_current_rewards( + check_heights( DistributionFunction::FixedAmount { amount: 1_000_000_000, }, - &[41, 46, 50, 51, 1_000_000_000_000], &[ - 100_000 + 4 * 1_000_000_000, - 100_000 + 4 * 1_000_000_000, - 100_000 + 5 * 1_000_000_000, - 100_000 + 5 * 1_000_000_000, - 1, // 100_000 + (1_000_000_000_000 / 10) * 1_000_000_000, -- this will overflow + TestStep::new(41, 100_000 + 4 * 1_000_000_000, 
true), + TestStep::new(46, 100_000 + 4 * 1_000_000_000, false), + TestStep::new(50, 100_000 + 5 * 1_000_000_000, true), + TestStep::new(51, 100_000 + 5 * 1_000_000_000, false), + TestStep::new(1_000_000_000_000, 100_000 + 5 * 1_000_000_000, false), ], + None, 10, ) .expect("\n-> fixed amount should pass"); } #[test] - /// With a fixed amount of 0, we expect first claim to fetch 100_000 units (which are in the contract defintion), + /// With a fixed amount of 0, we expect first claim to fetch 100_000 units (which are hardcoded in the JSON contract defintion), /// and fail for the rest of the claims. /// /// FAILS fn test_block_based_perpetual_fixed_amount_0() { check_heights( DistributionFunction::FixedAmount { amount: 0 }, - &Claim::from_claims(( - &[41, 46, 50, 100000], - &[100000, 100000, 100000, 100000], - &[true, false, false, false], - )), + &[ + (41, 100000, true), + (46, 100000, false), + (50, 100000, false), + (1000, 100000, false), + ], None, 10, ) .expect("\nfixed amount zero increase\n"); } + /// Overflow caused by using u64::MAX as fixed amount should not cause InternalError. 
#[test] fn test_block_based_perpetual_fixed_amount_u64_max() { - check_heights_odd_no_current_rewards( + check_heights( DistributionFunction::FixedAmount { amount: u64::MAX }, - &[41, 46, 50, 1000], - &[100200, 100200, 100250, 100250], + &[ + TestStep::new(41, 100_200, true), + TestStep::new(46, 100_200, false), + TestStep::new(50, 100_250, true), + TestStep::new(1000, 100_250, false), + ], + None, 10, ) .expect("\nfixed amount u64::MAX should pass\n"); } } mod block_based_perpetual_random { - use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; - - use super::test_suite::check_heights_odd_no_current_rewards; + use std::{ + collections::BTreeMap, + sync::{Arc, Mutex}, + }; + + use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::test_suite::TestSuite; + + use super::test_suite::{check_heights, TestStep}; + use dpp::data_contract::{ + associated_token::{ + token_configuration::accessors::v0::TokenConfigurationV0Getters, + token_distribution_key::TokenDistributionType, + token_distribution_rules::accessors::v0::TokenDistributionRulesV0Setters, + token_perpetual_distribution::{ + distribution_function::DistributionFunction, + distribution_recipient::TokenDistributionRecipient, + reward_distribution_type::RewardDistributionType, v0::TokenPerpetualDistributionV0, + TokenPerpetualDistribution, + }, + }, + TokenConfiguration, + }; + /// Random distribution function with min=0, max=100. 
#[test] - fn test_block_based_perpetual_random() { - check_heights_odd_no_current_rewards( + fn test_block_based_perpetual_random_0_100() { + check_heights( DistributionFunction::Random { min: 0, max: 100 }, - &[41, 46, 50, 59, 60], - &[100192, 100192, 100263, 100263, 100310], + &[ + TestStep::new(41, 100_192, true), + TestStep::new(46, 100_192, false), + TestStep::new(50, 100_263, true), + TestStep::new(59, 100_263, false), + TestStep::new(60, 100_310, true), + ], + None, 10, ) .expect("correct case 1"); + } - check_heights_odd_no_current_rewards( + /// Random distribution function with min=0, max=0 should only return the initial balance. + #[test] + fn test_block_based_perpetual_random_0_0() { + check_heights( DistributionFunction::Random { min: 0, max: 0 }, - &[41], - &[100192], + &[ + TestStep::new(41, 100_000, true), + TestStep::new(50, 100_000, false), + TestStep::new(100, 100_000, false), + ], + None, 10, ) .expect("no rewards"); } + + /// Check if the random function is truly random by estimating its entropy. 
+ #[test] + fn test_block_based_perpetual_random_10_30_entropy() { + const N: u64 = 200; + const MIN: u64 = 10; + const MAX: u64 = 30; + let tests: Vec<_> = (1..=N) + .map(|i| TestStep { + name: format!("test_{}", i), + base_height: i - 1, + base_time_ms: Default::default(), + + expected_balance: None, + claim_transition_assertions: Default::default(), + }) + .collect(); + + // we expect the average to be 200; we add 100_000 which is the initial balance + // let expected_balance = ((((MIN + MAX) as f64) / 2.0) * (N as f64)) as u64 + 100_000; + // tests.push(TestStep { + // name: "last test".to_string(), + // base_height: N - 1, + // base_time_ms: Default::default(), + // expected_balance: Some(expected_balance), + // claim_transition_assertions: Default::default(), + // }); + + let balances = Arc::new(Mutex::new(Vec::new())); + let balances_result = balances.clone(); + + let mut suite = TestSuite::new( + 10_200_000_000, + 0, + TokenDistributionType::Perpetual, + Some(move |token_configuration: &mut TokenConfiguration| { + token_configuration + .distribution_rules_mut() + .set_perpetual_distribution(Some(TokenPerpetualDistribution::V0( + TokenPerpetualDistributionV0 { + distribution_type: RewardDistributionType::BlockBasedDistribution { + interval: 1, + function: DistributionFunction::Random { min: MIN, max: MAX }, + }, + distribution_recipient: TokenDistributionRecipient::ContractOwner, + }, + ))); + }), + ) + .with_step_success_fn(move |balance: u64| { + balances.lock().unwrap().push(balance); + }); + + suite.execute(&tests).expect("should execute"); + + let data = balances_result.lock().unwrap(); + // substract balance from previous step (for first step, substract initial balance of 100_000) + let diffs: Vec = data + .iter() + .scan(100_000, |prev, &x| { + let diff = x - *prev; + *prev = x; + Some(diff) + }) + .collect(); + + let entropy = calculate_entropy(&diffs); + let max_entropy: f64 = ((MAX - MIN) as f64).log2(); + let entropy_diff = (max_entropy - 
entropy).abs() / max_entropy; + + println!("Data: {:?}", diffs); + println!( + "Entropy: {}, max entropy: {}, difference: {}%", + entropy, + max_entropy, + entropy_diff * 100.0 + ); + + // assert that the entropy is close to the maximum entropy + assert!( + entropy_diff < 0.05, + "Entropy is not close to maximum entropy" + ); + } + + // HELPERS // + + fn calculate_entropy(data: &[u64]) -> f64 { + let mut counts = BTreeMap::new(); + let len = data.len() as f64; + + // Count the occurrences of each value + for &value in data { + *counts.entry(value).or_insert(0) += 1; + } + + // Calculate the probability of each value and apply the Shannon entropy formula + let mut entropy = 0.0; + for &count in counts.values() { + let probability = count as f64 / len; + entropy -= probability * probability.log2(); + } + + entropy + } } mod block_based_perpetual_step_decreasing { @@ -600,9 +750,6 @@ mod block_based_perpetual_step_decreasing { use rust_decimal::prelude::ToPrimitive; use test_case::test_case; use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::test_suite::check_heights; - use super::test_suite::{with_timeout, Claim}; - - const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(1); #[test_case( 1,// step_count @@ -734,14 +881,19 @@ mod block_based_perpetual_step_decreasing { // we expect all tests to pass let expect_pass = claim_heights.iter().map(|&_h| true).collect::>(); - with_timeout(TIMEOUT, move || { - check_heights( - dist, - &Claim::from_claims((&claim_heights, &expected_balances, &expect_pass)), - None, //Some(S), - distribution_interval, - ) - }) + let claims = claim_heights + .iter() + .zip(expected_balances.iter()) + .zip(expect_pass.iter()) + .map(|((&h, &b), &p)| (h, b, p)) + .collect::>(); + + check_heights( + dist, + &claims, + None, //Some(S), + distribution_interval, + ) .inspect_err(|e| { println!("{}", e); }) @@ -803,13 +955,9 @@ mod block_based_perpetual_step_decreasing { } 
mod block_based_perpetual_stepwise { - use std::collections::BTreeMap; - + use super::test_suite::check_heights; use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; - - use super::test_suite::{check_heights, with_timeout, Claim}; - - const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(1); + use std::collections::BTreeMap; #[test] fn stepwise_correct() { @@ -844,20 +992,12 @@ mod block_based_perpetual_stepwise { ), ]; - let claim_heights = claims.map(|x| x.0); - let expected_balances = claims.map(|x| x.1); - let expect_pass = claims.map(|x| x.2); - - with_timeout(TIMEOUT, move || { - check_heights( - dist, - &claim_heights, - &expected_balances, - &expect_pass, - None, //Some(S), - distribution_interval, - ) - }) + check_heights( + dist, + &claims, + None, //Some(S), + distribution_interval, + ) .inspect_err(|e| { println!("{}", e); }) @@ -873,9 +1013,7 @@ mod block_based_perpetual_stepwise { check_heights( dist, - &[100], - &[0], // doesn't matter, we expect overflow - &[false], + &[(100, 0, false)], None, //Some(S), 10, ) @@ -906,18 +1044,11 @@ mod block_based_perpetual_stepwise { (209, 200_000, false), ]; - check_heights( - dist, - &claims.map(|x| x.0), - &claims.map(|x| x.1), - &claims.map(|x| x.2), - None, - 10, - ) - .inspect_err(|e| { - println!("{}", e); - }) - .expect("stepwise should pass"); + check_heights(dist, &claims, None, 10) + .inspect_err(|e| { + println!("{}", e); + }) + .expect("stepwise should pass"); } #[test] @@ -928,9 +1059,7 @@ mod block_based_perpetual_stepwise { check_heights( dist, - &[10, 11], - &[100_000], // doesn't matter, we expect overflow - &[false], + &[(10, 100_000, false), (11, 100_000, false)], None, //Some(S), 10, ) @@ -955,9 +1084,10 @@ mod test_suite { use dpp::prelude::{DataContract, IdentityPublicKey, TimestampMillis}; use simple_signer::signer::SimpleSigner; - const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(1); + 
const TIMEOUT: tokio::time::Duration = tokio::time::Duration::from_secs(10); /// Run provided closure with timeout. - pub(super) fn with_timeout( + /// TODO: Check if it works with sync code + fn with_timeout( duration: tokio::time::Duration, f: impl FnOnce() -> Result<(), String> + Send + 'static, ) -> Result<(), String> { @@ -970,16 +1100,29 @@ mod test_suite { let worker = rt.spawn_blocking(f); rt.block_on(async move { tokio::time::timeout(duration, worker).await }) - .map_err(|e| format!("timeout after {:?}", e))? + .map_err(|e| format!("test timed out after {:?}", TIMEOUT))? .map_err(|e| format!("join error: {:?}", e))? } /// Check that claim results at provided heights are as expected, and that balances match expectations. /// /// Note we take i128 into expected_balances, as we want to be able to detect overflows. - pub(super) fn check_heights + Clone>( + /// + /// # Arguments + /// + /// * `distribution_function` - configured distribution function to test + /// * `claims` - heights at which claims will be made; they will see balance from previous height + /// * `contract_start_time` - optional start time of the contract + /// * `distribution_interval` - interval between distributions + /// + /// Note that for conveniance, you can provide `steps` as a [`TestStep`] or a slice of tuples, where each tuple contains: + /// * `height` - height at which claim will be made + /// * `expected_balance` - expected balance after claim was made + /// * `expect_pass` - whether we expect the claim to pass or not + /// + pub(super) fn check_heights + Clone>( distribution_function: DistributionFunction, - claims: &[C], + steps: &[C], contract_start_time: Option, distribution_interval: u64, ) -> Result<(), String> { @@ -1005,93 +1148,12 @@ mod test_suite { suite = suite.with_contract_start_time(start); } - let mut tests = Vec::new(); - - let final_claims = claims + let steps = steps .iter() .map(|item| item.clone().into()) - .collect::>(); - for item in claims { - let claim: 
Claim = item.clone().into(); + .collect::>(); - tests.push(TestStep { - name: format!("claim at height {}", claim.claim_height), - base_height: claim.claim_height - 1, - base_time_ms: 10_200_000_000, - expected_balance: claim.expected_balance, - claim_transition_assertions: claim.assertions(), - }); - } - with_timeout(TIMEOUT, move || suite.execute(&tests)) - } - /// This test checks claims at provided heights, where every second height does not have any rewards to claim. - /// - /// # Arguments - /// - /// * `distribution_function` - configured distribution function to test - /// * `claim_heights` - heights at which claims will be made; they will see balance from previous height - /// * `expected_balances` - expected balances after claims were made and block from `heights` was committed - /// - pub(super) fn check_heights_odd_no_current_rewards( - distribution_function: DistributionFunction, - claim_heights: &[u64], - expected_balances: &[u64], - distribution_interval: u64, - ) -> Result<(), String> { - let mut suite = TestSuite::new( - 10_200_000_000, - 0, - TokenDistributionType::Perpetual, - Some(|token_configuration: &mut TokenConfiguration| { - token_configuration - .distribution_rules_mut() - .set_perpetual_distribution(Some(TokenPerpetualDistribution::V0( - TokenPerpetualDistributionV0 { - distribution_type: RewardDistributionType::BlockBasedDistribution { - interval: distribution_interval, - function: distribution_function, - }, - distribution_recipient: TokenDistributionRecipient::ContractOwner, - }, - ))); - }), - ); - - let mut tests = Vec::new(); - for (i, height) in claim_heights.iter().enumerate() { - let assertions: Vec = if i % 2 == 0 { - vec![|processing_results: &[_]| match processing_results { - [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), - _ => Err(format!( - "expected SuccessfulExecution, got {:?}", - processing_results - )), - }] - } else { - vec![|processing_results: &[_]| match processing_results { - 
[StateTransitionExecutionResult::PaidConsensusError( - ConsensusError::StateError(StateError::InvalidTokenClaimNoCurrentRewards( - _, - )), - _, - )] => Ok(()), - _ => Err(format!( - "expected InvalidTokenClaimNoCurrentRewards, got {:?}", - processing_results - )), - }] - }; - - tests.push(TestStep { - name: format!("claim at height {}", height), - base_height: *height - 1, - base_time_ms: 10_200_000_000, - expected_balance: expected_balances[i], - claim_transition_assertions: assertions, - }); - } - - suite.execute(&tests) + with_timeout(TIMEOUT, move || suite.execute(&steps)) } /// Test engine to run tests for different token distribution functions. @@ -1109,6 +1171,13 @@ mod test_suite { epoch_index: u16, nonce: u64, time_between_blocks: u64, + + /// function that will be called after successful claim. + /// + /// ## Arguments + /// + /// * `u64` - balance after claim + on_step_success: Box, } impl TestSuite { @@ -1145,6 +1214,7 @@ mod test_suite { nonce: 1, time_between_blocks, token_configuration_modification, + on_step_success: Box::new(|_| {}), } .with_genesis(1, genesis_time_ms) } @@ -1264,13 +1334,10 @@ mod test_suite { } /// Retrieve token balance for the identity and assert it matches expected value. - pub(crate) fn assert_balance( - &mut self, - expected_balance: Option, - ) -> Result<(), String> { + pub(crate) fn get_balance(&mut self) -> Result, String> { let token_id = self.get_token_id().to_buffer(); - let token_balance = self - .platform + + self.platform .drive .fetch_identity_token_balance( token_id, @@ -1278,7 +1345,15 @@ mod test_suite { None, self.platform_version, ) - .expect("expected to fetch token balance"); + .map_err(|e| format!("failed to fetch token balance: {}", e)) + } + + /// Retrieve token balance for the identity and assert it matches expected value. 
+ pub(crate) fn assert_balance( + &mut self, + expected_balance: Option, + ) -> Result<(), String> { + let token_balance = self.get_balance()?; if token_balance != expected_balance { return Err(format!( @@ -1323,6 +1398,21 @@ mod test_suite { self.start_time = Some(start_time); self } + + pub(super) fn with_step_success_fn<'a>( + mut self, + step_success_fn: impl Fn(u64) + Send + Sync + 'static, + ) -> Self + where + Self: 'a, + { + // fn f(s: TestSuite) { + // step_success_fn(s); + // }; + self.on_step_success = Box::new(step_success_fn); + self + } + /// execute test steps, one by one pub(super) fn execute(&mut self, tests: &[TestStep]) -> Result<(), String> { let mut errors = String::new(); @@ -1364,8 +1454,21 @@ mod test_suite { ); self.claim(test_case.claim_transition_assertions.clone()) .map_err(|e| format!("claim failed: {}", e))?; - self.assert_balance(Some(test_case.expected_balance)) - .map_err(|e| format!("invalid balance: {}", e))?; + + let balance = self + .get_balance() + .map_err(|e| format!("failed to get balance: {}", e))? + .ok_or("expected balance to be present, but got None".to_string())?; + + if let Some(expected_balance) = test_case.expected_balance { + if expected_balance != balance { + return Err(format!( + "expected balance {:?} but got {:?}", + test_case.expected_balance, balance + )); + } + }; + (self.on_step_success)(balance); Ok(()) } @@ -1374,6 +1477,7 @@ mod test_suite { pub(crate) type AssertionFn = fn(&[StateTransitionExecutionResult]) -> Result<(), String>; /// Individual step of a test case. 
+ #[derive(Clone, Debug)] pub(crate) struct TestStep { pub(crate) name: String, /// height of block just before the claim @@ -1382,18 +1486,19 @@ mod test_suite { pub(crate) base_time_ms: u64, /// expected balance is a function that should return the expected balance after committing block /// at provided height and time - pub(crate) expected_balance: u64, + pub(crate) expected_balance: Option, /// assertion functions that must be met after executing the claim state transition pub(crate) claim_transition_assertions: Vec, } impl TestStep { - pub(super) fn new( - claim_height: u64, - expected_balance: u64, - expect_claim_successful: bool, - ) -> Self { - let assertions: Vec = if expect_claim_successful { + /// Create a new test step with provided claim height and expected balance. + /// If expect_success is true, we expect the claim to be successful. + /// If false, we expect the claim to fail. + /// + /// If expected_balance is None, we don't check the balance. + pub(super) fn new(claim_height: u64, expected_balance: u64, expect_success: bool) -> Self { + let assertions: Vec = if expect_success { vec![|processing_results: &[_]| match processing_results { [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), _ => Err(format!( @@ -1416,24 +1521,10 @@ mod test_suite { name: format!("claim at height {}", claim_height), base_height: claim_height - 1, base_time_ms: 10_200_000_000, - expected_balance, + expected_balance: Some(expected_balance), claim_transition_assertions: assertions, } } - - // just a helper to faster update existing code - pub(super) fn from_claims( - (claim_heights, expected_balances, expect_pass): (&[u64], &[u64], &[bool]), - ) -> Vec { - assert_eq!(claim_heights.len(), expected_balances.len()); - assert_eq!(claim_heights.len(), expect_pass.len()); - claim_heights - .iter() - .zip(expected_balances.iter()) - .zip(expect_pass.iter()) - .map(|((&h, &balance), &expect)| Claim::new(h, balance, expect)) - .collect() - } } impl From<(u64, 
u64, bool)> for TestStep { From 3dc1c9b10d508a0f0085b52e24ea9f229b993164 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Tue, 25 Mar 2025 14:29:01 +0100 Subject: [PATCH 12/21] wip --- .../distribution/perpetual/block_based.rs | 467 ++++++++++++------ 1 file changed, 320 insertions(+), 147 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 3aa327651a3..da6f8e62a5f 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -8,6 +8,10 @@ use dpp::data_contract::TokenConfiguration; use dpp::state_transition::batch_transition::BatchTransition; use platform_version::version::PlatformVersion; use rand::prelude::StdRng; + +/// Initial contract balance, as hardcoded in the contract definition (JSON file). 
+const INITIAL_BALANCE: u64 = 100_000; + mod perpetual_distribution_block { use dpp::block::epoch::Epoch; use dpp::data_contract::associated_token::token_distribution_key::TokenDistributionType; @@ -498,8 +502,13 @@ mod perpetual_distribution_block { #[cfg(test)] mod block_based_perpetual_fixed_amount { - use super::test_suite::*; - use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; + use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; + + use super::{test_suite::*, INITIAL_BALANCE}; + use dpp::{ + consensus::{state::state_error::StateError, ConsensusError}, + data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction, + }; // Given some token configuration, // When a claim is made at block 42, @@ -516,66 +525,100 @@ mod block_based_perpetual_fixed_amount { ], None, 10, + None, ) .expect("\n-> fixed amount should pass"); } /// Test case for overflow error. /// + /// TODO: Fails, please fix. 
+ /// /// claim at height 1000000000000: claim failed: assertion 0 failed: expected SuccessfulExecution, /// got [InternalError(\"storage: protocol: overflow error: Overflow in FixedAmount evaluation\")]" #[test] - fn test_block_based_perpetual_fixed_amount_1_000_000_000() { + fn fail_test_block_based_perpetual_fixed_amount_1_000_000_000() { check_heights( DistributionFunction::FixedAmount { amount: 1_000_000_000, }, &[ - TestStep::new(41, 100_000 + 4 * 1_000_000_000, true), - TestStep::new(46, 100_000 + 4 * 1_000_000_000, false), - TestStep::new(50, 100_000 + 5 * 1_000_000_000, true), - TestStep::new(51, 100_000 + 5 * 1_000_000_000, false), - TestStep::new(1_000_000_000_000, 100_000 + 5 * 1_000_000_000, false), + TestStep::new(41, INITIAL_BALANCE + 4 * 1_000_000_000, true), + TestStep::new(46, INITIAL_BALANCE + 4 * 1_000_000_000, false), + TestStep::new(50, INITIAL_BALANCE + 5 * 1_000_000_000, true), + TestStep::new(51, INITIAL_BALANCE + 5 * 1_000_000_000, false), + TestStep::new( + 1_000_000_000_000, + INITIAL_BALANCE + 5 * 1_000_000_000, + false, + ), ], None, 10, + None, ) .expect("\n-> fixed amount should pass"); } #[test] - /// With a fixed amount of 0, we expect first claim to fetch 100_000 units (which are hardcoded in the JSON contract defintion), - /// and fail for the rest of the claims. - /// - /// FAILS + /// Given a fixed amount distribution with value of 0, + /// When we try to claim, + /// Then we always fail and the balance remains unchanged. fn test_block_based_perpetual_fixed_amount_0() { check_heights( DistributionFunction::FixedAmount { amount: 0 }, &[ - (41, 100000, true), + (41, 100000, false), (46, 100000, false), (50, 100000, false), (1000, 100000, false), ], None, 10, + None, ) .expect("\nfixed amount zero increase\n"); } - /// Overflow caused by using u64::MAX as fixed amount should not cause InternalError. 
#[test] - fn test_block_based_perpetual_fixed_amount_u64_max() { + /// Given a fixed amount distribution with value of 1_000_000 and max_supply of 200_000, + /// When we try to claim, + /// Then we always fail and the balance remains unchanged. + fn test_fixed_amount_above_max_supply() { + let test = TestStep { + name: "test_fixed_amount_above_max_supply".to_string(), + base_height: 41, + base_time_ms: Default::default(), + expected_balance: None, + claim_transition_assertions: vec![|v| match v { + [StateTransitionExecutionResult::PaidConsensusError( + ConsensusError::StateError(StateError::TokenMintPastMaxSupplyError(_)), + _, + )] => Ok(()), + _ => Err(format!("expected TokenMintPastMaxSupplyError, got {:?}", v)), + }], + }; + check_heights( + DistributionFunction::FixedAmount { amount: 1_000_000 }, + &[test], + None, + 10, + Some(Some(200_000)), + ) + .expect("\nfixed amount zero increase\n"); + } + + /// Given a fixed amount distribution with value of u64::MAX, + /// When I claim tokens, + /// Then I don't get an InternalError. 
+ #[test] + fn fail_test_block_based_perpetual_fixed_amount_u64_max() { check_heights( DistributionFunction::FixedAmount { amount: u64::MAX }, - &[ - TestStep::new(41, 100_200, true), - TestStep::new(46, 100_200, false), - TestStep::new(50, 100_250, true), - TestStep::new(1000, 100_250, false), - ], + &[TestStep::new(41, 100_000, false)], None, 10, + None, ) .expect("\nfixed amount u64::MAX should pass\n"); } @@ -588,7 +631,10 @@ mod block_based_perpetual_random { use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::test_suite::TestSuite; - use super::test_suite::{check_heights, TestStep}; + use super::{ + test_suite::{check_heights, TestStep}, + INITIAL_BALANCE, + }; use dpp::data_contract::{ associated_token::{ token_configuration::accessors::v0::TokenConfigurationV0Getters, @@ -603,42 +649,56 @@ mod block_based_perpetual_random { }, TokenConfiguration, }; + use test_case::test_matrix; - /// Random distribution function with min=0, max=100. - #[test] - fn test_block_based_perpetual_random_0_100() { + /// , steps: &[TestStep]) { check_heights( - DistributionFunction::Random { min: 0, max: 100 }, - &[ - TestStep::new(41, 100_192, true), - TestStep::new(46, 100_192, false), - TestStep::new(50, 100_263, true), - TestStep::new(59, 100_263, false), - TestStep::new(60, 100_310, true), - ], + DistributionFunction::Random { min, max }, + steps, None, 10, + Some(max_supply), ) .expect("correct case 1"); } - /// Random distribution function with min=0, max=0 should only return the initial balance. + /// Given a random distribution function with min=0, max=0, + /// When I claim tokens at various heights, + /// Then claim fails and I get the same balance at those heights. 
#[test] fn test_block_based_perpetual_random_0_0() { check_heights( DistributionFunction::Random { min: 0, max: 0 }, &[ - TestStep::new(41, 100_000, true), - TestStep::new(50, 100_000, false), - TestStep::new(100, 100_000, false), + TestStep::new(41, INITIAL_BALANCE, false), + TestStep::new(50, INITIAL_BALANCE, false), + TestStep::new(100, INITIAL_BALANCE, false), ], None, 10, + None, ) .expect("no rewards"); } - /// Check if the random function is truly random by estimating its entropy. + /// Given a random distribution function with min=10, max=30, + /// When I claim tokens at various heights, + /// Then I get a distribution of balances that is close to the maximum entropy. #[test] fn test_block_based_perpetual_random_10_30_entropy() { const N: u64 = 200; @@ -655,16 +715,6 @@ mod block_based_perpetual_random { }) .collect(); - // we expect the average to be 200; we add 100_000 which is the initial balance - // let expected_balance = ((((MIN + MAX) as f64) / 2.0) * (N as f64)) as u64 + 100_000; - // tests.push(TestStep { - // name: "last test".to_string(), - // base_height: N - 1, - // base_time_ms: Default::default(), - // expected_balance: Some(expected_balance), - // claim_transition_assertions: Default::default(), - // }); - let balances = Arc::new(Mutex::new(Vec::new())); let balances_result = balances.clone(); @@ -696,7 +746,7 @@ mod block_based_perpetual_random { // substract balance from previous step (for first step, substract initial balance of 100_000) let diffs: Vec = data .iter() - .scan(100_000, |prev, &x| { + .scan(INITIAL_BALANCE, |prev, &x| { let diff = x - *prev; *prev = x; Some(diff) @@ -707,8 +757,8 @@ mod block_based_perpetual_random { let max_entropy: f64 = ((MAX - MIN) as f64).log2(); let entropy_diff = (max_entropy - entropy).abs() / max_entropy; - println!("Data: {:?}", diffs); - println!( + tracing::debug!("Data: {:?}", diffs); + tracing::info!( "Entropy: {}, max entropy: {}, difference: {}%", entropy, max_entropy, @@ -750,6 +800,7 @@ mod 
block_based_perpetual_step_decreasing { use rust_decimal::prelude::ToPrimitive; use test_case::test_case; use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::test_suite::check_heights; + use crate::execution::validation::state_transition::batch::tests::token::distribution::perpetual::block_based::INITIAL_BALANCE; #[test_case( 1,// step_count @@ -771,7 +822,7 @@ mod block_based_perpetual_step_decreasing { Some(1),// min_value Some((1..1000).step_by(500).collect()),// claim_heights 1; // distribution_interval - "claim every 500 blocks" + "fail claim every 500 blocks" )] #[test_matrix( 1,// step_count @@ -793,7 +844,7 @@ mod block_based_perpetual_step_decreasing { Some(1),// min_value Some(vec![1,7]), // claim_heights 1; // distribution_interval - "1000x increase, overflow" + "fail 1000x increase, overflow" )] #[test_case( 1,// step_count @@ -817,6 +868,17 @@ mod block_based_perpetual_step_decreasing { 1; // distribution_interval "no decrease, irrelevant min values" )] + #[test_matrix( + [1],// step_count + 1,// decrease_per_interval_numerator + 1,// decrease_per_interval_denominator + None,// s + 100_000,// n + None,// min_value + Some(vec![7,10,20,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + [5]; // distribution_interval + "no decrease, changing step" + )] #[test_matrix( [5,10],// step_count 1,// decrease_per_interval_numerator @@ -824,7 +886,7 @@ mod block_based_perpetual_step_decreasing { None,// s 100_000,// n None,// min_value - Some(vec![5,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + Some(vec![5,10,20,100]), // claim_heights // ,300,500,800,1_000,1_000_000 [1,5]; // distribution_interval "1/2 decrease, changing step" )] @@ -866,14 +928,14 @@ mod block_based_perpetual_step_decreasing { .iter() .map(|&h| { // initial balance, defined in contract js - let mut expected_balance: i128 = 100_000; + let mut expected_balance: i128 = INITIAL_BALANCE as i128; // loop over blocks, 
starting with S, with step PERPETUAL_DISTRIBUTION_INTERVAL for i in (1..=h).step_by(distribution_interval as usize) { expected_balance += expected_emission(i, &dist); } - println!("expected balance at height {}: {}", h, expected_balance); + tracing::debug!("expected balance at height {}: {}", h, expected_balance); expected_balance.to_u64().unwrap_or_else(|| { - println!("ERR: overflow in expected balance at height {}", h); + tracing::error!("overflow in expected balance at height {}", h); 0 }) // to handle tests that overflow }) @@ -893,9 +955,10 @@ mod block_based_perpetual_step_decreasing { &claims, None, //Some(S), distribution_interval, + None, ) .inspect_err(|e| { - println!("{}", e); + tracing::error!("{}", e); }) } @@ -946,7 +1009,7 @@ mod block_based_perpetual_step_decreasing { let f_x = n as f64 * a.powi(b.to_i32().expect("overflow")); f_x.to_i128() .unwrap_or_else(|| { - println!("ERR: overflow in expected_emission({})", f_x); + tracing::error!("overflow in expected_emission({})", f_x); 0 }) .max(min_value) @@ -973,7 +1036,7 @@ mod block_based_perpetual_stepwise { let distribution_interval = 10; // claims: height, balance, expect_pass - let claims = [ + let steps = [ (10, 110_000, true), (11, 110_000, false), (20, 120_000, true), @@ -994,81 +1057,103 @@ mod block_based_perpetual_stepwise { check_heights( dist, - &claims, + &steps, None, //Some(S), distribution_interval, + None, ) .inspect_err(|e| { - println!("{}", e); + tracing::error!("{}", e); }) .expect("stepwise should pass"); } +} - // ===== HELPER FUNCTIONS ===== // +mod block_based_perpetual_linear { + use super::{test_suite::check_heights, INITIAL_BALANCE}; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; + use rust_decimal::prelude::ToPrimitive; + use test_case::test_case; - #[test] - fn stepwise_u64_max() { - let periods = BTreeMap::from([(0, u64::MAX)]); - let dist = DistributionFunction::Stepwise(periods); + 
#[test_case(DistributionFunction::Linear{ + a: 1, + d: 1, + start_step: None, + starting_amount: 100_000, + min_value: None, + max_value: None + }, + &[10 ], + 1 + ; "x=y")] + + fn test_linear(dist: DistributionFunction, heights: &[u64], distribution_interval: u64) { + // Linear distribution function + // + // # Formula + // The formula for the linear distribution function is: + + // ```text + // f(x) = (a * (x - start_moment) / d) + starting_amount + // ``` + // + let steps = heights + .iter() + .scan((INITIAL_BALANCE, 1), |(balance, last_height), &h| { + for i in (*last_height..=h).step_by(distribution_interval as usize) { + *balance += expected_emission(i, 1, 1, None, 100_000); + } + *last_height = h; + Some((h, *balance, true)) + }) + .collect::>(); check_heights( dist, - &[(100, 0, false)], + &steps, None, //Some(S), - 10, + distribution_interval, + None, ) .inspect_err(|e| { - println!("{}", e); + tracing::error!("{}", e); }) .expect("stepwise should pass"); } - #[test] - /// We check what happens if we start distribution before the first period. - fn stepwise_before_first_period() { - let periods = BTreeMap::from([(100, 10_000)]); - let dist = DistributionFunction::Stepwise(periods); - - // claims: height, balance, expect_pass - let claims = [ - (1, 100_000, true), // IMO we should be able to claim first 100_000 here so expect_pass == true - (9, 100_000, false), // TODO: claim should succeed here? To transfer this 100k? - // (10, 0, false), - // (11, 0, false), - // (20, 0, false), - // (99, 0, false), - (100, 100_000, false), - (101, 110_000, true), - (102, 110_000, false), - (111, 120_000, true), - (200, 200_000, true), - (209, 200_000, false), - ]; - check_heights(dist, &claims, None, 10) - .inspect_err(|e| { - println!("{}", e); - }) - .expect("stepwise should pass"); - } + /// Calculate expected emission at provided height. + /// + /// All calculations are done in i128 to better handle overflows. 
+ /// + /// ```text + /// f(x) = (a * (x - start_moment) / d) + starting_amount + /// ``` + fn expected_emission( + x: u64, + a: u64, + d: u64, + start_moment: Option, + starting_amount: u64, + ) -> u64 { + let x = x as i128; + let a = a as i128; + let d = d as i128; + let start_moment = start_moment.unwrap_or(0) as i128; + let starting_amount = starting_amount as i128; - #[test] - /// This test will overflow within 6 distributions - fn stepwise_overflow() { - let periods = BTreeMap::from([(10, u64::MAX / 5)]); - let dist = DistributionFunction::Stepwise(periods); + let f_x = if x < start_moment { + starting_amount + } else { + (a * (x - start_moment) / d + starting_amount).max(0) + }; - check_heights( - dist, - &[(10, 100_000, false), (11, 100_000, false)], - None, //Some(S), - 10, - ) - .inspect_err(|e| { - println!("{}", e); + f_x.to_u64().unwrap_or_else(|| { + tracing::error!("overflow in expected_emission({}), using 0", f_x); + 0 }) - .expect("stepwise should pass"); } } + mod test_suite { use super::*; use crate::rpc::core::MockCoreRPCLike; @@ -1100,7 +1185,7 @@ mod test_suite { let worker = rt.spawn_blocking(f); rt.block_on(async move { tokio::time::timeout(duration, worker).await }) - .map_err(|e| format!("test timed out after {:?}", TIMEOUT))? + .map_err(|_| format!("test timed out after {:?}", TIMEOUT))? .map_err(|e| format!("join error: {:?}", e))? 
} @@ -1114,6 +1199,7 @@ mod test_suite { /// * `claims` - heights at which claims will be made; they will see balance from previous height /// * `contract_start_time` - optional start time of the contract /// * `distribution_interval` - interval between distributions + /// * `max_supply` - optional max supply of the token; if Some(), it will override max supply in contract JSON definition /// /// Note that for conveniance, you can provide `steps` as a [`TestStep`] or a slice of tuples, where each tuple contains: /// * `height` - height at which claim will be made @@ -1125,6 +1211,7 @@ mod test_suite { steps: &[C], contract_start_time: Option, distribution_interval: u64, + max_supply: Option>, ) -> Result<(), String> { let mut suite = TestSuite::new( 10_200_000_000, @@ -1144,6 +1231,10 @@ mod test_suite { ))); }), ); + if let Some(max_supply) = max_supply { + suite = suite.with_max_suppy(max_supply); + } + if let Some(start) = contract_start_time { suite = suite.with_contract_start_time(start); } @@ -1156,8 +1247,9 @@ mod test_suite { with_timeout(TIMEOUT, move || suite.execute(&steps)) } + pub(super) type TokenConfigFn = dyn FnOnce(&mut TokenConfiguration) + Send + Sync; /// Test engine to run tests for different token distribution functions. - pub(crate) struct TestSuite { + pub(crate) struct TestSuite { platform: TempPlatform, platform_version: &'static PlatformVersion, identity: dpp::prelude::Identity, @@ -1167,7 +1259,7 @@ mod test_suite { contract: Option, start_time: Option, token_distribution_type: TokenDistributionType, - token_configuration_modification: Option, + token_configuration_modification: Option>, epoch_index: u16, nonce: u64, time_between_blocks: u64, @@ -1180,10 +1272,10 @@ mod test_suite { on_step_success: Box, } - impl TestSuite { + impl TestSuite { /// Create new test suite that will start at provided genesis time and create token contract with provided /// configuration. 
- pub(crate) fn new( + pub(crate) fn new( genesis_time_ms: u64, time_between_blocks: u64, token_distribution_type: TokenDistributionType, @@ -1195,12 +1287,14 @@ mod test_suite { .build_with_mock_rpc() .set_genesis_state(); + Self::setup_logs(); + let mut rng = StdRng::seed_from_u64(49853); let (identity, signer, identity_public_key) = setup_identity(&mut platform, rng.gen(), dash_to_credits!(0.5)); - Self { + let me = Self { platform, platform_version, identity, @@ -1213,10 +1307,61 @@ mod test_suite { epoch_index: 1, nonce: 1, time_between_blocks, - token_configuration_modification, + token_configuration_modification: None, // setup later on_step_success: Box::new(|_| {}), } - .with_genesis(1, genesis_time_ms) + .with_genesis(1, genesis_time_ms); + + if let Some(token_configuration_modification) = token_configuration_modification { + me.with_token_configuration_modification_fn(token_configuration_modification) + } else { + me + } + } + + /// Appends new token configuration modification function after existing ones. + pub(crate) fn with_token_configuration_modification_fn( + mut self, + token_configuration_modification: impl FnOnce(&mut TokenConfiguration) + + Send + + Sync + + 'static, + ) -> Self { + if let Some(previous) = self.token_configuration_modification.take() { + let f = Box::new(move |token_configuration: &mut TokenConfiguration| { + previous(token_configuration); + token_configuration_modification(token_configuration); + }); + + self.token_configuration_modification = Some(f); + } else { + // no previous modifications + let f = Box::new(token_configuration_modification); + self.token_configuration_modification = Some(f); + }; + + self + } + /// Appends a token configuration modification that will change max supply. 
+ pub(crate) fn with_max_suppy(self, max_supply: Option) -> Self { + self.with_token_configuration_modification_fn( + move |token_configuration: &mut TokenConfiguration| { + token_configuration.set_max_supply(max_supply); + }, + ) + } + + /// Enable logging for tests + fn setup_logs() { + tracing_subscriber::fmt::fmt() + .with_env_filter(tracing_subscriber::EnvFilter::new( + "info,dash_sdk=trace,dash_sdk::platform::fetch=debug,drive_proof_verifier=debug,main=debug,h2=info,drive_abci::execution=trace", + )) + .pretty() + .with_ansi(true) + .with_writer(std::io::stdout) + .try_init() + .ok(); } /// Lazily initialize and return token contract. Also sets token id. @@ -1452,25 +1597,32 @@ mod test_suite { self.epoch_index, false, ); - self.claim(test_case.claim_transition_assertions.clone()) - .map_err(|e| format!("claim failed: {}", e))?; + let mut result = Vec::new(); + if let Err(e) = self.claim(test_case.claim_transition_assertions.clone()) { + result.push(format!("claim failed: {}", e)) + } let balance = self .get_balance() .map_err(|e| format!("failed to get balance: {}", e))? .ok_or("expected balance to be present, but got None".to_string())?; - if let Some(expected_balance) = test_case.expected_balance { - if expected_balance != balance { - return Err(format!( - "expected balance {:?} but got {:?}", - test_case.expected_balance, balance - )); - } - }; - (self.on_step_success)(balance); + if test_case + .expected_balance + .is_some_and(|expected_balance| expected_balance != balance) + { + result.push(format!( + "expected balance {:?} but got {:?}", + test_case.expected_balance, balance + )); + } - Ok(()) + if result.is_empty() { + (self.on_step_success)(balance); + Ok(()) + } else { + Err(result.join("\n")) + } } } @@ -1498,24 +1650,45 @@ mod test_suite { /// /// If expected_balance is None, we don't check the balance. 
pub(super) fn new(claim_height: u64, expected_balance: u64, expect_success: bool) -> Self { + let trace_assertion: AssertionFn = |processing_results: &[_]| { + tracing::trace!( + "transaction assertion check for processing results: {:?}", + processing_results + ); + Ok(()) + }; let assertions: Vec = if expect_success { - vec![|processing_results: &[_]| match processing_results { - [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), - _ => Err(format!( - "expected SuccessfulExecution, got {:?}", - processing_results - )), - }] + vec![ + |processing_results: &[_]| { + tracing::trace!(?processing_results, "expect success"); + Ok(()) + }, + |processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => Ok(()), + _ => Err(format!( + "expected SuccessfulExecution, got {:?}", + processing_results + )), + }, + trace_assertion, + ] } else { - vec![|processing_results: &[_]| match processing_results { - [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => { - Err("expected error, got SuccessfulExecution".into()) - } - [StateTransitionExecutionResult::InternalError(e)] => { - Err(format!("expected normal error, got InternalError: {}", e)) - } - _ => Ok(()), - }] + vec![ + |processing_results: &[_]| { + tracing::trace!(?processing_results, "expect failure"); + Ok(()) + }, + |processing_results: &[_]| match processing_results { + [StateTransitionExecutionResult::SuccessfulExecution(_, _)] => { + Err("expected error, got SuccessfulExecution".into()) + } + [StateTransitionExecutionResult::InternalError(e)] => { + Err(format!("expected normal error, got InternalError: {}", e)) + } + _ => Ok(()), + }, + trace_assertion, + ] }; Self { name: format!("claim at height {}", claim_height), From a9cbf97ed7b8df08c5f5e97e524d8a6b0281bbd5 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Wed, 26 Mar 2025 16:33:28 +0100 Subject: [PATCH 13/21] chore: wip --- 
.../distribution/perpetual/block_based.rs | 309 ++++++++++++------ 1 file changed, 217 insertions(+), 92 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index da6f8e62a5f..6e60d3b6e34 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -656,8 +656,8 @@ mod block_based_perpetual_random { /// Then I get deterministic balances at those heights. #[test_matrix( 0, //min - 100,//max, - [None,Some(1_000_000)], // max_supply + 100,//max, + [None,Some(1_000_000)], // max_supply &[ TestStep::new(41, 100_192, true), TestStep::new(46, 100_192, false), @@ -695,6 +695,24 @@ mod block_based_perpetual_random { ) .expect("no rewards"); } + #[test] + fn fails_test_block_based_perpetual_random_0_max() { + check_heights( + DistributionFunction::Random { + min: 0, + max: u64::MAX, + }, + &[ + TestStep::new(41, INITIAL_BALANCE, false), + TestStep::new(50, INITIAL_BALANCE, false), + TestStep::new(100, INITIAL_BALANCE, false), + ], + None, + 10, + None, + ) + .expect("no rewards"); + } /// Given a random distribution function with min=10, max=30, /// When I claim tokens at various heights, @@ -822,18 +840,29 @@ mod block_based_perpetual_step_decreasing { Some(1),// min_value Some((1..1000).step_by(500).collect()),// claim_heights 1; // distribution_interval - "fail claim every 500 blocks" + "fails: claim every 500 blocks" )] - #[test_matrix( + #[test_case( + 1,// step_count + 101,// decrease_per_interval_numerator + 100,// decrease_per_interval_denominator + None,// s + 100_000,// n + 
Some(1),// min_value + Some((1..1000).step_by(100).collect()),// claim_heights + 1; // distribution_interval + "1% increase, claim every 100 blocks" + )] + #[test_case( 1,// step_count 101,// decrease_per_interval_numerator 100,// decrease_per_interval_denominator None,// s 100_000,// n Some(1),// min_value - [Some((1..1000).step_by(100).collect()),Some((1..1000).step_by(500).collect())],// claim_heights + Some((1..1000).step_by(500).collect()),// claim_heights 1; // distribution_interval - "1% increase, varying claim heights" + "fails: 1% increase, claim every 500 blocks" )] #[test_case( 1,// step_count @@ -844,19 +873,30 @@ mod block_based_perpetual_step_decreasing { Some(1),// min_value Some(vec![1,7]), // claim_heights 1; // distribution_interval - "fail 1000x increase, overflow" + "fails: 1000x increase, overflow" )] - #[test_case( + #[test_matrix( 1,// step_count 1,// decrease_per_interval_numerator 1,// decrease_per_interval_denominator None,// s 100_000,// n - Some(1),// min_value + [Some(1), Some(100)],// min_value Some(vec![1,2,3,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 1; // distribution_interval "100% decrease, various min values" )] + #[test_case( + 1,// step_count + 1,// decrease_per_interval_numerator + 1,// decrease_per_interval_denominator + None,// s + 100_000,// n + Some(u64::MAX),// min_value + Some(vec![1,2,3,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + 1; // distribution_interval + "fails: full decrease, min is u64::MAX" + )] #[test_matrix( 1,// step_count 0,// decrease_per_interval_numerator @@ -868,17 +908,43 @@ mod block_based_perpetual_step_decreasing { 1; // distribution_interval "no decrease, irrelevant min values" )] - #[test_matrix( - [1],// step_count + /// Given 100% decrease with step 10, when I claim below 10th block, then the claim is successful. 
+ #[test_case( + 10,// step_count 1,// decrease_per_interval_numerator 1,// decrease_per_interval_denominator None,// s 100_000,// n None,// min_value - Some(vec![7,10,20,100]), // claim_heights // ,300,500,800,1_000,1_000_000 - [5]; // distribution_interval - "no decrease, changing step" + Some(vec![2,7,9]), // claim_heights // ,300,500,800,1_000,1_000_000 + 1; // distribution_interval + "full decrease, step 10 interval 1" + )] + /// Given 100% decrease with step 10 starting at 5, when I claim below 15th block, then the claim is successful. + #[test_case( + 10,// step_count + 1,// decrease_per_interval_numerator + 1,// decrease_per_interval_denominator + Some(5),// s + 100_000,// n + None,// min_value + Some(vec![2,7,9,13,14]), // claim_heights // ,300,500,800,1_000,1_000_000 + 1; // distribution_interval + "full decrease, start 5 step 10 interval 1" )] + /// Given 100% decrease with step 10 starting at 5, when I claim at height 15, there are no new coins. + #[test_case( + 10,// step_count + 1,// decrease_per_interval_numerator + 1,// decrease_per_interval_denominator + Some(5),// s + 100_000,// n + None,// min_value + Some(vec![14,15]), // claim_heights // at 14 we zero out, at 15 nothing to claim + 1 // distribution_interval + => with |x:Result<(),String>| assert!(x.is_err_and(|s|s.contains("claim at height 15: claim failed"))) + ;"full decrease, start 5 step 10 interval 1 err at 15" + )] #[test_matrix( [5,10],// step_count 1,// decrease_per_interval_numerator @@ -886,19 +952,19 @@ mod block_based_perpetual_step_decreasing { None,// s 100_000,// n None,// min_value - Some(vec![5,10,20,100]), // claim_heights // ,300,500,800,1_000,1_000_000 + Some(vec![5,10,18,22,100]), // claim_heights [1,5]; // distribution_interval - "1/2 decrease, changing step" + "fails: 1/2 decrease, changing step" )] #[test_matrix( - [1,10],// step_count - 1,// decrease_per_interval_numerator - 2,// decrease_per_interval_denominator + 1,// step_count + 10,// decrease_per_interval_numerator 
+ 100,// decrease_per_interval_denominator [None,Some(1),Some(5)],// s 100_000,// n None,// min_value - Some(vec![5,10,100]), // claim_heights // ,300,500,800,1_000,1_000_000 - [1,5]; // distribution_interval + Some(vec![5,10,15,20]), // claim_heights // ,300,500,800,1_000,1_000_000 + 1; // distribution_interval "1/2 decrease, changing S" )] @@ -941,15 +1007,13 @@ mod block_based_perpetual_step_decreasing { }) .collect::>(); // we expect all tests to pass - let expect_pass = claim_heights.iter().map(|&_h| true).collect::>(); - let claims = claim_heights .iter() .zip(expected_balances.iter()) - .zip(expect_pass.iter()) - .map(|((&h, &b), &p)| (h, b, p)) + .map(|(&h, &b)| (h, b, true)) .collect::>(); + // we return Err(()) to make result comparision easier in test_case check_heights( dist, &claims, @@ -958,7 +1022,7 @@ mod block_based_perpetual_step_decreasing { None, ) .inspect_err(|e| { - tracing::error!("{}", e); + tracing::error!(e); }) } @@ -1023,7 +1087,7 @@ mod block_based_perpetual_stepwise { use std::collections::BTreeMap; #[test] - fn stepwise_correct() { + fn fails_stepwise_correct() { let periods = BTreeMap::from([ (0, 10_000), (20, 20_000), @@ -1037,16 +1101,20 @@ mod block_based_perpetual_stepwise { // claims: height, balance, expect_pass let steps = [ + (1, 100_000, false), + (9, 100_000, false), (10, 110_000, true), (11, 110_000, false), + (19, 110_000, false), (20, 120_000, true), + (21, 120_000, false), (24, 120_000, false), - (35, 140_000, true), + (35, 140_000, true), // since 20, we should get one more distribution of 20k at height 30 (39, 140_000, false), (46, 160_000, true), (49, 160_000, false), - (50, 180_000, true), - (51, 180_000, false), + (51, 180_000, true), + (52, 180_000, false), (70, 270_000, true), ( 1_000_000, @@ -1070,24 +1138,93 @@ mod block_based_perpetual_stepwise { } mod block_based_perpetual_linear { + use std::i64; + use super::{test_suite::check_heights, INITIAL_BALANCE}; use 
dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; use rust_decimal::prelude::ToPrimitive; - use test_case::test_case; + use test_case::{test_case, test_matrix}; - #[test_case(DistributionFunction::Linear{ - a: 1, - d: 1, - start_step: None, - starting_amount: 100_000, - min_value: None, - max_value: None - }, - &[10 ], - 1 - ; "x=y")] - - fn test_linear(dist: DistributionFunction, heights: &[u64], distribution_interval: u64) { + #[test_matrix( + 1,// a + 1, // d + [None,Some(0)], // start_step + 0, // starting_amount + [None,Some(0),Some(1)],// min_value + [None,Some(1000)],// max_value + &[(1,100_001,true),(2,100_003,true),(3,100_006,true),(10,100_055,true)], // heights + 1 // distribution_interval + ; "f(x)=x")] + + /// Given linear distribution with d=0, + /// When I create a token, + /// Then I get an error. + #[test_case( + 1,// a + 0, // d + None, // start_step + 100_000, // starting_amount + None, // min_value + None, // max_value + &[(10,100_000,false)], // heights + 1 // distribution_interval + ; "fails: divide by 0")] + /// Given linear distribution with d=MAX and starting amount of 1, + /// When I claim tokens, + /// Then I have only one success, and subsequent claims fail because the calculated distribution is lower than 1 + #[test_case( + 1,// a + u64::MAX, // d + None, // start_step + 0, // starting_amount + Some(0), // min_value + None, // max_value + &[(1,100_000,false),(20,100_000,false)], // heights + 1 // distribution_interval + ; "divide by u64::MAX")] + #[test_matrix( + [-1,-100000,i64::MIN],// a + 1, // d + None, // start_step + 0, // starting_amount + None, // min_value + None, // max_value + &[(1,100_000,false),(20,100_000,false)], // heights + 1 // distribution_interval + ; "negative a")] + + /// We expect failure when max < min + #[test_matrix( + 1,// a + 1, // d + None, // start_step + 0, // starting_amount + Some(100), // min_value + [Some(0),Some(99)], // max_value + 
&[(1,100_000,false),(20,100_000,false)], // heights + 1 // distribution_interval + ; "fails: max less than min")] + #[test_case( + 1,// a + 1, // d + None, // start_step + 0, // starting_amount + Some(10), // min_value + Some(10), // max_value + &[(1,100_010,true),(2,100_020,true),(10,100_100,true)], // heights + 1 // distribution_interval + ; "min eq max")] + + fn test_linear( + a: i64, + d: u64, + start_step: Option, + starting_amount: u64, + min_value: Option, + max_value: Option, + steps: &[(u64, u64, bool)], // height, expected balance, expect pass + distribution_interval: u64, + ) { // Linear distribution function // // # Formula @@ -1097,17 +1234,27 @@ mod block_based_perpetual_linear { // f(x) = (a * (x - start_moment) / d) + starting_amount // ``` // - let steps = heights - .iter() - .scan((INITIAL_BALANCE, 1), |(balance, last_height), &h| { - for i in (*last_height..=h).step_by(distribution_interval as usize) { - *balance += expected_emission(i, 1, 1, None, 100_000); - } - *last_height = h; - - Some((h, *balance, true)) - }) - .collect::>(); + let dist = DistributionFunction::Linear { + a, + d, + start_step, + starting_amount, + min_value, + max_value, + }; + // let steps = heights + // .iter() + // .scan((INITIAL_BALANCE, 1), |(balance, last_height), &h| { + // if *last_height > start_step.unwrap_or(1) { + // for i in (*last_height..=h).step_by(distribution_interval as usize) { + // *balance += expected_emission(i, a, d, start_step, starting_amount); + // } + // } + // *last_height = h; + + // Some((h, *balance, true)) + // }) + // .collect::>(); check_heights( dist, &steps, @@ -1120,38 +1267,6 @@ mod block_based_perpetual_linear { }) .expect("stepwise should pass"); } - - /// Calculate expected emission at provided height. - /// - /// All calculations are done in i128 to better handle overflows. 
- /// - /// ```text - /// f(x) = (a * (x - start_moment) / d) + starting_amount - /// ``` - fn expected_emission( - x: u64, - a: u64, - d: u64, - start_moment: Option, - starting_amount: u64, - ) -> u64 { - let x = x as i128; - let a = a as i128; - let d = d as i128; - let start_moment = start_moment.unwrap_or(0) as i128; - let starting_amount = starting_amount as i128; - - let f_x = if x < start_moment { - starting_amount - } else { - (a * (x - start_moment) / d + starting_amount).max(0) - }; - - f_x.to_u64().unwrap_or_else(|| { - tracing::error!("overflow in expected_emission({}), using 0", f_x); - 0 - }) - } } mod test_suite { @@ -1189,6 +1304,9 @@ mod test_suite { .map_err(|e| format!("join error: {:?}", e))? } + pub(super) fn contains(a: T, text: &str) -> bool { + a.to_string().contains(text) + } /// Check that claim results at provided heights are as expected, and that balances match expectations. /// /// Note we take i128 into expected_balances, as we want to be able to detect overflows. @@ -1235,9 +1353,7 @@ mod test_suite { suite = suite.with_max_suppy(max_supply); } - if let Some(start) = contract_start_time { - suite = suite.with_contract_start_time(start); - } + suite = suite.with_contract_start_time(contract_start_time.unwrap_or(1)); let steps = steps .iter() @@ -1482,7 +1598,8 @@ mod test_suite { pub(crate) fn get_balance(&mut self) -> Result, String> { let token_id = self.get_token_id().to_buffer(); - self.platform + let balance = self + .platform .drive .fetch_identity_token_balance( token_id, @@ -1490,7 +1607,10 @@ mod test_suite { None, self.platform_version, ) - .map_err(|e| format!("failed to fetch token balance: {}", e)) + .map_err(|e| format!("failed to fetch token balance: {}", e)); + + tracing::trace!("retrieved balance: {:?}", balance); + balance } /// Retrieve token balance for the identity and assert it matches expected value. 
@@ -1618,6 +1738,11 @@ mod test_suite { } if result.is_empty() { + tracing::trace!( + "step successful, base height: {}, balance: {}", + test_case.base_height, + balance + ); (self.on_step_success)(balance); Ok(()) } else { From 3adc24c994cfe6514d8dee03cb99ad3763b65b88 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Thu, 27 Mar 2025 14:02:20 +0100 Subject: [PATCH 14/21] chore(Dockerfile): snapshots_enabled = true --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 47e7e5a77fe..a688730baaf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -598,6 +598,7 @@ RUN apk add --no-cache libgcc libstdc++ ENV DB_PATH=/var/lib/dash/rs-drive-abci/db ENV REJECTIONS_PATH=/var/log/dash/rejected +ENV SNAPSHOTS_ENABLED=true RUN mkdir -p /var/log/dash \ /var/lib/dash/rs-drive-abci/db \ From b84b54b3a286808b88ce0b6d003cb5be65f5279d Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Thu, 27 Mar 2025 14:15:28 +0100 Subject: [PATCH 15/21] Revert "chore(Dockerfile): snapshots_enabled = true" This reverts commit c8502a370fb2b1d9ded0d451c0dc7fa84037d65a. 
--- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a688730baaf..47e7e5a77fe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -598,7 +598,6 @@ RUN apk add --no-cache libgcc libstdc++ ENV DB_PATH=/var/lib/dash/rs-drive-abci/db ENV REJECTIONS_PATH=/var/log/dash/rejected -ENV SNAPSHOTS_ENABLED=true RUN mkdir -p /var/log/dash \ /var/lib/dash/rs-drive-abci/db \ From 1061b52fd293fdb01cb91f9283ff1f57876916ea Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Thu, 3 Apr 2025 16:49:00 +0200 Subject: [PATCH 16/21] test: Polynomial --- .../distribution/perpetual/block_based.rs | 389 ++++++++++++++++-- 1 file changed, 362 insertions(+), 27 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 6e60d3b6e34..086ee9db356 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -88,7 +88,7 @@ mod perpetual_distribution_block { let processing_result = platform .platform .process_raw_state_transitions( - &vec![claim_serialized_transition.clone()], + &[claim_serialized_transition.clone()], &platform_state, &BlockInfo { time_ms: 10_200_100_000, @@ -156,7 +156,7 @@ mod perpetual_distribution_block { let processing_result = platform .platform .process_raw_state_transitions( - &vec![claim_serialized_transition.clone()], + &[claim_serialized_transition.clone()], &platform_state, &BlockInfo { time_ms: 10_200_100_000, @@ -226,7 +226,7 @@ mod perpetual_distribution_block { let processing_result = platform .platform 
.process_raw_state_transitions( - &vec![claim_serialized_transition.clone()], + &[claim_serialized_transition.clone()], &platform_state, &BlockInfo { time_ms: 10_200_100_000, @@ -337,7 +337,7 @@ mod perpetual_distribution_block { let processing_result = platform .platform .process_raw_state_transitions( - &vec![claim_serialized_transition.clone()], + &[claim_serialized_transition.clone()], &platform_state, &BlockInfo { time_ms: 10_200_100_000, @@ -459,7 +459,7 @@ mod perpetual_distribution_block { let processing_result = platform .platform .process_raw_state_transitions( - &vec![claim_serialized_transition.clone()], + &[claim_serialized_transition.clone()], &platform_state, &BlockInfo { time_ms: 10_200_100_000, @@ -1138,12 +1138,9 @@ mod block_based_perpetual_stepwise { } mod block_based_perpetual_linear { - use std::i64; - - use super::{test_suite::check_heights, INITIAL_BALANCE}; + use super::test_suite::check_heights; use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction; - use rust_decimal::prelude::ToPrimitive; - use test_case::{test_case, test_matrix}; + use test_case::test_matrix; #[test_matrix( 1,// a @@ -1242,22 +1239,10 @@ mod block_based_perpetual_linear { min_value, max_value, }; - // let steps = heights - // .iter() - // .scan((INITIAL_BALANCE, 1), |(balance, last_height), &h| { - // if *last_height > start_step.unwrap_or(1) { - // for i in (*last_height..=h).step_by(distribution_interval as usize) { - // *balance += expected_emission(i, a, d, start_step, starting_amount); - // } - // } - // *last_height = h; - - // Some((h, *balance, true)) - // }) - // .collect::>(); + check_heights( dist, - &steps, + steps, None, //Some(S), distribution_interval, None, @@ -1269,6 +1254,358 @@ mod block_based_perpetual_linear { } } +mod block_based_perpetual_polynomial { + use dpp::data_contract::{ + associated_token::{ + token_configuration::accessors::v0::TokenConfigurationV0Getters, + 
token_distribution_key::TokenDistributionType, + token_distribution_rules::accessors::v0::TokenDistributionRulesV0Setters, + token_perpetual_distribution::{ + distribution_function::DistributionFunction::{self, Polynomial}, + distribution_recipient::TokenDistributionRecipient, + reward_distribution_type::RewardDistributionType, + v0::TokenPerpetualDistributionV0, + TokenPerpetualDistribution, + }, + }, + TokenConfiguration, + }; + use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; + use super::test_suite::{check_heights, TestStep, TestSuite}; + + /// Calculates f(x) = (a * (x - s + o)^(m/n)) / d + b + + #[test_case::test_matrix([1,2,10,20])] + + fn test_fx(x_max: i128) { + let a: i128 = 1; + let d: i128 = 1; + let m: i128 = 1; + let n: i128 = 1; + let o: i128 = 1; + let s: i128 = 0; + let b: i128 = 100_000; + + let mut sum = 0; + for x in 1i128..=x_max { + // f(x) = (a * (x - s + o)^(m/n)) / d + b + let f_x = (a * (x - s + o).pow((m / n) as u32)) / d + b; + sum += f_x; + println!("f({}) = {}", x, f_x); + } + + println!("SUM({}) = {}", n, sum); + } + + #[test_case::test_case( + Polynomial { + a: 1, + d: 1, + m: 1, + n: 1, + o: 1, + start_moment: Some(1), + b: 100_000, + min_value: None, + max_value: None, + }, + &[ + (10,1_100_055,true), + (20,2_100_210,true), + ], // steps + 1; // distribution_interval + "ones")] + + /// Divide by 0 + /// claim at height 10: claim failed: assertion 1 failed: expected SuccessfulExecution, got + /// [InternalError(\"storage: protocol: divide by zero error: Polynomial function: divisor d is 0\")]\nexpected balance Some(1100055) but got 100000\n\n--> + #[test_case::test_case( + Polynomial { + a: 1, + d: 0, + m: 1, + n: 1, + o: 1, + start_moment: Some(1), + b: 100_000, + min_value: None, + max_value: None, + }, + &[ + (10,1_100_055,true), + (20,2_100_210,true), + ], // steps + 1; // distribution_interval + "fails: divide by 0")] + #[test_case::test_case( + Polynomial { + a: 1, + d: 1, + m: 1, 
+ n: 1, + o: 1, + start_moment: Some(1), + b: 100_000, + min_value: Some(100_000), + max_value: Some(10_000), + }, + &[ + (10,100_000,false), + (20,100_000,false), + ], // steps + 1 // distribution_interval + ; "max < min should fail")] + #[test_case::test_case( + Polynomial { + a: -1, + d: 1, + m: 1, + n: 1, + o: 1, + start_moment: Some(1), + b: 100_000, + min_value: None, + max_value: None, + }, + &[ + (1,199_999,true), + (4,499_990,true), + ], // steps + 1 // distribution_interval + ; "negative a")] + + #[test_case::test_case( + Polynomial { + a: i64::MIN, + d: 1, + m: 1, + n: 1, + o: 1, + start_moment: Some(1), + b: 100_000, + min_value: None, + max_value: None, + }, + &[ + (1,100_000,false), + (4,100_000,true), + ], // steps + 1 // distribution_interval + ; "fails: a=i64::MIN")] + + #[test_case::test_case( + Polynomial { + a: -1, + d: 1, + m: 1, + n: 1, + o: 1, + start_moment: Some(1), + b: 0, + min_value: None, + max_value: None, + }, + &[ + (1,100_000,false), + (4,100_000,false), + ], // steps + 1 // distribution_interval + ; "a=-1 b=0")] + + #[test_case::test_case( + Polynomial { + a: 1, + d: 1, + m: 1, + n: 1, + o: i64::MIN, + start_moment: Some(1), + b: 0, + min_value: None, + max_value: None, + }, + &[ + (1,100_000,false), + (4,100_000,false), + ], // steps + 1 // distribution_interval + ; "o=i64::MIN")] + + #[test_case::test_case( + Polynomial { + a: 1, + d: 1, + m: 1, + n: 1, + o: i64::MAX, + start_moment: Some(1), + b: 0, + min_value: None, + max_value: None, + }, + &[ + (1,100_000,false), + (4,100_000,false), + ], // steps + 1 // distribution_interval + ; "o=i64::MAX")] + + #[test_case::test_case( + Polynomial { + a: 1, + d: 1, + m: -1, + n: 1, + o: 0, + start_moment: Some(1), + b: 0, + min_value: None, + max_value: None, + }, + &[ + (1,100_000,false), // this should fail, 0.pow(-1) is unspecified + (2,100_001,true), // it's 1.pow(-1) but not sure about handling of overflow at prev height + ], // steps + 1 // distribution_interval + ; "0.pow(-1) at 
h=1")] + + #[test_case::test_case( + Polynomial { + a: 1, + d: 1, + m: 1, + n: 2, + o: 0, + start_moment: Some(1), + b: 0, + min_value: None, + max_value: None, + }, + &[ + (1,100_000,false), // this should fail, 0.pow(-1) is unspecified + (2,100_001,true), // it's 1.pow(1/2) == 1 + (3,100_002,true), // 2.pow(1/2) == 1.41 - should round to 1 + (4,100_004,true), // 3.pow(1/2) == 1.73 - should round to 2; FAILS + (5,100_006,true), // 4.pow(1/2) == 2 + (6,100_008,true), // 5.pow(1/2) == 2.23 - should round to 2 + ], // steps + 1 // distribution_interval + ; "0.pow(1/2) at h=1")] + + #[test_case::test_case( + Polynomial { + a: 1, + d: 1, + m: 2, + n: 1, + o: i64::MAX, + start_moment: Some(1), + b: 0, + min_value: None, + max_value: None, + }, + &[ + (1,100_000,false), + (10,100_000,false), + ], // steps + 1 // distribution_interval + ; "fails: o=i64::MAX m=2")] + /// Test polynomial distribution function. + /// + /// `f(x) = (a * (x - s + o)^(m/n)) / d + b` + fn test_polynomial( + dist: DistributionFunction, + steps: &[(u64, u64, bool)], // height, expected balance, expect pass + distribution_interval: u64, + ) -> Result<(), String> { + check_heights( + dist, + steps, + None, //Some(S), + distribution_interval, + None, + ) + .inspect_err(|e| { + tracing::error!("{}", e); + }) + } + + #[test_case::test_matrix( + [i64::MIN,0,1,i64::MAX],// m + [0,1,u64::MAX] // n + ; "power m/n" + )] + // due to bug in test_matrix https://github.com/frondeus/test-case/issues/19, we need separate test for -1 + #[test_case::test_matrix( + -1,// m + [0,1,u64::MAX] // n + ; "negative power -1/n" + )] + /// Test various combinations of `m/n` in [DistributionFunction::Polynomial] distribution. + /// + /// We expect this test not to end with InternalError. 
+ fn test_poynomial_power(m: i64, n: u64) { + let dist = Polynomial { + a: 1, + d: 1, + m, + n, + o: 1, + start_moment: Some(1), + b: 100_000, + min_value: None, + max_value: None, + }; + + let mut suite = TestSuite::new( + 10_200_000_000, + 0, + TokenDistributionType::Perpetual, + Some(move |token_configuration: &mut TokenConfiguration| { + token_configuration + .distribution_rules_mut() + .set_perpetual_distribution(Some(TokenPerpetualDistribution::V0( + TokenPerpetualDistributionV0 { + distribution_type: RewardDistributionType::BlockBasedDistribution { + interval: 1, + function: dist, + }, + distribution_recipient: TokenDistributionRecipient::ContractOwner, + }, + ))); + }), + ); + + suite = suite.with_contract_start_time(1); + + let step = TestStep { + base_height: 10, + base_time_ms: Default::default(), + expected_balance: None, + claim_transition_assertions: vec![ + |results: &[StateTransitionExecutionResult]| -> Result<(), String> { + let err = results + .iter() + .find(|r| format!("{:?}", r).contains("InternalError")); + + if let Some(e) = err { + Err(format!("InternalError: {:?}", e)) + } else { + Ok(()) + } + }, + ], + name: "test".to_string(), + }; + + suite + .execute(&[step]) + .inspect_err(|e| { + tracing::error!("{}", e); + }) + .expect("test should pass"); + } +} mod test_suite { use super::*; use crate::rpc::core::MockCoreRPCLike; @@ -1304,9 +1641,7 @@ mod test_suite { .map_err(|e| format!("join error: {:?}", e))? } - pub(super) fn contains(a: T, text: &str) -> bool { - a.to_string().contains(text) - } + /// Check that claim results at provided heights are as expected, and that balances match expectations. /// /// Note we take i128 into expected_balances, as we want to be able to detect overflows. 
From fe48a9e584cc04624295be1cf43735f258634f0c Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Fri, 4 Apr 2025 12:41:46 +0200 Subject: [PATCH 17/21] test: Logarithmic --- .../distribution/perpetual/block_based.rs | 278 ++++++++++++++++-- 1 file changed, 258 insertions(+), 20 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 086ee9db356..daa1e733f5a 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -952,7 +952,7 @@ mod block_based_perpetual_step_decreasing { None,// s 100_000,// n None,// min_value - Some(vec![5,10,18,22,100]), // claim_heights + Some(vec![5,10,18,22,100]), // claim_heights [1,5]; // distribution_interval "fails: 1/2 decrease, changing step" )] @@ -1255,6 +1255,8 @@ mod block_based_perpetual_linear { } mod block_based_perpetual_polynomial { + use super::test_suite::{check_heights, TestStep, TestSuite}; + use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; use dpp::data_contract::{ associated_token::{ token_configuration::accessors::v0::TokenConfigurationV0Getters, @@ -1270,8 +1272,6 @@ mod block_based_perpetual_polynomial { }, TokenConfiguration, }; - use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; - use super::test_suite::{check_heights, TestStep, TestSuite}; /// Calculates f(x) = (a * (x - s + o)^(m/n)) / d + b @@ -1367,13 +1367,12 @@ mod block_based_perpetual_polynomial { min_value: None, max_value: None, }, - 
&[ + &[ (1,199_999,true), (4,499_990,true), ], // steps 1 // distribution_interval ; "negative a")] - #[test_case::test_case( Polynomial { a: i64::MIN, @@ -1392,7 +1391,6 @@ mod block_based_perpetual_polynomial { ], // steps 1 // distribution_interval ; "fails: a=i64::MIN")] - #[test_case::test_case( Polynomial { a: -1, @@ -1405,13 +1403,12 @@ mod block_based_perpetual_polynomial { min_value: None, max_value: None, }, - &[ + &[ (1,100_000,false), (4,100_000,false), ], // steps 1 // distribution_interval ; "a=-1 b=0")] - #[test_case::test_case( Polynomial { a: 1, @@ -1424,13 +1421,12 @@ mod block_based_perpetual_polynomial { min_value: None, max_value: None, }, - &[ + &[ (1,100_000,false), (4,100_000,false), ], // steps 1 // distribution_interval ; "o=i64::MIN")] - #[test_case::test_case( Polynomial { a: 1, @@ -1443,13 +1439,12 @@ mod block_based_perpetual_polynomial { min_value: None, max_value: None, }, - &[ + &[ (1,100_000,false), (4,100_000,false), ], // steps 1 // distribution_interval ; "o=i64::MAX")] - #[test_case::test_case( Polynomial { a: 1, @@ -1462,13 +1457,12 @@ mod block_based_perpetual_polynomial { min_value: None, max_value: None, }, - &[ + &[ (1,100_000,false), // this should fail, 0.pow(-1) is unspecified (2,100_001,true), // it's 1.pow(-1) but not sure about handling of overflow at prev height ], // steps 1 // distribution_interval - ; "0.pow(-1) at h=1")] - + ; "0.pow(-1) at h=1")] #[test_case::test_case( Polynomial { a: 1, @@ -1481,7 +1475,7 @@ mod block_based_perpetual_polynomial { min_value: None, max_value: None, }, - &[ + &[ (1,100_000,false), // this should fail, 0.pow(-1) is unspecified (2,100_001,true), // it's 1.pow(1/2) == 1 (3,100_002,true), // 2.pow(1/2) == 1.41 - should round to 1 @@ -1491,7 +1485,6 @@ mod block_based_perpetual_polynomial { ], // steps 1 // distribution_interval ; "0.pow(1/2) at h=1")] - #[test_case::test_case( Polynomial { a: 1, @@ -1504,9 +1497,9 @@ mod block_based_perpetual_polynomial { min_value: None, max_value: 
None, }, - &[ + &[ (1,100_000,false), - (10,100_000,false), + (10,100_000,false), ], // steps 1 // distribution_interval ; "fails: o=i64::MAX m=2")] @@ -1606,6 +1599,252 @@ mod block_based_perpetual_polynomial { .expect("test should pass"); } } +mod block_based_perpetual_logarithmic { + + use super::test_suite::check_heights; + use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction::{self,Logarithmic}; + use test_case::{test_matrix,test_case}; + + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_001,true), // log(0)+1 = 1 + (2,100_002,true), // log(1)+1 = 1 + (3,100_003,true), // log(3)+1 = 1 + (4,100_005,true), // log(4)+1 = 2 (log(4) == 0.6, rounded up to 1) + ], + 1 + ; "fails: ones - use of ln instead of log as documented" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 0, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[(2,100_002,false)], + 1 + ; "fails: divide by 0" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 0, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[(1,100_001,true),(5,100_001,true)], + 1 + ; "fails: log(0)" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:Some(10), // min_value: Option, + 
max_value:Some(10), // max_value: Option, + }, + &[(1,100_010,true),(5,100_050,true)], + 1 + ; "min eq max means linear" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:Some(10), // min_value: Option, + max_value:Some(10), // max_value: Option, + }, + &[(5,100_010,true),(10,100_020,true)], + 5 + ; "min eq max means linear, interval 5" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:Some(10), // min_value: Option, + max_value:Some(5), // max_value: Option, + }, + &[(5,100_000,false),(10,100_000,false)], + 5 + ; "fails: min gt max" + )] + #[test_case( + Logarithmic{ + a: i64::MIN, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), // f(1) should be < 0, is 1 + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: a=i64::MIN" + )] + #[test_case( + Logarithmic{ + a: i64::MAX, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: a=i64::MAX overflows" + )] + #[test_case( + Logarithmic{ + a: 0, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + 
}, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "a=0 b=0" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: -10, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: log(negative)" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: i64::MIN, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: o=i64::MIN" + )] + #[test_case( + Logarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: u64::MAX, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: b=u64::MAX" + )] + /// f(x) = (a * log(m * (x - s + o) / n)) / d + b + fn test_logarithmic( + dist: DistributionFunction, + steps: &[(u64, u64, bool)], // height, expected balance, expect pass + distribution_interval: u64, + ) -> Result<(), String> { + check_heights( + dist, + steps, + None, //Some(S), + distribution_interval, + None, + ) + .inspect_err(|e| { + tracing::error!("{}", e); + }) + } +} + mod test_suite { use super::*; use crate::rpc::core::MockCoreRPCLike; @@ -1641,7 +1880,6 @@ mod test_suite { .map_err(|e| format!("join error: {:?}", e))? } - /// Check that claim results at provided heights are as expected, and that balances match expectations. 
/// /// Note we take i128 into expected_balances, as we want to be able to detect overflows. From 9045516980ff88b7aff714a422d143cb23917780 Mon Sep 17 00:00:00 2001 From: Lukasz Klimek <842586+lklimek@users.noreply.github.com> Date: Fri, 4 Apr 2025 13:41:26 +0200 Subject: [PATCH 18/21] test: InvertedLogarithmic --- .../distribution/perpetual/block_based.rs | 279 +++++++++++++++++- 1 file changed, 278 insertions(+), 1 deletion(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index daa1e733f5a..193a44921f1 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -1599,12 +1599,28 @@ mod block_based_perpetual_polynomial { .expect("test should pass"); } } + mod block_based_perpetual_logarithmic { use super::test_suite::check_heights; use dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction::{self,Logarithmic}; use test_case::{test_matrix,test_case}; - + #[test_case( + Logarithmic{ + a: 0, // a: i64, + d: 0, // d: u64, + m: 0, // m: u64, + n: 0, // n: u64, + o: 0, // o: i64, + start_moment:Some(0), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[(4,100_000,true)], + 1 + ; "zeros" + )] #[test_case( Logarithmic{ a: 1, // a: i64, @@ -1845,6 +1861,267 @@ mod block_based_perpetual_logarithmic { } } +mod block_based_perpetual_inverted_logarithmic { + use super::test_suite::check_heights; + use 
dpp::data_contract::associated_token::token_perpetual_distribution::distribution_function::DistributionFunction::{self,InvertedLogarithmic}; + use test_case::{test_matrix,test_case}; + + #[test_case( + InvertedLogarithmic{ + a: 0, // a: i64, + d: 0, // d: u64, + m: 0, // m: u64, + n: 0, // n: u64, + o: 0, // o: i64, + start_moment:Some(0), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[(4,100_000,true)], + 1 + ; "fails: zeros" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_001,true), + (2,100_002,true), + (3,100_003,true), + (4,100_005,true), // [InternalError("storage: protocol: divide by zero error: InvertedLogarithmic: divisor d is 0")] + ], + 1 + ; "fails: ones" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 0, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[(2,100_002,false)], + 1 + ; "fails: divide by 0" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 0, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[(1,100_001,true),(5,100_001,true)], + 1 + ; "n=0 log(0)" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:Some(10), // min_value: 
Option, + max_value:Some(10), // max_value: Option, + }, + &[(1,100_010,true),(5,100_050,true)], + 1 + ; "min eq max means linear" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:Some(10), // min_value: Option, + max_value:Some(10), // max_value: Option, + }, + &[(5,100_010,true),(10,100_020,true)], + 5 + ; "min eq max means linear, interval 5" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:Some(10), // min_value: Option, + max_value:Some(5), // max_value: Option, + }, + &[(5,100_000,false),(10,100_000,false)], + 5 + ; "fails: min gt max" + )] + #[test_case( + InvertedLogarithmic{ + a: i64::MIN, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), // f(1) should be < 0, is 1 + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: a=i64::MIN" + )] + #[test_case( + InvertedLogarithmic{ + a: i64::MAX, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 1, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_001,true), // f(x) = 0 for x>1 + (9,100_001,false), + (10,100_001,false), + ], + 1 + ; "a=i64::MAX" + )] + #[test_case( + InvertedLogarithmic{ + a: 0, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // 
min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "a=0 b=0" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: -10, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: log(negative)" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: i64::MIN, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: 0, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: o=i64::MIN" + )] + #[test_case( + InvertedLogarithmic{ + a: 1, // a: i64, + d: 1, // d: u64, + m: 1, // m: u64, + n: 1, // n: u64, + o: 1, // o: i64, + start_moment:Some(1), // start_moment: Option, + b: u64::MAX, // b: TokenAmount, + min_value:None, // min_value: Option, + max_value:None, // max_value: Option, + }, + &[ + (1,100_000,false), + (9,100_000,false), + (10,100_000,false) + ], + 1 + ; "fails: b=u64::MAX" + )] + /// f(x) = (a * log( n / (m * (x - s + o)) )) / d + b + fn test_inverted_logarithmic( + dist: DistributionFunction, + steps: &[(u64, u64, bool)], // height, expected balance, expect pass + distribution_interval: u64, + ) -> Result<(), String> { + check_heights( + dist, + steps, + None, //Some(S), + distribution_interval, + None, + ) + .inspect_err(|e| { + tracing::error!("{}", e); + }) + } +} + mod test_suite { use super::*; use crate::rpc::core::MockCoreRPCLike; From 2fe0e713ba63e250fdbf558a145e535916d9e482 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Thu, 10 Apr 2025 19:03:56 +0700 Subject: [PATCH 
19/21] clean up --- .../identity_by_non_unique_public_key_hash/mod.rs | 5 ++--- .../v0/mod.rs | 2 +- packages/rs-drive/src/drive/identity/fetch/queries/mod.rs | 3 ++- .../drive_abci_versions/drive_abci_query_versions/mod.rs | 1 + .../drive_abci_versions/drive_abci_query_versions/v1.rs | 5 +++++ packages/rs-platform-version/src/version/mocks/v2_test.rs | 5 +++++ 6 files changed, 16 insertions(+), 5 deletions(-) diff --git a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs index 9a1c7c104d5..ff0596e1c37 100644 --- a/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs +++ b/packages/rs-drive-abci/src/query/identity_based_queries/identity_by_non_unique_public_key_hash/mod.rs @@ -27,13 +27,12 @@ impl Platform { ), )); }; - // TODO why `identity_by_unique_public_key_hash`? - // Shouldn't we rename or add new field like `identity_by_non_unique_public_key_hash`? 
+ let feature_version_bounds = &platform_version .drive_abci .query .identity_based_queries - .identity_by_unique_public_key_hash; + .identity_by_non_unique_public_key_hash; let feature_version = match &version { RequestVersion::V0(_) => 0, diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs index 93c8d26c519..54aefbea144 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -61,7 +61,7 @@ mod tests { use dpp::identity::accessors::IdentityGettersV0; use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; use dpp::identity::identity_public_key::methods::hash::IdentityPublicKeyHashMethodsV0; - use dpp::identity::{Identity, IdentityPublicKey, KeyType}; + use dpp::identity::{Identity, IdentityPublicKey}; use dpp::version::PlatformVersion; use rand::prelude::StdRng; use rand::SeedableRng; diff --git a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs index 846a260eee1..258ea68153d 100644 --- a/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/queries/mod.rs @@ -102,7 +102,8 @@ impl Drive { let sub_query = if let Some(after) = after { Query::new_single_query_item(QueryItem::RangeAfter(after.to_vec()..)) } else { - // TODO: why not limit 1? + // We do range full because this sub query can get multiple identities + // as they are non unique. 
Query::new_range_full() }; query.set_subquery(sub_query); diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs index ac3412fd85e..77fd77eab74 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/mod.rs @@ -53,6 +53,7 @@ pub struct DriveAbciQueryIdentityVersions { pub identities_balances: FeatureVersionBounds, pub balance_and_revision: FeatureVersionBounds, pub identity_by_unique_public_key_hash: FeatureVersionBounds, + pub identity_by_non_unique_public_key_hash: FeatureVersionBounds, } #[derive(Clone, Debug, Default)] diff --git a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs index 829c9a218d2..a4569052568 100644 --- a/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs +++ b/packages/rs-platform-version/src/version/drive_abci_versions/drive_abci_query_versions/v1.rs @@ -72,6 +72,11 @@ pub const DRIVE_ABCI_QUERY_VERSIONS_V1: DriveAbciQueryVersions = DriveAbciQueryV max_version: 0, default_current_version: 0, }, + identity_by_non_unique_public_key_hash: FeatureVersionBounds { + min_version: 0, + max_version: 0, + default_current_version: 0, + }, }, token_queries: DriveAbciQueryTokenVersions { identity_token_balances: FeatureVersionBounds { diff --git a/packages/rs-platform-version/src/version/mocks/v2_test.rs b/packages/rs-platform-version/src/version/mocks/v2_test.rs index f3e5c2c6624..f22fb96bf11 100644 --- a/packages/rs-platform-version/src/version/mocks/v2_test.rs +++ b/packages/rs-platform-version/src/version/mocks/v2_test.rs @@ -210,6 +210,11 @@ pub const TEST_PLATFORM_V2: PlatformVersion 
= PlatformVersion { max_version: 0, default_current_version: 0, }, + identity_by_non_unique_public_key_hash: FeatureVersionBounds { + min_version: 0, + max_version: 0, + default_current_version: 0, + }, }, token_queries: DriveAbciQueryTokenVersions { identity_token_balances: FeatureVersionBounds { From c5e1de2d7154dfe134869cb22c55bb1d9de91ea5 Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Fri, 18 Apr 2025 06:07:37 +0700 Subject: [PATCH 20/21] fmt --- .../distribution/perpetual/block_based.rs | 150 +++++++++--------- 1 file changed, 75 insertions(+), 75 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs index 9a656df291e..0a356690b5d 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/batch/tests/token/distribution/perpetual/block_based.rs @@ -77,7 +77,7 @@ mod perpetual_distribution_block { None, None, ) - .expect("expect to create documents batch transition"); + .expect("expect to create documents batch transition"); let claim_serialized_transition = claim_transition .serialize_to_bytes() @@ -145,7 +145,7 @@ mod perpetual_distribution_block { None, None, ) - .expect("expect to create documents batch transition"); + .expect("expect to create documents batch transition"); let claim_serialized_transition = claim_transition .serialize_to_bytes() @@ -215,7 +215,7 @@ mod perpetual_distribution_block { None, None, ) - .expect("expect to create documents batch transition"); + .expect("expect to create documents batch transition"); let claim_serialized_transition = claim_transition .serialize_to_bytes() @@ -326,7 +326,7 @@ mod 
perpetual_distribution_block { None, None, ) - .expect("expect to create documents batch transition"); + .expect("expect to create documents batch transition"); let claim_serialized_transition = claim_transition .serialize_to_bytes() @@ -448,7 +448,7 @@ mod perpetual_distribution_block { None, None, ) - .expect("expect to create documents batch transition"); + .expect("expect to create documents batch transition"); let claim_serialized_transition = claim_transition .serialize_to_bytes() @@ -546,7 +546,7 @@ mod fixed_amount { 10, None, ) - .expect("\n-> fixed amount should pass"); + .expect("\n-> fixed amount should pass"); } /// Test case for overflow error. @@ -582,7 +582,7 @@ mod fixed_amount { 10, None, ) - .expect("\n-> fixed amount should pass"); + .expect("\n-> fixed amount should pass"); } #[test] @@ -597,7 +597,7 @@ mod fixed_amount { 10, None, ) - .expect_err("\namount should not be 0\n"); + .expect_err("\namount should not be 0\n"); } #[test] @@ -625,7 +625,7 @@ mod fixed_amount { 10, Some(Some(200_000)), ) - .expect("\nfixed amount zero increase\n"); + .expect("\nfixed amount zero increase\n"); } /// Given a fixed amount distribution with value of u64::MAX, @@ -640,7 +640,7 @@ mod fixed_amount { 10, None, ) - .expect_err("u64::Max is too much for DistributionFunction::FixedAmount"); + .expect_err("u64::Max is too much for DistributionFunction::FixedAmount"); } /// Given a fixed amount distribution with value of u64::MAX, @@ -661,7 +661,7 @@ mod fixed_amount { 10, None, ) - .expect("MAX_DISTRIBUTION_PARAM should be valid DistributionFunction::FixedAmount"); + .expect("MAX_DISTRIBUTION_PARAM should be valid DistributionFunction::FixedAmount"); } } mod random { @@ -735,7 +735,7 @@ mod random { 10, None, ) - .expect("no rewards"); + .expect("no rewards"); } #[test] #[ignore] @@ -750,7 +750,7 @@ mod random { 10, None, ) - .expect_err("max is too much for DistributionFunction::Random"); + .expect_err("max is too much for DistributionFunction::Random"); } 
#[test] @@ -770,7 +770,7 @@ mod random { 10, None, ) - .expect("no rewards"); + .expect("no rewards"); } /// Given a random distribution function with min=10, max=30, @@ -814,9 +814,9 @@ mod random { ))); }), ) - .with_step_success_fn(move |balance: u64| { - balances.lock().unwrap().push(balance); - }); + .with_step_success_fn(move |balance: u64| { + balances.lock().unwrap().push(balance); + }); suite.execute(&tests).expect("should execute"); @@ -995,7 +995,7 @@ mod step_decreasing { INITIAL_BALANCE + 9_900 + 9_801 + 9_702 + 9_604, ], ) - .expect("expected to succeed"); + .expect("expected to succeed"); } #[test] @@ -1028,7 +1028,7 @@ mod step_decreasing { + 8_946, ], ) - .expect("expected to succeed"); + .expect("expected to succeed"); } #[test] @@ -1046,7 +1046,7 @@ mod step_decreasing { 1, vec![], ) - .expect_err("should not allow to increase"); + .expect_err("should not allow to increase"); assert!( result_str.contains("Invalid parameter tuple in token distribution function: `decrease_per_interval_numerator` must be smaller than `decrease_per_interval_denominator`"), "Unexpected panic message: {result_str}" @@ -1068,7 +1068,7 @@ mod step_decreasing { 1, vec![], ) - .expect_err("should not allow to increase"); + .expect_err("should not allow to increase"); assert!( result_str.contains("Invalid parameter `decrease_per_interval_numerator` in token distribution function. 
Expected range: 1 to 65535"), "Unexpected panic message: {result_str}" @@ -1091,7 +1091,7 @@ mod step_decreasing { 1, sum_till_for_100k_step_1_interval_1(steps), ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1119,7 +1119,7 @@ mod step_decreasing { 1, expected_amounts, ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1148,7 +1148,7 @@ mod step_decreasing { 1, expected_amounts, ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1178,7 +1178,7 @@ mod step_decreasing { 1, expected_amounts, ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1205,7 +1205,7 @@ mod step_decreasing { 1, expected_amounts, ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1226,8 +1226,8 @@ mod step_decreasing { 1, expected_amounts.clone(), ) - .map_err(|e| format!("failed with min {}: {}", min, e)) - .expect("should pass"); + .map_err(|e| format!("failed with min {}: {}", min, e)) + .expect("should pass"); } } @@ -1255,8 +1255,8 @@ mod step_decreasing { 1, expected_amounts, ) - .map_err(|e| format!("failed with min {}: {}", min, e)) - .expect("should pass"); + .map_err(|e| format!("failed with min {}: {}", min, e)) + .expect("should pass"); } } @@ -1275,7 +1275,7 @@ mod step_decreasing { 1, vec![], ) - .expect_err("should fail"); + .expect_err("should fail"); assert!( result_str.contains("Invalid parameter tuple in token distribution function: `n` must be greater than or equal to `min_value`"), "Unexpected panic message: {result_str}" @@ -1300,7 +1300,7 @@ mod step_decreasing { MAX_DISTRIBUTION_PARAM * 10 + INITIAL_BALANCE, ], ) - .expect("should succeed"); + .expect("should succeed"); } #[test] @@ -1328,7 +1328,7 @@ mod step_decreasing { 1, expected_balances, ) - .expect("should succeed"); + .expect("should succeed"); } #[test] @@ -1346,7 +1346,7 @@ mod step_decreasing { 1, vec![], ) - .expect_err("should fail"); + .expect_err("should fail"); assert!( result_str.contains("Invalid parameter `n` in 
token distribution function. Expected range: 1 to 281474976710655"), "Unexpected panic message: {result_str}" @@ -1373,7 +1373,7 @@ mod step_decreasing { distribution_interval, expected_balances, ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1396,7 +1396,7 @@ mod step_decreasing { distribution_interval, expected_balances, ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1420,7 +1420,7 @@ mod step_decreasing { distribution_interval, expected_balances, ) - .expect("should pass"); + .expect("should pass"); } #[test] @@ -1458,7 +1458,7 @@ mod step_decreasing { distribution_interval, expected_balances, ) - .expect("should pass"); + .expect("should pass"); } /// Test various combinations of [DistributionFunction::StepDecreasingAmount] distribution. @@ -1513,9 +1513,9 @@ mod step_decreasing { distribution_interval, None, ) - .inspect_err(|e| { - tracing::error!(e); - }) + .inspect_err(|e| { + tracing::error!(e); + }) } } @@ -1567,10 +1567,10 @@ mod stepwise { distribution_interval, None, ) - .inspect_err(|e| { - tracing::error!("{}", e); - }) - .expect("stepwise should pass"); + .inspect_err(|e| { + tracing::error!("{}", e); + }) + .expect("stepwise should pass"); } } @@ -1667,9 +1667,9 @@ mod linear { distribution_interval, None, ) - .inspect_err(|e| { - tracing::error!("{}", e); - }) + .inspect_err(|e| { + tracing::error!("{}", e); + }) } } @@ -1825,12 +1825,12 @@ mod exponential { .set_perpetual_distribution(Some(TokenPerpetualDistribution::V0( TokenPerpetualDistributionV0 { distribution_type: - RewardDistributionType::BlockBasedDistribution { - interval: 1, - function: dist, - }, + RewardDistributionType::BlockBasedDistribution { + interval: 1, + function: dist, + }, distribution_recipient: - TokenDistributionRecipient::ContractOwner, + TokenDistributionRecipient::ContractOwner, }, ))); }), @@ -2025,9 +2025,9 @@ mod polynomial { distribution_interval, None, ) - .inspect_err(|e| { - tracing::error!("{}", e); - }) + 
.inspect_err(|e| { + tracing::error!("{}", e); + }) } /// Test various combinations of `m/n` in `[DistributionFunction::Polynomial]` distribution. @@ -2064,18 +2064,18 @@ mod polynomial { token_configuration .distribution_rules_mut() .set_perpetual_distribution( - Some(TokenPerpetualDistribution::V0( - TokenPerpetualDistributionV0 { - distribution_type: + Some(TokenPerpetualDistribution::V0( + TokenPerpetualDistributionV0 { + distribution_type: RewardDistributionType::BlockBasedDistribution { interval: 1, function: dist, }, - distribution_recipient: + distribution_recipient: TokenDistributionRecipient::ContractOwner, - }, - )), - ); + }, + )), + ); }), ); @@ -2234,7 +2234,7 @@ mod logarithmic { ], 1, ) - .expect("expect to pass"); + .expect("expect to pass"); } #[test] @@ -2272,9 +2272,9 @@ mod logarithmic { distribution_interval, None, ) - .inspect_err(|e| { - tracing::error!("{}", e); - }) + .inspect_err(|e| { + tracing::error!("{}", e); + }) } } @@ -2450,9 +2450,9 @@ mod inverted_logarithmic { distribution_interval, None, ) - .inspect_err(|e| { - tracing::error!("{}", e); - }) + .inspect_err(|e| { + tracing::error!("{}", e); + }) } } @@ -2612,7 +2612,7 @@ mod test_suite { token_configuration_modification: None, // setup later on_step_success: Box::new(|_| {}), } - .with_genesis(1, genesis_time_ms); + .with_genesis(1, genesis_time_ms); if let Some(token_configuration_modification) = token_configuration_modification { me.with_token_configuration_modification_fn(token_configuration_modification) @@ -2625,9 +2625,9 @@ mod test_suite { pub(crate) fn with_token_configuration_modification_fn( mut self, token_configuration_modification: impl FnOnce(&mut TokenConfiguration) - + Send - + Sync - + 'static, + + Send + + Sync + + 'static, ) -> Self { if let Some(previous) = self.token_configuration_modification.take() { let f = Box::new(move |token_configuration: &mut TokenConfiguration| { @@ -2753,7 +2753,7 @@ mod test_suite { None, None, ) - .expect("expect to create 
documents batch transition"); + .expect("expect to create documents batch transition"); let claim_serialized_transition = claim_transition .serialize_to_bytes() From 160d7718fcef8f793139cd8407366c9e1e90c68a Mon Sep 17 00:00:00 2001 From: Quantum Explorer Date: Fri, 18 Apr 2025 06:18:33 +0700 Subject: [PATCH 21/21] fmt --- .../document/batch_transition/methods/mod.rs | 1 + .../document/batch_transition/methods/v1/mod.rs | 1 + .../document/batch_transition/v1/v1_methods.rs | 11 ++++++++++- .../v0/mod.rs | 2 +- 4 files changed, 13 insertions(+), 2 deletions(-) diff --git a/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/mod.rs b/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/mod.rs index d9eff435c46..b67c079ab9f 100644 --- a/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/mod.rs +++ b/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/mod.rs @@ -32,6 +32,7 @@ use crate::state_transition::StateTransition; use crate::tokens::emergency_action::TokenEmergencyAction; #[cfg(feature = "state-transition-signing")] use crate::tokens::token_payment_info::TokenPaymentInfo; +#[cfg(feature = "state-transition-signing")] use crate::tokens::token_pricing_schedule::TokenPricingSchedule; #[cfg(feature = "state-transition-signing")] use crate::tokens::{PrivateEncryptedNote, SharedEncryptedNote}; diff --git a/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/v1/mod.rs b/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/v1/mod.rs index 79646f75e62..809f398642f 100644 --- a/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/v1/mod.rs +++ b/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/methods/v1/mod.rs @@ -19,6 +19,7 @@ use 
crate::state_transition::batch_transition::accessors::DocumentsBatchTransiti use crate::state_transition::StateTransition; #[cfg(feature = "state-transition-signing")] use crate::tokens::emergency_action::TokenEmergencyAction; +#[cfg(feature = "state-transition-signing")] use crate::tokens::token_pricing_schedule::TokenPricingSchedule; #[cfg(feature = "state-transition-signing")] use crate::tokens::{PrivateEncryptedNote, SharedEncryptedNote}; diff --git a/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/v1/v1_methods.rs b/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/v1/v1_methods.rs index 501eeb1efea..1130c0e17cb 100644 --- a/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/v1/v1_methods.rs +++ b/packages/rs-dpp/src/state_transition/state_transitions/document/batch_transition/v1/v1_methods.rs @@ -1,21 +1,27 @@ +#[cfg(feature = "state-transition-signing")] use crate::fee::Credits; #[cfg(feature = "state-transition-signing")] use crate::identity::signer::Signer; #[cfg(feature = "state-transition-signing")] use crate::identity::SecurityLevel; +#[cfg(feature = "state-transition-signing")] use crate::prelude::IdentityNonce; #[cfg(feature = "state-transition-signing")] use crate::prelude::IdentityPublicKey; #[cfg(feature = "state-transition-signing")] use crate::prelude::UserFeeIncrease; +#[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::batched_transition::BatchedTransition; -use crate::state_transition::batch_transition::{BatchTransitionV1, TokenDirectPurchaseTransition, TokenSetPriceForDirectPurchaseTransition}; +use crate::state_transition::batch_transition::BatchTransitionV1; +#[cfg(feature = "state-transition-signing")] +use crate::state_transition::batch_transition::{TokenDirectPurchaseTransition, TokenSetPriceForDirectPurchaseTransition}; #[cfg(feature = "state-transition-signing")] use 
crate::state_transition::batch_transition::{TokenClaimTransition, TokenBurnTransition, TokenConfigUpdateTransition, TokenDestroyFrozenFundsTransition, TokenEmergencyActionTransition, TokenFreezeTransition, TokenMintTransition, TokenTransferTransition, TokenUnfreezeTransition}; #[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::BatchTransition; #[cfg(feature = "state-transition-signing")] use crate::state_transition::StateTransition; +#[cfg(feature = "state-transition-signing")] use crate::ProtocolError; #[cfg(feature = "state-transition-signing")] use platform_value::Identifier; @@ -48,6 +54,7 @@ use crate::state_transition::batch_transition::token_claim_transition::TokenClai use crate::state_transition::batch_transition::token_config_update_transition::TokenConfigUpdateTransitionV0; #[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::token_destroy_frozen_funds_transition::TokenDestroyFrozenFundsTransitionV0; +#[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::token_direct_purchase_transition::TokenDirectPurchaseTransitionV0; #[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::token_emergency_action_transition::TokenEmergencyActionTransitionV0; @@ -55,6 +62,7 @@ use crate::state_transition::batch_transition::token_emergency_action_transition use crate::state_transition::batch_transition::token_freeze_transition::TokenFreezeTransitionV0; #[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::token_mint_transition::TokenMintTransitionV0; +#[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::token_set_price_for_direct_purchase_transition::TokenSetPriceForDirectPurchaseTransitionV0; #[cfg(feature = "state-transition-signing")] use crate::state_transition::batch_transition::token_transfer_transition::TokenTransferTransitionV0; @@ -64,6 
+72,7 @@ use crate::state_transition::batch_transition::token_unfreeze_transition::TokenU use crate::tokens::emergency_action::TokenEmergencyAction; #[cfg(feature = "state-transition-signing")] use crate::tokens::{PrivateEncryptedNote, SharedEncryptedNote}; +#[cfg(feature = "state-transition-signing")] use crate::tokens::token_pricing_schedule::TokenPricingSchedule; impl DocumentsBatchTransitionMethodsV1 for BatchTransitionV1 { diff --git a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs index 54aefbea144..2e9d7d0e91d 100644 --- a/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs +++ b/packages/rs-drive/src/drive/identity/fetch/prove/prove_full_identity_by_non_unique_public_key_hash/v0/mod.rs @@ -35,7 +35,7 @@ impl Drive { )?; let identity_proof = if let Some(identity_id) = identity_ids.first() { let full_identity_query = - Self::full_identity_query(&identity_id, &platform_version.drive.grove_version)?; + Self::full_identity_query(identity_id, &platform_version.drive.grove_version)?; Some(self.grove_get_proved_path_query( &full_identity_query, transaction,