Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
517 changes: 55 additions & 462 deletions Cargo.lock

Large diffs are not rendered by default.

14 changes: 7 additions & 7 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -81,13 +81,13 @@ solana-pubkey = "2.3.0"

solana-transaction-status = "1.18.0"

light-concurrent-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
light-batched-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
light-merkle-tree-metadata = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
light-compressed-account = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }
light-hasher = { git = "https://github.com/Lightprotocol/light-protocol", rev = "a16015358aee65da5f67e4ae73197df5e75495d9" }

light-poseidon = "0.3.0"
light-batched-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
light-compressed-account = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
light-concurrent-merkle-tree = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
light-hasher = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
light-merkle-tree-metadata = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
light-sdk = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }

sqlx = { version = "0.6.2", features = [
"macros",
Expand Down Expand Up @@ -131,7 +131,7 @@ rust-s3 = "0.34.0"
[dev-dependencies]
function_name = "0.3.0"
serial_test = "2.0.0"
light-merkle-tree-reference = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }
light-merkle-tree-reference = { git = "https://github.com/Lightprotocol/light-protocol", rev = "368f9f08272db78c74b2ade1a1c2fead27dd0a96" }

[profile.dev]
# Do not produce debug info for ~40% faster incremental compilation.
Expand Down
127 changes: 35 additions & 92 deletions src/api/method/get_validity_proof/prover/gnark.rs
Original file line number Diff line number Diff line change
@@ -1,103 +1,46 @@
use crate::api::error::PhotonApiError;
use crate::api::method::get_validity_proof::prover::structs::{CompressedProof, ProofABC};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use solana_program::alt_bn128::compression::prelude::{
alt_bn128_g1_compress, alt_bn128_g2_compress, convert_endianness,
};
use std::ops::Neg;
use crate::api::error::PhotonApiError;

type G1 = ark_bn254::g1::G1Affine;

/// Reverses the byte order of every 32-byte chunk in `bytes`.
///
/// The slice is walked in consecutive chunks of 32 bytes and each chunk is
/// emitted in reversed order. A trailing chunk shorter than 32 bytes is
/// reversed as well, so the output always has the same length as the input.
/// This converts 32-byte field elements between big-endian and little-endian
/// representations.
///
/// # Arguments
///
/// * `bytes` - Slice whose 32-byte chunks will be byte-reversed.
///
/// # Returns
///
/// A `Vec<u8>` of the same length with every 32-byte chunk reversed.
fn change_endianness(bytes: &[u8]) -> Vec<u8> {
    bytes
        .chunks(32)
        .flat_map(|chunk| chunk.iter().rev().copied())
        .collect()
}

/// Negates the `a` component of the given proof and compresses the proof into a `CompressedProof`.
///
/// # Arguments
///
/// * `proof` - A `ProofABC` structure containing three components: `a`, `b`, and `c`.
///
/// - `a` is negated and serialized in big-endian format.
/// - `b` and `c` are trimmed and included as-is in the compressed form.
///
/// # Returns
///
/// A `CompressedProof` containing:
///
/// * The negated and serialized `a` component as a vector of bytes.
/// * The first 64 bytes of the `b` component.
/// * The first 32 bytes of the `c` component.
///
/// # Panics
///
/// This function will panic if:
///
/// * The deserialization or serialization of the `G1` point fails.
/// * The `proof.a` slice length is insufficient to produce a valid G1 when adding padding for deserialization.
///
/// # Note
///
/// The function assumes that the `ProofABC` structure contains its `a`, `b`, and `c` components in valid formats
/// necessary for transformation and compression.
pub fn negate_proof(proof: ProofABC) -> Result<CompressedProof, PhotonApiError> {
let mut proof_a_neg = [0u8; 65];

let proof_a: G1 = G1::deserialize_with_mode(
&*[&change_endianness(&proof.a), &[0u8][..]].concat(),
Compress::No,
Validate::No,
).map_err(|e| PhotonApiError::UnexpectedError(format!("Failed to deserialize G1 point: {}", e)))?;
pub fn negate_g1(g1_be: &[u8; 64]) -> Result<[u8; 64], PhotonApiError> {
let g1_le = convert_endianness::<32, 64>(g1_be);
let g1: G1 = G1::deserialize_with_mode(g1_le.as_slice(), Compress::No, Validate::No).unwrap();

proof_a
.neg()
let g1_neg = g1.neg();
let mut g1_neg_be = [0u8; 64];
g1_neg
.x
.serialize_with_mode(&mut proof_a_neg[..32], Compress::No)
.map_err(|e| PhotonApiError::UnexpectedError(format!("Failed to serialize x coordinate: {}", e)))?;

proof_a
.neg()
.serialize_with_mode(&mut g1_neg_be[..32], Compress::No)
.map_err(|_| {
PhotonApiError::UnexpectedError("Failed to serialize G1 x coordinate".to_string())
})?;
g1_neg
.y
.serialize_with_mode(&mut proof_a_neg[32..], Compress::No)
.map_err(|e| PhotonApiError::UnexpectedError(format!("Failed to serialize y coordinate: {}", e)))?;

let compressed_proof = CompressedProof {
a: proof_a_neg[0..32].to_vec(),
b: proof.b[0..64].to_vec(),
c: proof.c[0..32].to_vec(),
};

Ok(compressed_proof)
.serialize_with_mode(&mut g1_neg_be[32..], Compress::No)
.map_err(|_| {
PhotonApiError::UnexpectedError("Failed to serialize G1 y coordinate".to_string())
})?;
let g1_neg_be: [u8; 64] = convert_endianness::<32, 64>(&g1_neg_be);
Ok(g1_neg_be)
}

/// Compresses an uncompressed Groth16 proof into the on-chain wire format.
///
/// # Arguments
///
/// * `proof` - Uncompressed proof whose `a` and `c` components are G1 points
///   and whose `b` component is a G2 point.
///
/// # Returns
///
/// A `CompressedProof` holding the syscall-compressed `a`, `b`, and `c`
/// components.
///
/// # Errors
///
/// Returns `PhotonApiError::UnexpectedError` when a component is not a valid
/// curve point and fails to compress. The message names the failing component
/// (`a`, `b`, or `c`) so G1 failures in `a` and `c` are distinguishable.
pub fn compress_proof(proof: &ProofABC) -> Result<CompressedProof, PhotonApiError> {
    let proof_a = alt_bn128_g1_compress(&proof.a).map_err(|_| {
        PhotonApiError::UnexpectedError("Failed to compress G1 proof component a".to_string())
    })?;
    let proof_b = alt_bn128_g2_compress(&proof.b).map_err(|_| {
        PhotonApiError::UnexpectedError("Failed to compress G2 proof component b".to_string())
    })?;
    let proof_c = alt_bn128_g1_compress(&proof.c).map_err(|_| {
        PhotonApiError::UnexpectedError("Failed to compress G1 proof component c".to_string())
    })?;

    Ok(CompressedProof {
        a: Vec::from(proof_a),
        b: Vec::from(proof_b),
        c: Vec::from(proof_c),
    })
}
128 changes: 91 additions & 37 deletions src/api/method/get_validity_proof/prover/helpers.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
use crate::api::error::PhotonApiError;
use crate::api::method::get_multiple_new_address_proofs::MerkleContextWithNewAddressProof;
use crate::api::method::get_validity_proof::prover::gnark::negate_g1;
use crate::api::method::get_validity_proof::prover::structs::{
GnarkProofJson, InclusionHexInputsForProver, NonInclusionHexInputsForProver, ProofABC,
};
Expand Down Expand Up @@ -64,73 +66,125 @@ pub fn hash_to_hex(hash: &Hash) -> String {
fn pubkey_to_hex(pubkey: &SerializablePubkey) -> String {
let bytes = pubkey.to_bytes_vec();
let hex = hex::encode(bytes);

format!("0x{}", hex)
}

fn deserialize_hex_string_to_bytes(hex_str: &str) -> Vec<u8> {
pub fn deserialize_hex_string_to_bytes(hex_str: &str) -> Result<Vec<u8>, PhotonApiError> {
let hex_str = if hex_str.starts_with("0x") {
&hex_str[2..]
} else {
hex_str
};

// Left pad with 0s if the length is not 64
let hex_str = format!("{:0>64}", hex_str);

hex::decode(&hex_str).expect("Failed to decode hex string")
hex::decode(hex_str)
.map_err(|_| PhotonApiError::UnexpectedError("Failed to decode hex string".to_string()))
}

pub fn proof_from_json_struct(json: GnarkProofJson) -> ProofABC {
let proof_ax = deserialize_hex_string_to_bytes(&json.ar[0]);
let proof_ay = deserialize_hex_string_to_bytes(&json.ar[1]);
let proof_a = [proof_ax, proof_ay].concat();
pub fn proof_from_json_struct(json: GnarkProofJson) -> Result<ProofABC, PhotonApiError> {
let proof_a_x = deserialize_hex_string_to_bytes(&json.ar[0])?;
let proof_a_y = deserialize_hex_string_to_bytes(&json.ar[1])?;
let proof_a: [u8; 64] = [proof_a_x, proof_a_y].concat().try_into().map_err(|_| {
PhotonApiError::UnexpectedError("Failed to convert proof_a to [u8; 64]".to_string())
})?;
let proof_a = negate_g1(&proof_a)?;

let proof_bx0 = deserialize_hex_string_to_bytes(&json.bs[0][0]);
let proof_bx1 = deserialize_hex_string_to_bytes(&json.bs[0][1]);
let proof_by0 = deserialize_hex_string_to_bytes(&json.bs[1][0]);
let proof_by1 = deserialize_hex_string_to_bytes(&json.bs[1][1]);
let proof_b = [proof_bx0, proof_bx1, proof_by0, proof_by1].concat();
let proof_b_x_0 = deserialize_hex_string_to_bytes(&json.bs[0][0])?;
let proof_b_x_1 = deserialize_hex_string_to_bytes(&json.bs[0][1])?;
let proof_b_y_0 = deserialize_hex_string_to_bytes(&json.bs[1][0])?;
let proof_b_y_1 = deserialize_hex_string_to_bytes(&json.bs[1][1])?;
let proof_b: [u8; 128] = [proof_b_x_0, proof_b_x_1, proof_b_y_0, proof_b_y_1]
.concat()
.try_into()
.map_err(|_| {
PhotonApiError::UnexpectedError("Failed to convert proof_b to [u8; 128]".to_string())
})?;

let proof_cx = deserialize_hex_string_to_bytes(&json.krs[0]);
let proof_cy = deserialize_hex_string_to_bytes(&json.krs[1]);
let proof_c = [proof_cx, proof_cy].concat();
let proof_c_x = deserialize_hex_string_to_bytes(&json.krs[0])?;
let proof_c_y = deserialize_hex_string_to_bytes(&json.krs[1])?;
let proof_c: [u8; 64] = [proof_c_x, proof_c_y].concat().try_into().map_err(|_| {
PhotonApiError::UnexpectedError("Failed to convert proof_c to [u8; 64]".to_string())
})?;

ProofABC {
Ok(ProofABC {
a: proof_a,
b: proof_b,
c: proof_c,
}
})
}

pub fn get_public_input_hash(
account_proofs: &[MerkleProofWithContext],
new_address_proofs: &[MerkleContextWithNewAddressProof],
) -> [u8; 32] {
let account_hashes: Vec<[u8; 32]> = account_proofs
) -> Result<[u8; 32], PhotonApiError> {
let account_hashes: Result<Vec<[u8; 32]>, PhotonApiError> = account_proofs
.iter()
.map(|x| x.hash.to_vec().clone().try_into().unwrap())
.collect::<Vec<[u8; 32]>>();
let account_roots: Vec<[u8; 32]> = account_proofs
.map(|x| {
x.hash.to_vec().try_into().map_err(|_| {
PhotonApiError::UnexpectedError("Failed to convert hash to [u8; 32]".to_string())
})
})
.collect();
let account_hashes = account_hashes?;

let account_roots: Result<Vec<[u8; 32]>, PhotonApiError> = account_proofs
.iter()
.map(|x| x.root.to_vec().clone().try_into().unwrap())
.collect::<Vec<[u8; 32]>>();
let inclusion_hash_chain: [u8; 32] =
create_two_inputs_hash_chain(&account_roots, &account_hashes).unwrap();
let new_address_hashes: Vec<[u8; 32]> = new_address_proofs
.map(|x| {
x.root.to_vec().try_into().map_err(|_| {
PhotonApiError::UnexpectedError("Failed to convert root to [u8; 32]".to_string())
})
})
.collect();
let account_roots = account_roots?;

let inclusion_hash_chain = create_two_inputs_hash_chain(&account_roots, &account_hashes)
.map_err(|e| {
PhotonApiError::UnexpectedError(format!("Failed to create hash chain: {}", e))
})?;

let new_address_hashes: Result<Vec<[u8; 32]>, PhotonApiError> = new_address_proofs
.iter()
.map(|x| x.address.try_to_vec().unwrap().clone().try_into().unwrap())
.collect::<Vec<[u8; 32]>>();
let new_address_roots: Vec<[u8; 32]> = new_address_proofs
.map(|x| {
x.address
.try_to_vec()
.map_err(|e| {
PhotonApiError::UnexpectedError(format!("Failed to serialize address: {}", e))
})?
.try_into()
.map_err(|_| {
PhotonApiError::UnexpectedError(
"Failed to convert address bytes to [u8; 32]".to_string(),
)
})
})
.collect();
let new_address_hashes = new_address_hashes?;

let new_address_roots: Result<Vec<[u8; 32]>, PhotonApiError> = new_address_proofs
.iter()
.map(|x| x.root.to_vec().clone().try_into().unwrap())
.collect::<Vec<[u8; 32]>>();
.map(|x| {
x.root.to_vec().try_into().map_err(|_| {
PhotonApiError::UnexpectedError(
"Failed to convert new address root to [u8; 32]".to_string(),
)
})
})
.collect();
let new_address_roots = new_address_roots?;

let non_inclusion_hash_chain =
create_two_inputs_hash_chain(&new_address_roots, &new_address_hashes).unwrap();
create_two_inputs_hash_chain(&new_address_roots, &new_address_hashes).map_err(|e| {
PhotonApiError::UnexpectedError(format!("Failed to create hash chain: {}", e))
})?;

if non_inclusion_hash_chain != [0u8; 32] {
non_inclusion_hash_chain
Ok(non_inclusion_hash_chain)
} else if inclusion_hash_chain != [0u8; 32] {
inclusion_hash_chain
Ok(inclusion_hash_chain)
} else {
create_two_inputs_hash_chain(&[inclusion_hash_chain], &[non_inclusion_hash_chain]).unwrap()
create_two_inputs_hash_chain(&[inclusion_hash_chain], &[non_inclusion_hash_chain]).map_err(
|e| PhotonApiError::UnexpectedError(format!("Failed to create hash chain: {}", e)),
)
}
}
Loading