Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
a3a6f30
Add SPARK
batmendbar Mar 18, 2026
c53d3d2
Merge main into adds-spark-squashed
batmendbar Mar 18, 2026
5d9fef0
Remove comments
batmendbar Mar 18, 2026
6108fc8
Format
batmendbar Mar 18, 2026
fb9f3fc
Cleanup
batmendbar Mar 18, 2026
0fb0d8e
split commitments
batmendbar Mar 20, 2026
3cdbf0d
Merge branch 'main' into adds-spark-squashed
batmendbar Mar 24, 2026
f9bcf09
combine files
batmendbar Mar 24, 2026
ff5c3bd
format
batmendbar Mar 24, 2026
2a65bae
recompute timestamps on prover
batmendbar Mar 24, 2026
6e94a18
change file write
batmendbar Mar 25, 2026
44390ac
file write and ci change
batmendbar Mar 25, 2026
af2245b
fix ci
batmendbar Mar 25, 2026
abba4c4
format
batmendbar Mar 25, 2026
7c7d47d
update tracing
batmendbar Mar 25, 2026
c69eab7
combines prepare and spark-prove into a server
batmendbar Mar 26, 2026
5e15c6e
parallelize and update end-to-end.yml
batmendbar Mar 26, 2026
c7d174a
cleanup
batmendbar Mar 26, 2026
d96ad02
cleanup
batmendbar Mar 26, 2026
69ab9c7
cleanup
batmendbar Mar 26, 2026
941d799
format
batmendbar Mar 26, 2026
09056f9
organize tracing
batmendbar Apr 8, 2026
8bbeb5d
Format and organize
batmendbar Apr 8, 2026
db411d5
merge
batmendbar Apr 8, 2026
1062d1d
Adds temporary transcript draining
batmendbar Apr 8, 2026
c5cbc91
format
batmendbar Apr 27, 2026
70111ef
Separate spark query from noir proof
batmendbar Apr 27, 2026
ce1c677
limit message size
batmendbar Apr 27, 2026
76cbf24
stop user-given server write
batmendbar Apr 27, 2026
19f6ab9
zero check
batmendbar Apr 28, 2026
0afba35
write spark commitment to a file
batmendbar Apr 29, 2026
96530fa
cli command to natively verify spark proofs
batmendbar Apr 29, 2026
58e01e7
move spark commitments to setup
batmendbar Apr 29, 2026
83d95eb
use request in DS instance
batmendbar Apr 29, 2026
9c3eae6
format
batmendbar Apr 29, 2026
a2d65c1
Merge branch 'main' into adds-spark-squashed
batmendbar Apr 29, 2026
dcec2d0
fix CI errors
batmendbar Apr 29, 2026
b0ddd0d
remove spark server
batmendbar Apr 30, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 33 additions & 17 deletions .github/workflows/end-to-end.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,48 +57,64 @@ jobs:
- name: Clean stale benchmark artifacts
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
rm -f ./benchmark-inputs/*.pkp ./benchmark-inputs/*.pkv ./benchmark-inputs/*.np
rm -f ./benchmark-inputs/*.pkp ./benchmark-inputs/*.pkv ./benchmark-inputs/*.np ./benchmark-inputs/*.sp
echo "Cleaned stale benchmark artifacts"

- name: Prepare circuits
- name: Prepare circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "Preparing $circuit"
cargo run --release --bin provekit-cli prepare ./target/$circuit.json \
--pkp ./benchmark-inputs/$circuit-prover.pkp \
--pkv ./benchmark-inputs/$circuit-verifier.pkv
../../../target/release/provekit-cli prepare \
./target/$circuit.json \
--pkp ./benchmark-inputs/$circuit.pkp \
--pkv ./benchmark-inputs/$circuit.pkv \
--spark \
--spc ./benchmark-inputs/$circuit.spc
echo "Prepared $circuit"
done

- name: Generate proofs for all circuits
- name: Prove all circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "Proving $circuit"
cargo run --release --bin provekit-cli prove \
./benchmark-inputs/$circuit-prover.pkp \
../../../target/release/provekit-cli prove \
./benchmark-inputs/$circuit.pkp \
./benchmark-inputs/tbs_720/$circuit.toml \
-o ./benchmark-inputs/$circuit-proof.np
-o ./benchmark-inputs/$circuit-proof.np \
--spark-queries-dir ./benchmark-inputs/$circuit-spark
echo "Proved $circuit"
done

- name: Verify proofs for all circuits
- name: Generate SPARK proofs for all circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "Verifying $circuit"
cargo run --release --bin provekit-cli verify \
./benchmark-inputs/$circuit-verifier.pkv \
./benchmark-inputs/$circuit-proof.np
echo "Verified $circuit"
echo "SPARK proving $circuit"
../../../target/release/provekit-cli prove-spark \
./benchmark-inputs/$circuit.pkp \
--spark-dir ./benchmark-inputs/$circuit-spark
echo "SPARK proved $circuit"
done

- name: Verify SPARK proofs for all circuits
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
for circuit in t_add_dsc_720 t_add_id_data_720 t_add_integrity_commit t_attest; do
echo "SPARK verifying $circuit"
../../../target/release/provekit-cli verify-spark \
./benchmark-inputs/$circuit-spark/spark_proof_0.sp \
./benchmark-inputs/$circuit.spc \
./benchmark-inputs/$circuit-spark/spark_query_0.json
echo "SPARK verified $circuit"
done

- name: Generate Gnark inputs
working-directory: noir-examples/noir-passport/merkle_age_check
run: |
cargo run --release --bin provekit-cli generate-gnark-inputs \
./benchmark-inputs/t_attest-verifier.pkv \
../../../target/release/provekit-cli generate-gnark-inputs \
./benchmark-inputs/t_attest.pkv \
./benchmark-inputs/t_attest-proof.np


Expand Down
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,15 @@
*.pkp
*.pkv
*.np
*.sp
*.spc
spark_proofs/
params_for_recursive_verifier
params
artifacts/
spartan_vm_debug/
mavros_debug/
mavros/

# Don't ignore benchmarking artifacts
!tooling/provekit-bench/benches/*
Expand Down
18 changes: 18 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ members = [
"tooling/provekit-wasm",
"tooling/verifier-server",
"ntt",
"provekit/spark",
"poseidon2",
"playground/passport-input-gen",
]
Expand Down Expand Up @@ -102,6 +103,7 @@ provekit-ffi = { path = "tooling/provekit-ffi" }
provekit-gnark = { path = "tooling/provekit-gnark" }
provekit-prover = { path = "provekit/prover", default-features = false }
provekit-r1cs-compiler = { path = "provekit/r1cs-compiler" }
provekit-spark = { path = "provekit/spark" }
provekit-verifier = { path = "provekit/verifier" }
provekit-verifier-server = { path = "tooling/verifier-server" }
provekit-wasm = { path = "tooling/provekit-wasm" }
Expand Down
2 changes: 1 addition & 1 deletion noir-examples/power/Nargo.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
[package]
name = "basic"
name = "power"
type = "bin"
authors = [""]
compiler_version = ">=0.22.0"
Expand Down
Binary file added noir-examples/power/benchmark-inputs/power.spc
Binary file not shown.
2 changes: 1 addition & 1 deletion noir-examples/power/src/main.nr
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
fn main(mut x: Field, y: pub Field) {
let mut r = 1;
for i in 0..10 {
for _ in 0..1000 {
r *= x;
}
assert(r == y);
Expand Down
2 changes: 1 addition & 1 deletion playground/passport-input-gen/src/bin/passport_cli/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,7 @@ fn prove_circuit<T: serde::Serialize>(
.map_err(|e| anyhow::anyhow!("ABI parse error for {circuit_name}: {e}"))?;

tee_println!(" [{circuit_name}] Generating proof...");
let proof = prover
let (proof, _) = prover
.prove(input_map)
.with_context(|| format!("Proving {circuit_name}"))?;

Expand Down
8 changes: 7 additions & 1 deletion provekit/common/src/file/binary_format.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,4 +24,10 @@ pub const NOIR_PROOF_SCHEME_FORMAT: [u8; 8] = *b"NrProScm";
pub const NOIR_PROOF_SCHEME_VERSION: (u16, u16) = (1, 2);

pub const NOIR_PROOF_FORMAT: [u8; 8] = *b"NPSProof";
pub const NOIR_PROOF_VERSION: (u16, u16) = (1, 1);
pub const NOIR_PROOF_VERSION: (u16, u16) = (1, 2);

pub const SPARK_PROOF_FORMAT: [u8; 8] = *b"SparkPrf";
pub const SPARK_PROOF_VERSION: (u16, u16) = (1, 0);

pub const SPARK_SETUP_FORMAT: [u8; 8] = *b"SparkStp";
pub const SPARK_SETUP_VERSION: (u16, u16) = (1, 0);
5 changes: 3 additions & 2 deletions provekit/common/src/file/io/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@ mod buf_ext;
mod counting_writer;
mod json;

pub use self::bin::Compression;
use {
self::{
bin::{
deserialize_from_bytes, read_bin, read_hash_config as read_hash_config_bin,
serialize_to_bytes, write_bin, Compression,
serialize_to_bytes, write_bin,
},
buf_ext::BufExt,
counting_writer::CountingWriter,
Expand All @@ -29,7 +30,7 @@ pub trait FileFormat: Serialize + for<'a> Deserialize<'a> {
}

/// Helper trait to optionally extract hash config.
pub(crate) trait MaybeHashAware {
pub trait MaybeHashAware {
fn maybe_hash_config(&self) -> Option<HashConfig>;
}

Expand Down
3 changes: 2 additions & 1 deletion provekit/common/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
pub mod file;
pub use file::binary_format;
pub mod hash_config;
mod interner;
pub mod interner;
mod mavros;
mod noir_proof_scheme;
pub mod ntt;
Expand All @@ -11,6 +11,7 @@ pub mod prefix_covector;
mod prover;
mod r1cs;
pub mod skyscraper;
pub mod spark;
pub mod sparse_matrix;
mod transcript_sponge;
pub mod u256_arith;
Expand Down
29 changes: 29 additions & 0 deletions provekit/common/src/spark.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
use {
crate::{utils::serde_ark, FieldElement},
serde::{Deserialize, Serialize},
sha3::{Digest, Sha3_256},
};

/// An evaluation point for a sparse R1CS matrix, split into its row and
/// column coordinate vectors.
///
/// NOTE(review): field order matters — `hash_bytes` on the containing query
/// serializes with postcard, a non-self-describing format, so reordering
/// fields changes the encoded bytes and the resulting hash.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Point {
    // Coordinates addressing the row dimension of the matrix.
    #[serde(with = "serde_ark")]
    pub row: Vec<FieldElement>,
    // Coordinates addressing the column dimension of the matrix.
    #[serde(with = "serde_ark")]
    pub col: Vec<FieldElement>,
}

/// A SPARK evaluation query over the committed R1CS matrices: the point to
/// evaluate at, the randomness used to combine the matrices into one claim
/// (presumably batching the A/B/C matrices — confirm against the prover),
/// and the value the prover claims for that evaluation.
///
/// NOTE(review): field order matters — `hash_bytes` serializes this struct
/// with postcard, which is positional; reordering fields changes the hash.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct R1CSSparkQuery {
    // Where the batched matrix polynomial is evaluated.
    pub point_to_evaluate: Point,
    // Scalar used to linearly combine the individual matrix claims.
    #[serde(with = "serde_ark")]
    pub matrix_batching_randomness: FieldElement,
    // The prover's claimed evaluation at `point_to_evaluate`.
    #[serde(with = "serde_ark")]
    pub claimed_value: FieldElement,
}

impl R1CSSparkQuery {
    /// Returns the SHA3-256 digest of this query's postcard encoding.
    ///
    /// # Panics
    /// Panics if postcard serialization fails, which would indicate a bug
    /// in the type's `Serialize` implementation rather than bad input.
    pub fn hash_bytes(&self) -> [u8; 32] {
        let encoded = postcard::to_allocvec(self).expect("serializing R1CSSparkQuery");
        let mut hasher = Sha3_256::new();
        hasher.update(&encoded);
        hasher.finalize().into()
    }
}
5 changes: 5 additions & 0 deletions provekit/common/src/utils/sumcheck.rs
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,11 @@ fn eval_eq(
}
}

/// Evaluates a quadratic polynomial at `point` via Horner's rule.
///
/// `poly` holds the coefficients in ascending degree order:
/// `poly[0] + poly[1] * x + poly[2] * x^2`.
pub fn eval_quadratic_poly(poly: [FieldElement; 3], point: FieldElement) -> FieldElement {
    let [c0, c1, c2] = poly;
    c0 + point * (c1 + point * c2)
}

/// Evaluates a cubic polynomial on a value
pub fn eval_cubic_poly(poly: [FieldElement; 4], point: FieldElement) -> FieldElement {
poly[0] + point * (poly[1] + point * (poly[2] + point * poly[3]))
Expand Down
Loading
Loading