diff --git a/.github/workflows/end-to-end.yml b/.github/workflows/end-to-end.yml index 893b934de..443b090d0 100644 --- a/.github/workflows/end-to-end.yml +++ b/.github/workflows/end-to-end.yml @@ -52,12 +52,11 @@ jobs: - name: Generate Gnark inputs working-directory: noir-examples/poseidon-rounds run: | - cargo run --release --bin noir-r1cs prepare ./target/basic.json -o ./noir-proof-scheme.nps - cargo run --release --bin noir-r1cs prove ./noir-proof-scheme.nps ./Prover.toml -o ./noir-proof.np - cargo run --release --bin noir-r1cs generate-gnark-inputs ./noir-proof-scheme.nps ./noir-proof.np + cargo run --release --bin provekit-cli prepare ./target/basic.json -o ./noir-proof-scheme.nps + cargo run --release --bin provekit-cli prove ./noir-proof-scheme.nps ./Prover.toml -o ./noir-proof.np + cargo run --release --bin provekit-cli generate-gnark-inputs ./noir-proof-scheme.nps ./noir-proof.np - name: Run Gnark verifier working-directory: gnark-whir run: | go build -o gnark-verifier . # ./gnark-verifier - diff --git a/.gitignore b/.gitignore index c32fc8ad2..7aad1878b 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,10 @@ *.nps *.np params_for_recursive_verifier +params + +# Don't ignore benchmarking artifacts +!tooling/provekit-bench/benches/* # Generated by Cargo # will have compiled files and executables diff --git a/Cargo.toml b/Cargo.toml index a0fdef869..45f0846cb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,14 +1,18 @@ [workspace] resolver = "2" members = [ - "cm31_ntt", - "noir-r1cs", "skyscraper/fp-rounding", "skyscraper/hla", "skyscraper/block-multiplier", "skyscraper/block-multiplier-codegen", "skyscraper/core", - "noir-tools", + "provekit/common", + "provekit/r1cs-compiler", + "provekit/prover", + "provekit/verifier", + "tooling/cli", + "tooling/provekit-bench", + "tooling/provekit-gnark", ] [workspace.package] @@ -25,6 +29,7 @@ authors = [ "Benjamin Wilson", "Yogesh Swami", "Ryan Cao", + "Aditya Bisht", # TODO: More contributors ] license = "MIT" @@ 
-60,86 +65,68 @@ opt-level = 2 opt-level = 3 [workspace.dependencies] -# Workspace members +# Workspace members - Skyscraper block-multiplier = { path = "skyscraper/block-multiplier" } block-multiplier-codegen = { path = "skyscraper/block-multiplier-codegen" } fp-rounding = { path = "skyscraper/fp-rounding" } hla = { path = "skyscraper/hla" } -noir-r1cs = { path = "noir-r1cs" } -noir-tools = { path = "noir-tools" } skyscraper = { path = "skyscraper/core" } +# Workspace members - ProveKit +provekit-bench = { path = "tooling/provekit-bench" } +provekit-cli = { path = "tooling/cli" } +provekit-common = { path = "provekit/common" } +provekit-gnark = { path = "tooling/provekit-gnark" } +provekit-prover = { path = "provekit/prover" } +provekit-r1cs-compiler = { path = "provekit/r1cs-compiler" } +provekit-verifier = { path = "provekit/verifier" } + # 3rd party -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -toml = "0.8" +anyhow = "1.0.93" +argh = "0.1.12" base64 = "0.22.1" +bytes = "1.10.1" divan = { package = "codspeed-divan-compat", version = "3.0.1" } -ruint = { version = "1.12.3", features = ["num-traits", "rand"] } -seq-macro = "0.3.6" -primitive-types = "0.13.1" -paste = "1.0.15" -arrayvec = "0.7.6" -blake3 = "1.8.2" -gensym = "0.1.1" -rsa = { version = "0.9.8", features = ["sha2"] } -rand = "0.9.1" -rand08 = { package = "rand", version = "0.8" } -halo2curves = { version = "0.7.0", features = ["bn256-table"] } -criterion = "0.5.1" -linkme = "0.3.32" -lazy_static = "1.5.0" -num-traits = "0.2.19" -num = "0.4.3" -sha3 = { version = "0.10.8", features = ["asm"] } -sha2 = { version = "0.10", features = ["compress"] } -subtle = "2.6.1" -static_assertions = "1.1" -tracing = "0.1.41" -bytemuck = "1.22.0" -hex-literal = "1" hex = "0.4.3" -itertools = "0.14.0" -flate2 = "1.0" -rayon = "1.10.0" -bincode = "1.3" +paste = "1.0.15" postcard = { version = "1.1.1", features = ["use-std"] } -zstd = "0.13.3" -bytes = "1.10.1" +primitive-types = "0.13.1" 
proptest = "1.6.0" -zerocopy = "0.8.25" quickcheck = "1.0.3" quickcheck_macros = "1.0.0" -rand_chacha = "0.9.0" +rand = "0.9.1" +rand08 = { package = "rand", version = "0.8" } +rayon = "1.10.0" +ruint = { version = "1.12.3", features = ["num-traits", "rand"] } +seq-macro = "0.3.6" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" test-case = "3.3.1" +toml = "0.8.8" +tracing = "0.1.41" +tracing-subscriber = { version = "0.3.18", features = ["env-filter", "ansi"] } +zerocopy = "0.8.25" +zeroize = "1.8.1" +zstd = "0.13.3" -# Note: to simplify things, pick a version that has the same `ark_ff` version as `whir`. +# Noir language dependencies acir = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } -noirc_abi = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } -noirc_artifacts = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } bn254_blackbox_solver = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } nargo = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } -nargo_toml = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } -noirc_driver = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } nargo_cli = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } +nargo_toml = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } noir_artifact_cli = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } +noirc_abi = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } +noirc_artifacts = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } +noirc_driver = { git = "https://github.com/noir-lang/noir", rev = "v1.0.0-beta.6" } -whir = { git = "https://github.com/WizardOfMenlo/whir/", features = ["tracing"] } -spongefish = { git = "https://github.com/arkworks-rs/spongefish", features = [ - "arkworks-algebra", -] } -ark-ff = { version = "0.5", features = ["asm", 
"std"] } -ark-bn254 = { version = "0.5.0", default-features = false, features = [ - "scalar_field", -] } -zeroize = "1.8.1" +# Cryptography and proof systems +ark-bn254 = { version = "0.5.0", default-features = false, features = ["scalar_field"] } ark-crypto-primitives = { version = "0.5", features = ["merkle_tree"] } -spongefish-pow = { git = "https://github.com/arkworks-rs/spongefish" } +ark-ff = { version = "0.5", features = ["asm", "std"] } ark-poly = "0.5" -ark-std = { version = "0.5", features = ["std"] } ark-serialize = "0.5" - -# Bin -anyhow = "1.0.93" -argh = "0.1.12" -tracing-subscriber = { version = "0.3.18", features = ["env-filter", "ansi"] } \ No newline at end of file +ark-std = { version = "0.5", features = ["std"] } +spongefish = { git = "https://github.com/arkworks-rs/spongefish", features = ["arkworks-algebra"] } +spongefish-pow = { git = "https://github.com/arkworks-rs/spongefish" } +whir = { git = "https://github.com/WizardOfMenlo/whir/", features = ["tracing"], rev = "3e7f8c299783fddf4354869bbcbc995a5018a9d4" } diff --git a/noir-r1cs/Cargo.toml b/noir-r1cs/Cargo.toml deleted file mode 100644 index 8c7f8f1e6..000000000 --- a/noir-r1cs/Cargo.toml +++ /dev/null @@ -1,75 +0,0 @@ -[package] -name = "noir-r1cs" -version = "0.1.0" -edition = "2021" - -[lints] -workspace = true - -[[bin]] -name = "noir-r1cs" -path = "src/cli/main.rs" - -[dependencies] -# 3rd party -serde.workspace = true -serde_json.workspace = true -toml.workspace = true -base64.workspace = true -zeroize.workspace = true -itertools.workspace = true -ruint.workspace = true -flate2.workspace = true -bincode.workspace = true -rand.workspace = true -hex.workspace = true -rayon.workspace = true -postcard.workspace = true -zstd.workspace = true -bytes.workspace = true -bytemuck.workspace = true -skyscraper.workspace = true -static_assertions.workspace = true -zerocopy.workspace = true - -# Ark -rand08.workspace = true - -# Noir lang -acir.workspace = true -noirc_artifacts.workspace = true 
-noirc_abi.workspace = true -nargo.workspace = true -noir_artifact_cli.workspace = true -bn254_blackbox_solver.workspace = true - -# WHIR -whir.workspace = true -spongefish.workspace = true -spongefish-pow.workspace = true -ark-bn254.workspace = true -ark-ff.workspace = true -ark-crypto-primitives.workspace = true -ark-poly.workspace = true -ark-std.workspace = true -ark-serialize.workspace = true - -# Binary -# See -argh.workspace = true -anyhow.workspace = true -tracing.workspace = true -tracing-subscriber.workspace = true - -[dev-dependencies] -# Internal -noir-tools.workspace = true - -# 3rd Party -rand.workspace = true -divan.workspace = true -test-case.workspace = true - -[[bench]] -name = "bench" -harness = false diff --git a/noir-r1cs/README.md b/noir-r1cs/README.md deleted file mode 100644 index 0bc01b5f4..000000000 --- a/noir-r1cs/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# noir-r1cs - -PROTOTYPE: DO NOT USE IN PRODUCTION - -### Compile and generate witness - -```plaintext -nargo compile && nargo execute witness -``` - -### Generate R1CS - -```plaintext -cargo run -- r1cs noir-examples/basic/target/basic.json noir-examples/basic/target/witness.gz -``` - -Example output: - -```plaintext -Private inputs: 1 -Public inputs: 3 -Return values: 0 -Opcodes: 1 -Witnesses: 5 -Constraints: 1 -([0, 1, 0, 0, 0] x [1, 1, 2, 3, 5]ᵀ) * ([0, 0, 1, 0, 0] x [1, 1, 2, 3, 5]ᵀ) = ([0, 0, 0, -1, 1] x [1, 1, 2, 3, 5]ᵀ) -✅ All constraints are valid. 
-``` - diff --git a/noir-r1cs/src/lib.rs b/noir-r1cs/src/lib.rs deleted file mode 100644 index 09d56d10a..000000000 --- a/noir-r1cs/src/lib.rs +++ /dev/null @@ -1,48 +0,0 @@ -#![doc = include_str!("../README.md")] -#![allow(missing_docs)] -mod binops; -mod digits; -mod file; -mod gnark_config; -mod interner; -mod memory; -mod noir_proof_scheme; -mod noir_to_r1cs; -mod noir_witness; -mod r1cs; -mod r1cs_solver; -mod ram; -mod range_check; -mod rom; -mod skyscraper; -mod sparse_matrix; -pub mod utils; -mod whir_r1cs; - -pub use { - crate::{ - file::{read, write, FileFormat}, - noir_proof_scheme::{NoirProof, NoirProofScheme}, - noir_to_r1cs::noir_to_r1cs, - r1cs::R1CS, - utils::human, - }, - acir::FieldElement as NoirElement, - gnark_config::write_gnark_parameters_to_file, - whir::crypto::fields::Field256 as FieldElement, -}; -use { - crate::{ - interner::{InternedFieldElement, Interner}, - noir_witness::NoirWitnessGenerator, - sparse_matrix::{HydratedSparseMatrix, SparseMatrix}, - utils::serde_ark, - }, - serde::{Deserialize, Serialize}, -}; - -#[derive(Clone, Serialize, Deserialize)] -pub struct Proof { - #[serde(with = "serde_ark")] - transcript: Vec, -} diff --git a/noir-r1cs/src/noir_proof_scheme.rs b/noir-r1cs/src/noir_proof_scheme.rs deleted file mode 100644 index 142d6a20a..000000000 --- a/noir-r1cs/src/noir_proof_scheme.rs +++ /dev/null @@ -1,279 +0,0 @@ -use { - crate::{ - noir_to_r1cs, - noir_witness::WitnessIOPattern, - r1cs_solver::WitnessBuilder, - skyscraper::SkyscraperSponge, - utils::{noir_to_native, PrintAbi}, - whir_r1cs::{IOPattern, WhirR1CSProof, WhirR1CSScheme}, - FieldElement, NoirWitnessGenerator, R1CS, - }, - acir::{circuit::Program, native_types::WitnessMap, FieldElement as NoirFieldElement}, - anyhow::{ensure, Context as _, Result}, - bn254_blackbox_solver::Bn254BlackBoxSolver, - nargo::foreign_calls::DefaultForeignCallBuilder, - noir_artifact_cli::fs::inputs::read_inputs_from_file, - noirc_abi::InputMap, - 
noirc_artifacts::program::ProgramArtifact, - rand::{rng, Rng as _}, - serde::{Deserialize, Serialize}, - spongefish::{codecs::arkworks_algebra::FieldToUnitSerialize, ProverState}, - std::{fs::File, path::Path}, - tracing::{info, instrument}, -}; -/// A scheme for proving a Noir program. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct NoirProofScheme { - pub program: Program, - pub r1cs: R1CS, - pub witness_builders: Vec, - pub witness_generator: NoirWitnessGenerator, - pub whir_for_witness: WhirR1CSScheme, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct NoirProof { - pub whir_r1cs_proof: WhirR1CSProof, -} - -impl NoirProofScheme { - #[instrument(fields(size = path.as_ref().metadata().map(|m| m.len()).ok()))] - pub fn from_file(path: impl AsRef + std::fmt::Debug) -> Result { - let file = File::open(path).context("while opening Noir program")?; - let program = serde_json::from_reader(file).context("while reading Noir program")?; - - Self::from_program(program) - } - - #[instrument(skip_all)] - pub fn from_program(program: ProgramArtifact) -> Result { - info!("Program noir version: {}", program.noir_version); - info!("Program entry point: fn main{};", PrintAbi(&program.abi)); - ensure!( - program.bytecode.functions.len() == 1, - "Program must have one entry point." - ); - - // Extract bits from Program Artifact. 
- let main = &program.bytecode.functions[0]; - info!( - "ACIR: {} witnesses, {} opcodes.", - main.current_witness_index, - main.opcodes.len() - ); - - // Compile to R1CS schemes - let (r1cs, witness_map, witness_builders) = noir_to_r1cs(main)?; - info!( - "R1CS {} constraints, {} witnesses, A {} entries, B {} entries, C {} entries", - r1cs.num_constraints(), - r1cs.num_witnesses(), - r1cs.a.num_entries(), - r1cs.b.num_entries(), - r1cs.c.num_entries() - ); - - // Configure witness generator - let witness_generator = - NoirWitnessGenerator::new(&program, witness_map, r1cs.num_witnesses()); - - // Configure Whir - let whir_for_witness = WhirR1CSScheme::new_for_r1cs(&r1cs); - - Ok(Self { - program: program.bytecode, - r1cs, - witness_builders, - witness_generator, - whir_for_witness, - }) - } - - #[must_use] - pub const fn size(&self) -> (usize, usize) { - (self.r1cs.num_constraints(), self.r1cs.num_witnesses()) - } - - pub fn read_witness(&self, prover_toml: impl AsRef) -> Result { - let (input_map, _expected_return) = - read_inputs_from_file(prover_toml.as_ref(), self.witness_generator.abi())?; - - Ok(input_map) - } - - #[instrument(skip_all)] - pub fn generate_witness(&self, input_map: &InputMap) -> Result> { - let solver = Bn254BlackBoxSolver::default(); - let mut output_buffer = Vec::new(); - let mut foreign_call_executor = DefaultForeignCallBuilder { - output: &mut output_buffer, - enable_mocks: false, - resolver_url: None, - root_path: None, - package_name: None, - } - .build(); - - let initial_witness = self.witness_generator.abi().encode(input_map, None)?; - - let mut witness_stack = nargo::ops::execute_program( - &self.program, - initial_witness, - &solver, - &mut foreign_call_executor, - )?; - - Ok(witness_stack - .pop() - .context("Missing witness results")? 
- .witness) - } - - #[instrument(skip_all)] - pub fn prove(&self, input_map: &InputMap) -> Result { - let acir_witness_idx_to_value_map = self.generate_witness(input_map)?; - - // Solve R1CS instance - let witness_io = self.create_witness_io_pattern(); - let mut witness_merlin = witness_io.to_prover_state(); - self.seed_witness_merlin(&mut witness_merlin, &acir_witness_idx_to_value_map)?; - - let partial_witness = self.r1cs.solve_witness_vec( - &self.witness_builders, - &acir_witness_idx_to_value_map, - &mut witness_merlin, - ); - let witness = fill_witness(partial_witness).context("while filling witness")?; - - // Verify witness (redudant with solve) - #[cfg(test)] - self.r1cs - .test_witness_satisfaction(&witness) - .context("While verifying R1CS instance")?; - - // Prove R1CS instance - let whir_r1cs_proof = self - .whir_for_witness - .prove(&self.r1cs, witness) - .context("While proving R1CS instance")?; - - Ok(NoirProof { whir_r1cs_proof }) - } - - #[instrument(skip_all)] - pub fn verify(&self, proof: &NoirProof) -> Result<()> { - self.whir_for_witness.verify(&proof.whir_r1cs_proof)?; - Ok(()) - } - - fn create_witness_io_pattern(&self) -> IOPattern { - let circuit = &self.program.functions[0]; - let public_idxs = circuit.public_inputs().indices(); - let num_challenges = self - .witness_builders - .iter() - .filter(|b| matches!(b, WitnessBuilder::Challenge(_))) - .count(); - - // Create witness IO pattern - IOPattern::new("📜") - .add_shape() - .add_public_inputs(public_idxs.len()) - .add_logup_challenges(num_challenges) - } - - fn seed_witness_merlin( - &self, - merlin: &mut ProverState, - witness: &WitnessMap, - ) -> Result<()> { - // Absorb circuit shape - let _ = merlin.add_scalars(&[ - FieldElement::from(self.r1cs.num_constraints() as u64), - FieldElement::from(self.r1cs.num_witnesses() as u64), - ]); - - // Absorb public inputs (values) in canonical order - let circuit = &self.program.functions[0]; - let public_idxs = circuit.public_inputs().indices(); - 
if !public_idxs.is_empty() { - let pub_vals: Vec = public_idxs - .iter() - .map(|&i| noir_to_native(*witness.get_index(i).expect("missing public input"))) - .collect(); - let _ = merlin.add_scalars(&pub_vals); - } - - Ok(()) - } -} - -/// Complete a partial witness with random values. -#[instrument(skip_all, fields(size = witness.len()))] -fn fill_witness(witness: Vec>) -> Result> { - // TODO: Use better entropy source and proper sampling. - let mut rng = rng(); - let mut count = 0; - let witness = witness - .iter() - .map(|f| { - f.unwrap_or_else(|| { - count += 1; - FieldElement::from(rng.random::()) - }) - }) - .collect::>(); - info!("Filled witness with {count} random values"); - Ok(witness) -} - -#[cfg(test)] -mod tests { - use { - super::NoirProofScheme, - crate::{ - r1cs_solver::{ConstantTerm, SumTerm, WitnessBuilder}, - FieldElement, - }, - ark_std::One, - serde::{Deserialize, Serialize}, - std::path::PathBuf, - }; - - #[track_caller] - fn test_serde(value: &T) - where - T: std::fmt::Debug + PartialEq + Serialize + for<'a> Deserialize<'a>, - { - // Test JSON - let json = serde_json::to_string(value).unwrap(); - let deserialized = serde_json::from_str(&json).unwrap(); - assert_eq!(value, &deserialized); - - // Test Postcard - let bin = postcard::to_allocvec(value).unwrap(); - let deserialized = postcard::from_bytes(&bin).unwrap(); - assert_eq!(value, &deserialized); - } - - #[test] - fn test_noir_proof_scheme_serde() { - let path = PathBuf::from("benches/poseidon_rounds.json"); - let proof_schema = NoirProofScheme::from_file(path).unwrap(); - - test_serde(&proof_schema.r1cs); - test_serde(&proof_schema.witness_builders); - test_serde(&proof_schema.witness_generator); - test_serde(&proof_schema.whir_for_witness); - } - - #[test] - fn test_witness_builder_serde() { - let sum_term = SumTerm(Some(FieldElement::one()), 2); - test_serde(&sum_term); - let constant_term = ConstantTerm(2, FieldElement::one()); - test_serde(&constant_term); - let witness_builder = 
WitnessBuilder::Constant(constant_term); - test_serde(&witness_builder); - } -} diff --git a/noir-r1cs/src/test_programs/bin-opcode/Nargo.toml b/noir-r1cs/src/test_programs/bin-opcode/Nargo.toml deleted file mode 100644 index 92c044bfe..000000000 --- a/noir-r1cs/src/test_programs/bin-opcode/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "main" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir-r1cs/src/test_programs/bin-opcode/Prover.toml b/noir-r1cs/src/test_programs/bin-opcode/Prover.toml deleted file mode 100644 index 5d34a6ed4..000000000 --- a/noir-r1cs/src/test_programs/bin-opcode/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -w = 4294960000 -x = 4294960000 diff --git a/noir-r1cs/src/test_programs/bin-opcode/src/main.nr b/noir-r1cs/src/test_programs/bin-opcode/src/main.nr deleted file mode 100644 index f001f4218..000000000 --- a/noir-r1cs/src/test_programs/bin-opcode/src/main.nr +++ /dev/null @@ -1,3 +0,0 @@ -fn main(w: u32, x: u32) -> pub u32 { - w & x -} diff --git a/noir-r1cs/src/test_programs/brillig-conditional/target/main.json b/noir-r1cs/src/test_programs/brillig-conditional/target/main.json deleted file mode 100644 index 2cd5d54dc..000000000 --- a/noir-r1cs/src/test_programs/brillig-conditional/target/main.json +++ /dev/null @@ -1 +0,0 @@ 
-{"noir_version":"1.0.0-beta.3+ceaa1986628197bd1170147f6a07f0f98d21030a","hash":6085181827510053099,"abi":{"parameters":[{"name":"a","type":{"kind":"field"},"visibility":"private"},{"name":"b","type":{"kind":"field"},"visibility":"private"}],"return_type":null,"error_types":{}},"bytecode":"H4sIAAAAAAAA/7WUTQ6DIBCF+bFpu67egxFQ2HmVmsL9j9A2QjIZ2Tm8xAxhzON7k4AUh/TvG8q6VinOqntbqeaaAHtZsziX1jmBhbeZ4x68cX5fAgTwwX/mYG0KLqxxj6uJ4GyC7KPNxQzzqp7ACh3ABa8YGTUjV68Z6obvVVbNx9glt2TOnfIhztx/toFeItF+DGjvSeqE+oyMUP3HPv7m3sg4ofVIcuI5bUwM1U+XehNnKdKr/z4In+TnA8qiG2dVvUod0V6d5xfRZbyygQYAAA==","debug_symbols":"lY9JCsMwEAT/MmcdvIagr4RgtJoBIQktgSD898jGBgd88bG6axqmgFQ8zxNa7SLQVwHjBEvobKUCzRZFz+xKMbGQgPYDAWUl0KFZCGg0CuijXd4E2nt6d0/vr/Sx2/Xx+adX4AGNwXk6f1TjDwvIuFE76mzFqU1ffzTHvQ9OKJmDWpe2rs7/AA==","file_map":{"61":{"source":"fn main(a: Field, b: Field) {\n\tif a == 1 {\n\t\tassert(b == 2);\n\t}\n}\n","path":"/Users/ryan.cao/ryan_tfh/Client_Side_Proving/ProveKit/noir-r1cs/src/test_programs/brillig-conditional/src/main.nr"}},"names":["main"],"brillig_names":["directive_invert"]} \ No newline at end of file diff --git a/noir-r1cs/src/test_programs/range-check/Nargo.toml b/noir-r1cs/src/test_programs/range-check/Nargo.toml deleted file mode 100644 index 92c044bfe..000000000 --- a/noir-r1cs/src/test_programs/range-check/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "main" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir-r1cs/src/test_programs/range-check/Prover.toml b/noir-r1cs/src/test_programs/range-check/Prover.toml deleted file mode 100644 index 06d7b94cf..000000000 --- a/noir-r1cs/src/test_programs/range-check/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = 2 -y = 2410 \ No newline at end of file diff --git a/noir-r1cs/src/test_programs/range-check/src/main.nr b/noir-r1cs/src/test_programs/range-check/src/main.nr deleted file mode 100644 index aa53eeacc..000000000 --- a/noir-r1cs/src/test_programs/range-check/src/main.nr +++ /dev/null @@ -1,3 +0,0 @@ -fn main(x: 
Field, y: Field) -> pub u16 { - (x as u16) + (y as u16) -} diff --git a/noir-r1cs/src/test_programs/read-only-memory/target/main.gz b/noir-r1cs/src/test_programs/read-only-memory/target/main.gz deleted file mode 100644 index 9c6dcd5c6..000000000 Binary files a/noir-r1cs/src/test_programs/read-only-memory/target/main.gz and /dev/null differ diff --git a/noir-r1cs/src/test_programs/read-only-memory/target/main.json b/noir-r1cs/src/test_programs/read-only-memory/target/main.json deleted file mode 100644 index 53954d6b9..000000000 --- a/noir-r1cs/src/test_programs/read-only-memory/target/main.json +++ /dev/null @@ -1 +0,0 @@ -{"noir_version":"1.0.0-beta.3+ceaa1986628197bd1170147f6a07f0f98d21030a","hash":12074195982711912988,"abi":{"parameters":[{"name":"mem","type":{"kind":"array","length":6,"type":{"kind":"field"}},"visibility":"private"},{"name":"addr0","type":{"kind":"field"},"visibility":"private"},{"name":"addr1","type":{"kind":"field"},"visibility":"private"},{"name":"addr2","type":{"kind":"field"},"visibility":"private"}],"return_type":{"abi_type":{"kind":"field"},"visibility":"public"},"error_types":{}},"bytecode":"H4sIAAAAAAAA/+1W2w6DIAxl6rzrt1ABLW/7lZnh/3/CJNa4GN6sDyQ7SQPhcjilFHiIDf1qBdVTKnNx4LFaQn3Zak9qT0QYLyrlRfyuz8gLeQQaa06NsQaqiEBj8w+UhDICjS23xjtSvgnwKjlq7abBgYK3HOyMRmozjwgIBs1nQKUcapzsbCdpQSsHi7FqIeKO2fE7Mqi7we+e2e+UWd8eby4+z9Uz8Lllg0+YNuC3vAaoGDXWjPvHeUH4s5wRVyUOhL5S/jviX7ryNO48d8cXedZKnbcJAAA=","debug_symbols":"hZDfCoMgFMbf5Vx3kbpF+ipjhJWFIBpmgyG9+ywUmghdfn9+34HjYRT9NndST2YF9vKgzMCdNDooD+i01oXrQ62OWwesbSoQegRGm72CSSoBrEH7uwJcqlMa66imWZ+U+gjXCSB1BjyKQEsigFF+4XkDEPwHBNFbqZScu+sfgv3hVvJeiSinTQ+X1H2XlCR+sWYQ42bFsXRmYf4H","file_map":{"61":{"source":"fn main(mem: [Field; 6], addr0: Field, addr1: Field, addr2: Field) -> pub Field {\n mem[addr0] * mem[addr1]\n + mem[addr2]\n + mem[addr0]\n + mem[addr0]\n + mem[addr0] * mem[addr1] * mem[addr1] * mem[addr1]\n + mem[addr2] * mem[addr2]\n + mem[addr2] * 
mem[addr2]\n}\n","path":"/Users/vishruti.ganesh/Desktop/ProveKit/noir-r1cs/src/test_programs/read-only-memory/src/main.nr"}},"names":["main"],"brillig_names":[]} \ No newline at end of file diff --git a/noir-r1cs/src/test_programs/read-write-memory/target/main.json b/noir-r1cs/src/test_programs/read-write-memory/target/main.json deleted file mode 100644 index 7fb7806f2..000000000 --- a/noir-r1cs/src/test_programs/read-write-memory/target/main.json +++ /dev/null @@ -1 +0,0 @@ -{"noir_version":"1.0.0-beta.3+ceaa1986628197bd1170147f6a07f0f98d21030a","hash":17235759886466631013,"abi":{"parameters":[{"name":"mem","type":{"kind":"array","length":6,"type":{"kind":"field"}},"visibility":"private"},{"name":"addr0","type":{"kind":"field"},"visibility":"private"},{"name":"addr1","type":{"kind":"field"},"visibility":"private"},{"name":"addr","type":{"kind":"field"},"visibility":"private"}],"return_type":{"abi_type":{"kind":"field"},"visibility":"public"},"error_types":{}},"bytecode":"H4sIAAAAAAAA/+1WSQ7DIAxM0jRLm8dglmBu/UpRyf+fUGhdFUW5hRwsZSQLxGLGHtmirr6YovU0v9DYVX/U0Rraa6Ndab2ptvGgUexE/n5Bv9Ax4HgryZGrUD0DjndGQsFRSeBQUadQTCrqbH0xCQMDjtMBHD9oChMdM19KzFoHKwMoeArpPBqhjZ8REAyal0SlAmq0zjsrHGgVYDFOLeSsZNBJkJZ8jVn8Wx+u1GJT9Q6rc+u7P7wBpAFwXN0JAAA=","debug_symbols":"bdDRCoMgFAbgdznXXWjaIl9ljLCyEETDdDCkd5/FHCVe/uf3O8gJMInBL73Us9mAPQMoM3InjY4pAD5H28r1kTbHrQPW0QqEnoBhRPcKZqkEsAfeXxXUpfe4RgkQlAFSBKhNALcZoEVASAKUZKApgub/paa7gRgGK5WSS3+9RBy/uZV8UOIXZ6/HS+s+a2qSX60ZxeStODadXVz/BQ==","file_map":{"61":{"source":"fn main(mut mem: [Field; 6], addr0: Field, addr1: Field, addr: Field) -> pub Field {\n\tlet z = mem[addr0];\n\tmem[addr0] = mem[addr1];\n\tmem[addr1] = z;\n\tmem[addr]\n}\n","path":"/Users/ryan.cao/ryan_tfh/Client_Side_Proving/ProveKit/noir-r1cs/src/test_programs/read-write-memory/src/main.nr"}},"names":["main"],"brillig_names":[]} \ No newline at end of file diff --git a/noir-r1cs/src/test_programs/small-sha/Nargo.toml b/noir-r1cs/src/test_programs/small-sha/Nargo.toml deleted file 
mode 100644 index dcc9332ea..000000000 --- a/noir-r1cs/src/test_programs/small-sha/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "main" -type = "bin" -authors = [""] - -[dependencies] -noir_native_sha256 = { path = "../../../../noir-examples/noir-passport-examples/noir_native_sha256" } diff --git a/noir-r1cs/src/test_programs/small-sha/Prover.toml b/noir-r1cs/src/test_programs/small-sha/Prover.toml deleted file mode 100644 index fe7e087c0..000000000 --- a/noir-r1cs/src/test_programs/small-sha/Prover.toml +++ /dev/null @@ -1,189 +0,0 @@ -a = [ - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, - 120, - 98, - 76, -] -# dg1 = [ -# 42, -# 19, -# 88, -# 237, -# 91, -# 63, -# 129, -# 7, -# 250, -# 198, -# 14, -# 76, -# 211, -# 45, -# 5, -# 99, -# 170, -# 38, -# 240, -# 201, -# 64, -# 123, -# 3, -# 81, -# 33, -# 156, -# 212, -# 18, -# 199, -# 111, -# 222, -# 61, -# 143, -# 247, -# 8, -# 195, -# 29, -# 157, -# 132, -# 71, -# 102, -# 17, -# 173, -# 108, -# 245, -# 214, -# 224, -# 66, -# 115, -# 86, -# 189, -# 234, -# 90, -# 44, -# 207, -# 16, -# 58, -# 134, -# 103, -# 251, -# 193, -# 35, -# 140, -# 248, -# 79, -# 105, -# 202, -# 137, -# 100, -# 196, -# 125, -# 31, -# 83, -# 41, -# 209, -# 93, -# 10, -# 229, -# 144, -# 116, -# 225, -# 67, -# 133, -# 204, -# 77, -# 112, -# 69, -# 253, -# 154, -# 246, -# 17, -# 99, -# 88, -# 0, -# 0, -# ] diff --git a/noir-r1cs/src/test_programs/small-sha/src/main.nr b/noir-r1cs/src/test_programs/small-sha/src/main.nr deleted file mode 
100644 index ed0bbe036..000000000 --- a/noir-r1cs/src/test_programs/small-sha/src/main.nr +++ /dev/null @@ -1,13 +0,0 @@ -fn main(a: [u8; 90]) -> pub [u8; 32] { - let mut x = 3; - if a[0] == 1 { - // This is not true, but it doesn't matter - x = 5; - } - let result = noir_native_sha256::ryan_sha256_noir::sha256_var(a, x + (a.len() as u64) - x); - - println("The result is:"); - println(result); - - result -} diff --git a/noir-r1cs/src/utils/field_utils.rs b/noir-r1cs/src/utils/field_utils.rs deleted file mode 100644 index b04ca6f00..000000000 --- a/noir-r1cs/src/utils/field_utils.rs +++ /dev/null @@ -1,18 +0,0 @@ -use acir::{AcirField, FieldElement}; - -pub fn pow_field(base: FieldElement, exponent: u32) -> FieldElement { - let mut exponent_to_bits: Vec = (0..32).map(|i| (exponent >> i) & 1 != 0).collect(); - // Truncate to only get the most significant bits. - while let Some(false) = exponent_to_bits.last() { - exponent_to_bits.pop(); - } - let mut field_element_exponentiated = FieldElement::one(); - let mut repeated_squaring_value = base; - exponent_to_bits.iter().for_each(|bit| { - if *bit { - field_element_exponentiated = field_element_exponentiated * repeated_squaring_value; - } - repeated_squaring_value = repeated_squaring_value * repeated_squaring_value; - }); - field_element_exponentiated -} diff --git a/noir-r1cs/tests/compiler.rs b/noir-r1cs/tests/compiler.rs deleted file mode 100644 index b5188a2a6..000000000 --- a/noir-r1cs/tests/compiler.rs +++ /dev/null @@ -1,54 +0,0 @@ -use { - noir_r1cs::NoirProofScheme, noir_tools::compile_workspace, serde::Deserialize, std::path::Path, - test_case::test_case, -}; - -#[derive(Debug, Deserialize)] -struct NargoToml { - package: NargoTomlPackage, -} - -#[derive(Debug, Deserialize)] -struct NargoTomlPackage { - name: String, -} - -fn test_compiler(test_case_path: impl AsRef) { - let test_case_path = test_case_path.as_ref(); - - compile_workspace(test_case_path).expect("Compiling workspace"); - - let nargo_toml_path = 
test_case_path.join("Nargo.toml"); - - let nargo_toml = std::fs::read_to_string(&nargo_toml_path).expect("Reading Nargo.toml"); - let nargo_toml: NargoToml = toml::from_str(&nargo_toml).expect("Deserializing Nargo.toml"); - - let package_name = nargo_toml.package.name; - - let circuit_path = test_case_path.join(format!("target/{package_name}.json")); - let witness_file_path = test_case_path.join("Prover.toml"); - - let proof_schema = NoirProofScheme::from_file(&circuit_path).expect("Reading proof scheme"); - let input_map = proof_schema - .read_witness(&witness_file_path) - .expect("Reading witness data"); - - let _proof = proof_schema - .prove(&input_map) - .expect("While proving Noir program statement"); -} - -#[test_case("../noir-examples/noir-r1cs-test-programs/acir_assert_zero")] -#[test_case("../noir-examples/noir-r1cs-test-programs/simplest-read-only-memory")] -#[test_case("../noir-examples/noir-r1cs-test-programs/read-only-memory")] -#[test_case("../noir-examples/noir-r1cs-test-programs/range-check-u8")] -#[test_case("../noir-examples/noir-r1cs-test-programs/range-check-u16")] -#[test_case("../noir-examples/noir-r1cs-test-programs/range-check-mixed-bases")] -#[test_case("../noir-examples/noir-r1cs-test-programs/read-write-memory")] -#[test_case("../noir-examples/noir-r1cs-test-programs/conditional-write")] -#[test_case("../noir-examples/noir-r1cs-test-programs/bin-opcode")] -#[test_case("../noir-examples/noir-r1cs-test-programs/small-sha")] -#[test_case("../noir-examples/noir-passport-examples/complete_age_check"; "complete_age_check")] -fn case(path: &str) { - test_compiler(path); -} diff --git a/noir-tools/src/lib.rs b/noir-tools/src/lib.rs deleted file mode 100644 index 04771f667..000000000 --- a/noir-tools/src/lib.rs +++ /dev/null @@ -1,29 +0,0 @@ -use { - anyhow::Result, - nargo::workspace::Workspace, - nargo_cli::cli::compile_cmd::compile_workspace_full, - nargo_toml::{resolve_workspace_from_toml, PackageSelection}, - noirc_driver::CompileOptions, - 
std::path::Path, -}; - -pub fn compile_workspace(workspace_path: impl AsRef) -> Result { - let workspace_path = workspace_path.as_ref(); - let workspace_path = if workspace_path.ends_with("Nargo.toml") { - workspace_path.to_owned() - } else { - workspace_path.join("Nargo.toml") - }; - - // `resolve_workspace_from_toml` calls .normalize() under the hood which messes - // up path resolution - let workspace_path = workspace_path.canonicalize()?; - - let workspace = - resolve_workspace_from_toml(&workspace_path, PackageSelection::DefaultOrAll, None)?; - let compile_options = CompileOptions::default(); - - compile_workspace_full(&workspace, &compile_options, None)?; - - Ok(workspace) -} diff --git a/params b/params deleted file mode 100644 index 6096e5b90..000000000 --- a/params +++ /dev/null @@ -1 +0,0 @@ -{"n_rounds":0,"n_vars":4,"rate":1,"folding_factor":[],"ood_samples":[],"num_queries":[],"pow_bits":[],"final_queries":98,"final_pow_bits":2,"final_folding_pow_bits":0,"domain_generator":"4419234939496763621076330863786513495701855246241724391626358375488475697872","io_pattern":"🌪️\u0000S3rand\u0000A4Sumcheck Polynomials\u0000S1Sumcheck Random\u0000A4Sumcheck Polynomials\u0000S1Sumcheck Random\u0000A4Sumcheck Polynomials\u0000S1Sumcheck 
Random\u0000A1merkle_digest\u0000S1ood_query\u0000A1ood_ans\u0000S1initial_combination_randomness\u0000A3sumcheck_poly\u0000S1folding_randomness\u0000A3sumcheck_poly\u0000S1folding_randomness\u0000A3sumcheck_poly\u0000S1folding_randomness\u0000A3sumcheck_poly\u0000S1folding_randomness\u0000A1final_coeffs\u0000S7final_queries\u0000S3pow_queries\u0000A8pow-nonce","transcript":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,167,39,73,70,53,17,77,26,201,207,235,111,136,199,218,152,94,24,78,40,133,249,208,19,61,219,123,174,105,131,189,9,169,222,25,132,50,160,168,68,215,203,225,208,56,157,168,7,240,207,165,223,104,96,177,94,83,214,106,228,155,12,177,36,177,249,156,37,44,68,236,228,240,212,235,56,135,131,176,135,14,112,141,121,200,235,205,69,153,238,74,78,109,190,245,1,70,48,198,190,72,223,121,195,247,199,59,10,38,160,106,56,144,244,226,158,8,20,34,113,115,127,85,230,123,50,77,24,153,195,40,48,4,62,147,146,12,96,10,86,124,31,137,70,34,191,240,142,35,88,48,166,144,99,138,234,178,176,200,29,189,154,74,84,21,166,0,214,191,191,48,83,186,194,152,33,169,186,20,62,54,24,93,17,234,172,24,223,250,175,174,36,32,199,231,80,188,106,5,9,47,238,234,84,118,108,68,109,2,119,91,84,51,164,95,235,150,233,60,27,11,253,110,19,197,36,64,35,233,254,175,152,215,11,107,250,51,52,252,97,113,73,239,46,223,108,41,21,111,133,240,239,219,76,141,33,200,59,132,181,201,164,31,14,29,247,204,149,113,38,104,72,91,168,175,129,182,252,3,22,181,72,8,242,109,84,68,3,104,58,70,194,94,13,53,155,225,17,144,0,200,135,88,74,140,235,99,223,204,21,87,9,71,179,36,217,240,206,179,36,198,54,86,159,159,73,14,247,144,181,91,36,155,101,202,143,108,25,198,129,66,93,47,126,89,230,255,12,71,81,149,30,228,122,201,167,56,105,223,48,17,9,191,50,40,38,7,4,132,155,197,52,24,244,20,115,152,142,208,7,99,208,76,35,64,102,7,156,122,95,143,88,94,201,244,102,145,49,172,215,120,211,198,4,69,197,157,112,207,158,38,106,114,98,75,24,70,140,165,3,175,70,228,116,219,166,179,101,248,157,31,87,199,2,199,132,71,24,68,81,249,75,2,2
37,54,234,19,28,175,97,83,171,164,161,92,45,118,195,148,3,97,20,158,151,132,90,99,55,48,41,235,191,103,214,249,105,134,26,92,25,156,58,169,48,135,130,67,9,74,229,120,39,253,194,49,177,187,207,71,3,109,63,47,161,68,39,227,252,147,244,48,36,173,33,24,234,51,187,142,55,133,216,185,12,155,89,244,40,71,196,226,118,225,17,31,115,162,115,69,59,239,139,214,29,162,239,163,153,112,63,194,165,64,182,133,163,2,221,131,87,66,54,200,195,219,29,82,10,17,177,150,226,27,173,102,40,161,24,93,114,156,32,14,78,187,59,89,228,224,169,173,86,48,172,217,166,62,21,128,6,152,243,216,126,233,233,252,11,63,233,14,204,166,97,178,164,212,3,18,74,18,152,23,212,38,23,133,64,226,214,133,209,190,126,148,103,56,130,38,8,49,156,112,14,170,142,36,59,75,176,69,231,225,9,232,143,167,30,15,226,68,202,77,214,194,138,124,169,38,43,74,38,128,123,194,88,218,187,189,46,20,153,228,250,145,135,206,29,206,200,161,73,82,86,73,1,51,23,206,48,95,67,183,39,24,2,134,58,16,86,169,153,105,17,30,174,233,59,182,206,158,128,204,145,220,25,75,183,3,146,27,9,71,184,46,24,30,84,173,67,205,167,192,202,154,94,137,252,147,79,131,82,194,103,100,238,81,244,188,245,196,0,200,210,212,187,56,34,55,129,192,242,198,41,178,180,112,236,63,70,67,54,192,196,13,199,135,195,146,122,29,129,246,243,124,159,90,103,245,30,52,47,173,34,191,187,193,135,76,82,137,213,57,165,231,224,207,198,164,29,124,168,241,151,84,86,244,102,242,142,104,11,0,0,0,0,0,0,0,1],"transcript_len":872} \ No newline at end of file diff --git a/cm31_ntt/Cargo.toml b/playground/cm31_ntt/Cargo.toml similarity index 100% rename from cm31_ntt/Cargo.toml rename to playground/cm31_ntt/Cargo.toml diff --git a/cm31_ntt/README.md b/playground/cm31_ntt/README.md similarity index 100% rename from cm31_ntt/README.md rename to playground/cm31_ntt/README.md diff --git a/cm31_ntt/benches/ntt_block_8.rs b/playground/cm31_ntt/benches/ntt_block_8.rs similarity index 100% rename from cm31_ntt/benches/ntt_block_8.rs rename to playground/cm31_ntt/benches/ntt_block_8.rs diff --git 
a/cm31_ntt/benches/ntt_r8_hybrid_p.rs b/playground/cm31_ntt/benches/ntt_r8_hybrid_p.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_hybrid_p.rs rename to playground/cm31_ntt/benches/ntt_r8_hybrid_p.rs diff --git a/cm31_ntt/benches/ntt_r8_hybrid_ps.rs b/playground/cm31_ntt/benches/ntt_r8_hybrid_ps.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_hybrid_ps.rs rename to playground/cm31_ntt/benches/ntt_r8_hybrid_ps.rs diff --git a/cm31_ntt/benches/ntt_r8_ip.rs b/playground/cm31_ntt/benches/ntt_r8_ip.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_ip.rs rename to playground/cm31_ntt/benches/ntt_r8_ip.rs diff --git a/cm31_ntt/benches/ntt_r8_ip_p.rs b/playground/cm31_ntt/benches/ntt_r8_ip_p.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_ip_p.rs rename to playground/cm31_ntt/benches/ntt_r8_ip_p.rs diff --git a/cm31_ntt/benches/ntt_r8_s2_hybrid_p.rs b/playground/cm31_ntt/benches/ntt_r8_s2_hybrid_p.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_s2_hybrid_p.rs rename to playground/cm31_ntt/benches/ntt_r8_s2_hybrid_p.rs diff --git a/cm31_ntt/benches/ntt_r8_s4_hybrid_p.rs b/playground/cm31_ntt/benches/ntt_r8_s4_hybrid_p.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_s4_hybrid_p.rs rename to playground/cm31_ntt/benches/ntt_r8_s4_hybrid_p.rs diff --git a/cm31_ntt/benches/ntt_r8_vec.rs b/playground/cm31_ntt/benches/ntt_r8_vec.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_vec.rs rename to playground/cm31_ntt/benches/ntt_r8_vec.rs diff --git a/cm31_ntt/benches/ntt_r8_vec_p.rs b/playground/cm31_ntt/benches/ntt_r8_vec_p.rs similarity index 100% rename from cm31_ntt/benches/ntt_r8_vec_p.rs rename to playground/cm31_ntt/benches/ntt_r8_vec_p.rs diff --git a/cm31_ntt/src/cm31.rs b/playground/cm31_ntt/src/cm31.rs similarity index 100% rename from cm31_ntt/src/cm31.rs rename to playground/cm31_ntt/src/cm31.rs diff --git a/cm31_ntt/src/lib.rs b/playground/cm31_ntt/src/lib.rs similarity index 100% 
rename from cm31_ntt/src/lib.rs rename to playground/cm31_ntt/src/lib.rs diff --git a/cm31_ntt/src/ntt.rs b/playground/cm31_ntt/src/ntt.rs similarity index 100% rename from cm31_ntt/src/ntt.rs rename to playground/cm31_ntt/src/ntt.rs diff --git a/cm31_ntt/src/ntt_utils.rs b/playground/cm31_ntt/src/ntt_utils.rs similarity index 100% rename from cm31_ntt/src/ntt_utils.rs rename to playground/cm31_ntt/src/ntt_utils.rs diff --git a/cm31_ntt/src/rm31.rs b/playground/cm31_ntt/src/rm31.rs similarity index 100% rename from cm31_ntt/src/rm31.rs rename to playground/cm31_ntt/src/rm31.rs diff --git a/sage/.gitignore b/playground/sage/.gitignore similarity index 100% rename from sage/.gitignore rename to playground/sage/.gitignore diff --git a/sage/GR1CS.pdf b/playground/sage/GR1CS.pdf similarity index 100% rename from sage/GR1CS.pdf rename to playground/sage/GR1CS.pdf diff --git a/sage/GR1CS.typst b/playground/sage/GR1CS.typst similarity index 100% rename from sage/GR1CS.typst rename to playground/sage/GR1CS.typst diff --git a/sage/M31.pdf b/playground/sage/M31.pdf similarity index 100% rename from sage/M31.pdf rename to playground/sage/M31.pdf diff --git a/sage/M31.typst b/playground/sage/M31.typst similarity index 100% rename from sage/M31.typst rename to playground/sage/M31.typst diff --git a/sage/System.pdf b/playground/sage/System.pdf similarity index 100% rename from sage/System.pdf rename to playground/sage/System.pdf diff --git a/sage/System.typst b/playground/sage/System.typst similarity index 100% rename from sage/System.typst rename to playground/sage/System.typst diff --git a/sage/fri-and-friends/README.md b/playground/sage/fri-and-friends/README.md similarity index 100% rename from sage/fri-and-friends/README.md rename to playground/sage/fri-and-friends/README.md diff --git a/sage/fri-and-friends/Zero Knowledge for WHIR.md b/playground/sage/fri-and-friends/Zero Knowledge for WHIR.md similarity index 100% rename from sage/fri-and-friends/Zero Knowledge for WHIR.md 
rename to playground/sage/fri-and-friends/Zero Knowledge for WHIR.md diff --git a/sage/fri-and-friends/coding_theory.ipynb b/playground/sage/fri-and-friends/coding_theory.ipynb similarity index 100% rename from sage/fri-and-friends/coding_theory.ipynb rename to playground/sage/fri-and-friends/coding_theory.ipynb diff --git a/sage/fri-and-friends/fri.ipynb b/playground/sage/fri-and-friends/fri.ipynb similarity index 100% rename from sage/fri-and-friends/fri.ipynb rename to playground/sage/fri-and-friends/fri.ipynb diff --git a/sage/fri-and-friends/src/chat_gpt_generated.py b/playground/sage/fri-and-friends/src/chat_gpt_generated.py similarity index 100% rename from sage/fri-and-friends/src/chat_gpt_generated.py rename to playground/sage/fri-and-friends/src/chat_gpt_generated.py diff --git a/sage/fri-and-friends/src/fri.py b/playground/sage/fri-and-friends/src/fri.py similarity index 100% rename from sage/fri-and-friends/src/fri.py rename to playground/sage/fri-and-friends/src/fri.py diff --git a/sage/fri-and-friends/src/proth_primes.py b/playground/sage/fri-and-friends/src/proth_primes.py similarity index 100% rename from sage/fri-and-friends/src/proth_primes.py rename to playground/sage/fri-and-friends/src/proth_primes.py diff --git a/sage/fri-and-friends/src/whir.py b/playground/sage/fri-and-friends/src/whir.py similarity index 100% rename from sage/fri-and-friends/src/whir.py rename to playground/sage/fri-and-friends/src/whir.py diff --git a/sage/fri-and-friends/src/zkwhir.py b/playground/sage/fri-and-friends/src/zkwhir.py similarity index 100% rename from sage/fri-and-friends/src/zkwhir.py rename to playground/sage/fri-and-friends/src/zkwhir.py diff --git a/sage/fri-and-friends/whir.ipynb b/playground/sage/fri-and-friends/whir.ipynb similarity index 100% rename from sage/fri-and-friends/whir.ipynb rename to playground/sage/fri-and-friends/whir.ipynb diff --git a/sage/mersene-31.ipynb b/playground/sage/mersene-31.ipynb similarity index 100% rename from 
sage/mersene-31.ipynb rename to playground/sage/mersene-31.ipynb diff --git a/sage/preamble.typst b/playground/sage/preamble.typst similarity index 100% rename from sage/preamble.typst rename to playground/sage/preamble.typst diff --git a/sage/references.bib b/playground/sage/references.bib similarity index 100% rename from sage/references.bib rename to playground/sage/references.bib diff --git a/provekit/common/Cargo.toml b/provekit/common/Cargo.toml new file mode 100644 index 000000000..340c7287f --- /dev/null +++ b/provekit/common/Cargo.toml @@ -0,0 +1,46 @@ +[package] +name = "provekit-common" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +# Workspace crates +skyscraper.workspace = true + +# Noir language +acir.workspace = true +noir_artifact_cli.workspace = true +noirc_abi.workspace = true + +# Cryptography and proof systems +ark-bn254.workspace = true +ark-crypto-primitives.workspace = true +ark-ff.workspace = true +ark-serialize.workspace = true +ark-std.workspace = true +spongefish.workspace = true +spongefish-pow.workspace = true +whir.workspace = true + +# 3rd party +anyhow.workspace = true +bytes.workspace = true +hex.workspace = true +postcard.workspace = true +rand08.workspace = true +rayon.workspace = true +ruint.workspace = true +serde.workspace = true +serde_json.workspace = true +tracing.workspace = true +zerocopy.workspace = true +zeroize.workspace = true +zstd.workspace = true + +[lints] +workspace = true diff --git a/noir-r1cs/src/file/bin.rs b/provekit/common/src/file/bin.rs similarity index 100% rename from noir-r1cs/src/file/bin.rs rename to provekit/common/src/file/bin.rs diff --git a/noir-r1cs/src/file/buf_ext.rs b/provekit/common/src/file/buf_ext.rs similarity index 100% rename from noir-r1cs/src/file/buf_ext.rs rename to provekit/common/src/file/buf_ext.rs diff --git 
a/noir-r1cs/src/file/counting_writer.rs b/provekit/common/src/file/counting_writer.rs similarity index 100% rename from noir-r1cs/src/file/counting_writer.rs rename to provekit/common/src/file/counting_writer.rs diff --git a/noir-r1cs/src/file/json.rs b/provekit/common/src/file/json.rs similarity index 100% rename from noir-r1cs/src/file/json.rs rename to provekit/common/src/file/json.rs diff --git a/noir-r1cs/src/file/mod.rs b/provekit/common/src/file/mod.rs similarity index 96% rename from noir-r1cs/src/file/mod.rs rename to provekit/common/src/file/mod.rs index a01838f40..4165752f1 100644 --- a/noir-r1cs/src/file/mod.rs +++ b/provekit/common/src/file/mod.rs @@ -10,7 +10,7 @@ use { counting_writer::CountingWriter, json::{read_json, write_json}, }, - crate::{noir_proof_scheme::NoirProof, NoirProofScheme}, + crate::{NoirProof, NoirProofScheme}, anyhow::Result, serde::{Deserialize, Serialize}, std::{ffi::OsStr, path::Path}, diff --git a/noir-r1cs/src/interner.rs b/provekit/common/src/interner.rs similarity index 100% rename from noir-r1cs/src/interner.rs rename to provekit/common/src/interner.rs diff --git a/provekit/common/src/lib.rs b/provekit/common/src/lib.rs new file mode 100644 index 000000000..b60f6921f --- /dev/null +++ b/provekit/common/src/lib.rs @@ -0,0 +1,24 @@ +pub mod file; +mod interner; +mod noir_proof_scheme; +mod r1cs; +pub mod skyscraper; +mod sparse_matrix; +pub mod utils; +mod whir_r1cs; +pub mod witness; + +use crate::{ + interner::{InternedFieldElement, Interner}, + sparse_matrix::{HydratedSparseMatrix, SparseMatrix}, +}; +pub use { + acir::FieldElement as NoirElement, + noir_proof_scheme::{NoirProof, NoirProofScheme}, + r1cs::R1CS, + whir::crypto::fields::Field256 as FieldElement, + whir_r1cs::{IOPattern, WhirConfig, WhirR1CSProof, WhirR1CSScheme}, +}; + +#[cfg(test)] +mod tests {} diff --git a/provekit/common/src/noir_proof_scheme.rs b/provekit/common/src/noir_proof_scheme.rs new file mode 100644 index 000000000..b5a9cb3a2 --- /dev/null +++ 
b/provekit/common/src/noir_proof_scheme.rs @@ -0,0 +1,42 @@ +use { + crate::{ + whir_r1cs::{WhirR1CSProof, WhirR1CSScheme}, + witness::{NoirWitnessGenerator, WitnessBuilder}, + NoirElement, R1CS, + }, + acir::circuit::Program, + anyhow::Result, + noir_artifact_cli::fs::inputs::read_inputs_from_file, + noirc_abi::InputMap, + serde::{Deserialize, Serialize}, + std::path::Path, +}; + +/// A scheme for proving a Noir program. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct NoirProofScheme { + pub program: Program, + pub r1cs: R1CS, + pub witness_builders: Vec, + pub witness_generator: NoirWitnessGenerator, + pub whir_for_witness: WhirR1CSScheme, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct NoirProof { + pub whir_r1cs_proof: WhirR1CSProof, +} + +impl NoirProofScheme { + #[must_use] + pub const fn size(&self) -> (usize, usize) { + (self.r1cs.num_constraints(), self.r1cs.num_witnesses()) + } + + pub fn read_witness(&self, prover_toml: impl AsRef) -> Result { + let (input_map, _expected_return) = + read_inputs_from_file(prover_toml.as_ref(), self.witness_generator.abi())?; + + Ok(input_map) + } +} diff --git a/noir-r1cs/src/r1cs.rs b/provekit/common/src/r1cs.rs similarity index 63% rename from noir-r1cs/src/r1cs.rs rename to provekit/common/src/r1cs.rs index 531e74f70..48392bb0e 100644 --- a/noir-r1cs/src/r1cs.rs +++ b/provekit/common/src/r1cs.rs @@ -1,13 +1,6 @@ use { - crate::{ - r1cs_solver::WitnessBuilder, skyscraper::SkyscraperSponge, FieldElement, - HydratedSparseMatrix, Interner, SparseMatrix, - }, - acir::{native_types::WitnessMap, FieldElement as NoirFieldElement}, - anyhow::{ensure, Result}, + crate::{FieldElement, HydratedSparseMatrix, Interner, SparseMatrix}, serde::{Deserialize, Serialize}, - spongefish::ProverState, - tracing::instrument, }; /// Represents a R1CS constraint system. @@ -108,42 +101,4 @@ impl R1CS { ); } } - - /// Given the ACIR witness values, solve for the R1CS witness values. 
- pub fn solve_witness_vec( - &self, - witness_builder_vec: &[WitnessBuilder], - acir_witness_idx_to_value_map: &WitnessMap, - transcript: &mut ProverState, - ) -> Vec> { - let mut witness = vec![None; self.num_witnesses()]; - witness_builder_vec.iter().for_each(|witness_builder| { - witness_builder.solve(acir_witness_idx_to_value_map, &mut witness, transcript); - }); - witness - } - - // Tests R1CS Witness satisfaction given the constraints provided by the - // R1CS Matrices. - #[instrument(skip_all, fields(size = witness.len()))] - pub fn test_witness_satisfaction(&self, witness: &[FieldElement]) -> Result<()> { - ensure!( - witness.len() == self.num_witnesses(), - "Witness size does not match" - ); - - // Verify - let a = self.a() * witness; - let b = self.b() * witness; - let c = self.c() * witness; - for (row, ((a, b), c)) in a - .into_iter() - .zip(b.into_iter()) - .zip(c.into_iter()) - .enumerate() - { - ensure!(a * b == c, "Constraint {row} failed"); - } - Ok(()) - } } diff --git a/noir-r1cs/src/skyscraper/mod.rs b/provekit/common/src/skyscraper/mod.rs similarity index 100% rename from noir-r1cs/src/skyscraper/mod.rs rename to provekit/common/src/skyscraper/mod.rs diff --git a/noir-r1cs/src/skyscraper/pow.rs b/provekit/common/src/skyscraper/pow.rs similarity index 100% rename from noir-r1cs/src/skyscraper/pow.rs rename to provekit/common/src/skyscraper/pow.rs diff --git a/noir-r1cs/src/skyscraper/sponge.rs b/provekit/common/src/skyscraper/sponge.rs similarity index 100% rename from noir-r1cs/src/skyscraper/sponge.rs rename to provekit/common/src/skyscraper/sponge.rs diff --git a/noir-r1cs/src/skyscraper/whir.rs b/provekit/common/src/skyscraper/whir.rs similarity index 100% rename from noir-r1cs/src/skyscraper/whir.rs rename to provekit/common/src/skyscraper/whir.rs diff --git a/noir-r1cs/src/sparse_matrix.rs b/provekit/common/src/sparse_matrix.rs similarity index 100% rename from noir-r1cs/src/sparse_matrix.rs rename to provekit/common/src/sparse_matrix.rs 
diff --git a/noir-r1cs/src/utils/file_io.rs b/provekit/common/src/utils/file_io.rs similarity index 100% rename from noir-r1cs/src/utils/file_io.rs rename to provekit/common/src/utils/file_io.rs diff --git a/noir-r1cs/src/utils/helpers.rs b/provekit/common/src/utils/helpers.rs similarity index 100% rename from noir-r1cs/src/utils/helpers.rs rename to provekit/common/src/utils/helpers.rs diff --git a/noir-r1cs/src/utils/mod.rs b/provekit/common/src/utils/mod.rs similarity index 99% rename from noir-r1cs/src/utils/mod.rs rename to provekit/common/src/utils/mod.rs index c9f0bb1f0..07cadbe2e 100644 --- a/noir-r1cs/src/utils/mod.rs +++ b/provekit/common/src/utils/mod.rs @@ -1,4 +1,4 @@ -pub mod file_io; +// pub mod file_io; mod print_abi; pub mod serde_ark; pub mod serde_ark_option; diff --git a/noir-r1cs/src/utils/print_abi.rs b/provekit/common/src/utils/print_abi.rs similarity index 100% rename from noir-r1cs/src/utils/print_abi.rs rename to provekit/common/src/utils/print_abi.rs diff --git a/noir-r1cs/src/utils/serde_ark.rs b/provekit/common/src/utils/serde_ark.rs similarity index 100% rename from noir-r1cs/src/utils/serde_ark.rs rename to provekit/common/src/utils/serde_ark.rs diff --git a/noir-r1cs/src/utils/serde_ark_option.rs b/provekit/common/src/utils/serde_ark_option.rs similarity index 100% rename from noir-r1cs/src/utils/serde_ark_option.rs rename to provekit/common/src/utils/serde_ark_option.rs diff --git a/noir-r1cs/src/utils/serde_hex.rs b/provekit/common/src/utils/serde_hex.rs similarity index 100% rename from noir-r1cs/src/utils/serde_hex.rs rename to provekit/common/src/utils/serde_hex.rs diff --git a/noir-r1cs/src/utils/serde_jsonify.rs b/provekit/common/src/utils/serde_jsonify.rs similarity index 100% rename from noir-r1cs/src/utils/serde_jsonify.rs rename to provekit/common/src/utils/serde_jsonify.rs diff --git a/noir-r1cs/src/utils/sumcheck.rs b/provekit/common/src/utils/sumcheck.rs similarity index 100% rename from noir-r1cs/src/utils/sumcheck.rs 
rename to provekit/common/src/utils/sumcheck.rs diff --git a/noir-r1cs/src/utils/zk_utils.rs b/provekit/common/src/utils/zk_utils.rs similarity index 100% rename from noir-r1cs/src/utils/zk_utils.rs rename to provekit/common/src/utils/zk_utils.rs diff --git a/provekit/common/src/whir_r1cs.rs b/provekit/common/src/whir_r1cs.rs new file mode 100644 index 000000000..8798472ef --- /dev/null +++ b/provekit/common/src/whir_r1cs.rs @@ -0,0 +1,56 @@ +use { + crate::{ + skyscraper::{SkyscraperMerkleConfig, SkyscraperPoW, SkyscraperSponge}, + utils::{serde_hex, sumcheck::SumcheckIOPattern}, + FieldElement, + }, + serde::{Deserialize, Serialize}, + spongefish::DomainSeparator, + std::fmt::{Debug, Formatter}, + tracing::instrument, + whir::whir::{domainsep::WhirDomainSeparator, parameters::WhirConfig as GenericWhirConfig}, +}; + +pub type WhirConfig = GenericWhirConfig; +pub type IOPattern = DomainSeparator; + +#[derive(Clone, PartialEq, Serialize, Deserialize)] +pub struct WhirR1CSScheme { + pub m: usize, + pub m_0: usize, + pub a_num_terms: usize, + pub whir_witness: WhirConfig, + pub whir_for_hiding_spartan: WhirConfig, +} + +impl WhirR1CSScheme { + #[instrument(skip_all)] + pub fn create_io_pattern(&self) -> IOPattern { + let io = IOPattern::new("🌪️") + .commit_statement(&self.whir_witness) + .add_rand(self.m_0) + .commit_statement(&self.whir_for_hiding_spartan) + .add_zk_sumcheck_polynomials(self.m_0) + .add_whir_proof(&self.whir_for_hiding_spartan) + .hint("claimed_evaluations") + .add_whir_proof(&self.whir_witness); + + io + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct WhirR1CSProof { + #[serde(with = "serde_hex")] + pub transcript: Vec, +} + +// TODO: Implement Debug for WhirConfig and derive. 
+impl Debug for WhirR1CSScheme { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("WhirR1CSScheme") + .field("m", &self.m) + .field("m_0", &self.m_0) + .finish() + } +} diff --git a/provekit/common/src/witness/binops.rs b/provekit/common/src/witness/binops.rs new file mode 100644 index 000000000..6f181f19c --- /dev/null +++ b/provekit/common/src/witness/binops.rs @@ -0,0 +1,9 @@ +/// The number of bits that ACIR uses for the inputs and output of the binop. +pub const BINOP_BITS: usize = 32; + +/// The number of bits that used by us for the inputs and output of the binop. +/// 2x this number of bits is used for the lookup table. +pub const BINOP_ATOMIC_BITS: usize = 8; + +/// Each operand is decomposed into this many digits. +pub const NUM_DIGITS: usize = BINOP_BITS / BINOP_ATOMIC_BITS; diff --git a/provekit/common/src/witness/digits.rs b/provekit/common/src/witness/digits.rs new file mode 100644 index 000000000..5511b6796 --- /dev/null +++ b/provekit/common/src/witness/digits.rs @@ -0,0 +1,21 @@ +use serde::{Deserialize, Serialize}; + +/// Allocates witnesses for the digital decomposition of the given witnesses +/// into its digits in the given bases. A log base is specified for each digit +/// (permitting mixed base decompositions). The order of bases is little-endian. +/// Witnesses are grouped by digital place, in the order of the bases, +/// where each group of witnesses is in 1:1 correspondence with +/// witnesses_to_decompose. 
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct DigitalDecompositionWitnesses { + /// The log base of each digit (in little-endian order) + pub log_bases: Vec, + /// The number of witnesses to decompose + pub num_witnesses_to_decompose: usize, + /// Witness indices of the values to be decomposed + pub witnesses_to_decompose: Vec, + /// The index of the first witness written to + pub first_witness_idx: usize, + /// The number of witnesses written to + pub num_witnesses: usize, +} diff --git a/provekit/common/src/witness/mod.rs b/provekit/common/src/witness/mod.rs new file mode 100644 index 000000000..7e25df450 --- /dev/null +++ b/provekit/common/src/witness/mod.rs @@ -0,0 +1,38 @@ +mod binops; +mod digits; +mod ram; +mod witness_builder; +mod witness_generator; + +use { + crate::{utils::serde_ark, FieldElement}, + ark_ff::One, + serde::{Deserialize, Serialize}, +}; +pub use { + binops::{BINOP_ATOMIC_BITS, BINOP_BITS, NUM_DIGITS}, + digits::DigitalDecompositionWitnesses, + ram::{SpiceMemoryOperation, SpiceWitnesses}, + witness_builder::{ + ConstantTerm, ProductLinearTerm, SumTerm, WitnessBuilder, WitnessCoefficient, + }, + witness_generator::NoirWitnessGenerator, +}; + +/// The index of the constant 1 witness in the R1CS instance +pub const WITNESS_ONE_IDX: usize = 0; + +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +pub enum ConstantOrR1CSWitness { + Constant(#[serde(with = "serde_ark")] FieldElement), + Witness(usize), +} + +impl ConstantOrR1CSWitness { + pub fn to_tuple(&self) -> (FieldElement, usize) { + match self { + ConstantOrR1CSWitness::Constant(c) => (*c, WITNESS_ONE_IDX), + ConstantOrR1CSWitness::Witness(w) => (FieldElement::one(), *w), + } + } +} diff --git a/provekit/common/src/witness/ram.rs b/provekit/common/src/witness/ram.rs new file mode 100644 index 000000000..6a7a80568 --- /dev/null +++ b/provekit/common/src/witness/ram.rs @@ -0,0 +1,38 @@ +use serde::{Deserialize, Serialize}; + +/// Like MemoryOperation, but 
with the indices of the additional witnesses +/// needed by Spice. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub enum SpiceMemoryOperation { + /// Load operation. Arguments are R1CS witness indices: + /// (address, value read, read timestamp) + /// `address` is already solved for by the ACIR solver. + Load(usize, usize, usize), + /// Store operation. Arguments are R1CS witness indices: + /// (address, old value, new value, read timestamp) + /// `address`, `old value`, `new value` are already solved for by the ACIR + /// solver. + Store(usize, usize, usize, usize), +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct SpiceWitnesses { + /// The length of the memory block + pub memory_length: usize, + /// The witness index of the first initial value (they are stored + /// contiguously) (Not written to) + pub initial_values_start: usize, + /// The memory operations, in the order that they occur; each + /// SpiceMemoryOperation contains witness indices that will be written to) + pub memory_operations: Vec, + /// The witness index of the first of the memory_length final read values + /// (stored contiguously) (these witnesses are written to) + pub rv_final_start: usize, + /// The witness index of the first of the memory_length final read + /// timestamps (stored contiguously) (these witnesses are written to) + pub rt_final_start: usize, + /// The index of the first witness written to by the SpiceWitnesses struct + pub first_witness_idx: usize, + /// The number of witnesses written to by the SpiceWitnesses struct + pub num_witnesses: usize, +} diff --git a/provekit/common/src/witness/witness_builder.rs b/provekit/common/src/witness/witness_builder.rs new file mode 100644 index 000000000..4c18d7a85 --- /dev/null +++ b/provekit/common/src/witness/witness_builder.rs @@ -0,0 +1,127 @@ +use { + crate::{ + utils::{serde_ark, serde_ark_option}, + witness::{ + binops::BINOP_ATOMIC_BITS, digits::DigitalDecompositionWitnesses, 
ram::SpiceWitnesses, + ConstantOrR1CSWitness, + }, + FieldElement, + }, + serde::{Deserialize, Serialize}, +}; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct SumTerm( + #[serde(with = "serde_ark_option")] pub Option, + pub usize, +); + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ConstantTerm(pub usize, #[serde(with = "serde_ark")] pub FieldElement); + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct WitnessCoefficient(#[serde(with = "serde_ark")] pub FieldElement, pub usize); + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ProductLinearTerm( + pub usize, + #[serde(with = "serde_ark")] pub FieldElement, + #[serde(with = "serde_ark")] pub FieldElement, +); + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +/// Indicates how to solve for a collection of R1CS witnesses in terms of +/// earlier (i.e. already solved for) R1CS witnesses and/or ACIR witness values. +pub enum WitnessBuilder { + /// Constant value, used for the constant one witness & e.g. static lookups + /// (witness index, constant value) + Constant(ConstantTerm), + /// A witness value carried over from the ACIR circuit (at the specified + /// ACIR witness index) (includes ACIR inputs and outputs) + /// (witness index, ACIR witness index) + Acir(usize, usize), + /// A linear combination of witness values, where the coefficients are field + /// elements. First argument is the witness index of the sum. + /// Vector consists of (optional coefficient, witness index) tuples, one for + /// each summand. The coefficient is optional, and if it is None, the + /// coefficient is 1. + Sum(usize, Vec), + /// The product of the values at two specified witness indices + /// (witness index, operand witness index a, operand witness index b) + Product(usize, usize, usize), + /// Solves for the number of times that each memory address occurs in + /// read-only memory. 
Arguments: (first witness index, range size, + /// vector of all witness indices for values purported to be in the range) + MultiplicitiesForRange(usize, usize, Vec), + /// A Fiat-Shamir challenge value + /// (witness index) + Challenge(usize), + /// For solving for the denominator of an indexed lookup. + /// Fields are (witness index, sz_challenge, (index_coeff, index), + /// rs_challenge, value). + IndexedLogUpDenominator(usize, usize, WitnessCoefficient, usize, usize), + /// The inverse of the value at a specified witness index + /// (witness index, operand witness index) + Inverse(usize, usize), + /// Products with linear operations on the witness indices. + /// Fields are ProductLinearOperation(witness_idx, (index, a, b), (index, c, + /// d)) such that we wish to compute (ax + b) * (cx + d). + ProductLinearOperation(usize, ProductLinearTerm, ProductLinearTerm), + /// For solving for the denominator of a lookup (non-indexed). + /// Field are (witness index, sz_challenge, (value_coeff, value)). + LogUpDenominator(usize, usize, WitnessCoefficient), + /// Builds the witnesses values required for the mixed base digital + /// decomposition of other witness values. + DigitalDecomposition(DigitalDecompositionWitnesses), + /// A factor of the multiset check used in read/write memory checking. + /// Values: (witness index, sz_challenge, rs_challenge, (addr, + /// addr_witness), value, (timer, timer_witness)) where sz_challenge, + /// rs_challenge, addr_witness, timer_witness are witness indices. + /// Solver computes: + /// sz_challenge - (addr * addr_witness + rs_challenge * value + + /// rs_challenge * rs_challenge * timer * timer_witness) + SpiceMultisetFactor( + usize, + usize, + usize, + WitnessCoefficient, + usize, + WitnessCoefficient, + ), + /// Builds the witnesses values required for the Spice memory model. + /// (Note that some witness values are already solved for by the ACIR + /// solver.) 
+ SpiceWitnesses(SpiceWitnesses), + /// A witness value for the denominator of a bin op lookup. + /// Arguments: `(witness index, sz_challenge, rs_challenge, + /// rs_challenge_sqrd, lhs, rhs, output)`, where `lhs`, `rhs`, and + /// `output` are either constant or witness values. + BinOpLookupDenominator( + usize, + usize, + usize, + usize, + ConstantOrR1CSWitness, + ConstantOrR1CSWitness, + ConstantOrR1CSWitness, + ), + /// Witness values for the number of times that each pair of input values + /// occurs in the bin op. + MultiplicitiesForBinOp(usize, Vec<(ConstantOrR1CSWitness, ConstantOrR1CSWitness)>), +} + +impl WitnessBuilder { + /// The number of witness values that this builder writes to the witness + /// vector. + pub fn num_witnesses(&self) -> usize { + match self { + WitnessBuilder::MultiplicitiesForRange(_, range_size, _) => *range_size, + WitnessBuilder::DigitalDecomposition(dd_struct) => dd_struct.num_witnesses, + WitnessBuilder::SpiceWitnesses(spice_witnesses_struct) => { + spice_witnesses_struct.num_witnesses + } + WitnessBuilder::MultiplicitiesForBinOp(..) => 2usize.pow(2 * BINOP_ATOMIC_BITS as u32), + _ => 1, + } + } +} diff --git a/provekit/common/src/witness/witness_generator.rs b/provekit/common/src/witness/witness_generator.rs new file mode 100644 index 000000000..30e80267c --- /dev/null +++ b/provekit/common/src/witness/witness_generator.rs @@ -0,0 +1,35 @@ +use { + crate::utils::serde_jsonify, + noirc_abi::Abi, + serde::{Deserialize, Serialize}, + std::num::NonZeroU32, +}; + +// TODO: Handling of the return value for the verifier. + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct NoirWitnessGenerator { + // Note: Abi uses an [internally tagged] enum format in Serde, which is not compatible + // with some schemaless formats like Postcard. + // [internally-tagged]: https://serde.rs/enum-representations.html + // TODO: serializes the ABI as a json string. Something like CBOR might be better. 
+ #[serde(with = "serde_jsonify")] + pub abi: Abi, + + /// ACIR witness index to R1CS witness index + /// Index zero is reserved for constant one, so we can use `NonZeroU32` + pub witness_map: Vec>, +} + +impl NoirWitnessGenerator { + pub fn abi(&self) -> &Abi { + &self.abi + } +} + +impl PartialEq for NoirWitnessGenerator { + fn eq(&self, other: &Self) -> bool { + format!("{:?}", self.abi) == format!("{:?}", other.abi) + && self.witness_map == other.witness_map + } +} diff --git a/provekit/prover/.gitignore b/provekit/prover/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ b/provekit/prover/.gitignore @@ -0,0 +1 @@ +/target diff --git a/provekit/prover/Cargo.toml b/provekit/prover/Cargo.toml new file mode 100644 index 000000000..5584465d1 --- /dev/null +++ b/provekit/prover/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "provekit-prover" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +# Workspace crates +provekit-common.workspace = true +skyscraper.workspace = true + +# Noir language +acir.workspace = true +bn254_blackbox_solver.workspace = true +nargo.workspace = true +noirc_abi.workspace = true + +# Cryptography and proof systems +ark-ff.workspace = true +ark-std.workspace = true +spongefish.workspace = true +whir.workspace = true + +# 3rd party +anyhow.workspace = true +rand.workspace = true +rayon.workspace = true +tracing.workspace = true + +[lints] +workspace = true diff --git a/provekit/prover/src/lib.rs b/provekit/prover/src/lib.rs new file mode 100644 index 000000000..704615629 --- /dev/null +++ b/provekit/prover/src/lib.rs @@ -0,0 +1,9 @@ +mod noir_proof_scheme; +mod r1cs; +mod whir_r1cs; +mod witness; + +pub use noir_proof_scheme::NoirProofSchemeProver; + +#[cfg(test)] +mod tests {} diff --git a/provekit/prover/src/noir_proof_scheme.rs 
b/provekit/prover/src/noir_proof_scheme.rs new file mode 100644 index 000000000..37ec5f468 --- /dev/null +++ b/provekit/prover/src/noir_proof_scheme.rs @@ -0,0 +1,134 @@ +use { + crate::{ + r1cs::R1CSSolver, + whir_r1cs::WhirR1CSProver, + witness::{fill_witness, witness_io_pattern::WitnessIOPattern}, + }, + acir::native_types::WitnessMap, + anyhow::{Context, Result}, + bn254_blackbox_solver::Bn254BlackBoxSolver, + nargo::foreign_calls::DefaultForeignCallBuilder, + noirc_abi::InputMap, + provekit_common::{ + skyscraper::SkyscraperSponge, utils::noir_to_native, witness::WitnessBuilder, FieldElement, + IOPattern, NoirElement, NoirProof, NoirProofScheme, + }, + spongefish::{codecs::arkworks_algebra::FieldToUnitSerialize, ProverState}, + tracing::instrument, +}; + +pub trait NoirProofSchemeProver { + fn generate_witness(&self, input_map: &InputMap) -> Result>; + + fn prove(&self, input_map: &InputMap) -> Result; + + fn create_witness_io_pattern(&self) -> IOPattern; + + fn seed_witness_merlin( + &self, + merlin: &mut ProverState, + witness: &WitnessMap, + ) -> Result<()>; +} + +impl NoirProofSchemeProver for NoirProofScheme { + #[instrument(skip_all)] + fn generate_witness(&self, input_map: &InputMap) -> Result> { + let solver = Bn254BlackBoxSolver::default(); + let mut output_buffer = Vec::new(); + let mut foreign_call_executor = DefaultForeignCallBuilder { + output: &mut output_buffer, + enable_mocks: false, + resolver_url: None, + root_path: None, + package_name: None, + } + .build(); + + let initial_witness = self.witness_generator.abi().encode(input_map, None)?; + + let mut witness_stack = nargo::ops::execute_program( + &self.program, + initial_witness, + &solver, + &mut foreign_call_executor, + )?; + + Ok(witness_stack + .pop() + .context("Missing witness results")? 
+ .witness) + } + + #[instrument(skip_all)] + fn prove(&self, input_map: &InputMap) -> Result { + let acir_witness_idx_to_value_map = self.generate_witness(input_map)?; + + // Solve R1CS instance + let witness_io = self.create_witness_io_pattern(); + let mut witness_merlin = witness_io.to_prover_state(); + self.seed_witness_merlin(&mut witness_merlin, &acir_witness_idx_to_value_map)?; + + let partial_witness = self.r1cs.solve_witness_vec( + &self.witness_builders, + &acir_witness_idx_to_value_map, + &mut witness_merlin, + ); + let witness = fill_witness(partial_witness).context("while filling witness")?; + + // Verify witness (redundant with solve) + #[cfg(test)] + self.r1cs + .test_witness_satisfaction(&witness) + .context("While verifying R1CS instance")?; + + // Prove R1CS instance + let whir_r1cs_proof = self + .whir_for_witness + .prove(&self.r1cs, witness) + .context("While proving R1CS instance")?; + + Ok(NoirProof { whir_r1cs_proof }) + } + + fn create_witness_io_pattern(&self) -> IOPattern { + let circuit = &self.program.functions[0]; + let public_idxs = circuit.public_inputs().indices(); + let num_challenges = self + .witness_builders + .iter() + .filter(|b| matches!(b, WitnessBuilder::Challenge(_))) + .count(); + + // Create witness IO pattern + IOPattern::new("📜") + .add_shape() + .add_public_inputs(public_idxs.len()) + .add_logup_challenges(num_challenges) + } + + fn seed_witness_merlin( + &self, + merlin: &mut ProverState, + witness: &WitnessMap, + ) -> Result<()> { + // Absorb circuit shape + let _ = merlin.add_scalars(&[ + FieldElement::from(self.r1cs.num_constraints() as u64), + FieldElement::from(self.r1cs.num_witnesses() as u64), + ]); + + // Absorb public inputs (values) in canonical order + let circuit = &self.program.functions[0]; + let public_idxs = circuit.public_inputs().indices(); + if !public_idxs.is_empty() { + let pub_vals: Vec = public_idxs + .iter() + .map(|&i| noir_to_native(*witness.get_index(i).expect("missing public input"))) + 
.collect(); + let _ = merlin.add_scalars(&pub_vals); + } + + Ok(()) + } +} diff --git a/provekit/prover/src/r1cs.rs b/provekit/prover/src/r1cs.rs new file mode 100644 index 000000000..5dc8c56fd --- /dev/null +++ b/provekit/prover/src/r1cs.rs @@ -0,0 +1,61 @@ +use { + crate::witness::witness_builder::WitnessBuilderSolver, + acir::native_types::WitnessMap, + anyhow::{ensure, Result}, + provekit_common::{ + skyscraper::SkyscraperSponge, witness::WitnessBuilder, FieldElement, NoirElement, R1CS, + }, + spongefish::ProverState, + tracing::instrument, +}; + +pub trait R1CSSolver { + fn solve_witness_vec( + &self, + witness_builder_vec: &[WitnessBuilder], + acir_witness_idx_to_value_map: &WitnessMap, + transcript: &mut ProverState, + ) -> Vec>; + + fn test_witness_satisfaction(&self, witness: &[FieldElement]) -> Result<()>; +} + +impl R1CSSolver for R1CS { + fn solve_witness_vec( + &self, + witness_builder_vec: &[WitnessBuilder], + acir_witness_idx_to_value_map: &WitnessMap, + transcript: &mut ProverState, + ) -> Vec> { + let mut witness = vec![None; self.num_witnesses()]; + witness_builder_vec.iter().for_each(|witness_builder| { + witness_builder.solve(acir_witness_idx_to_value_map, &mut witness, transcript); + }); + + witness + } + + // Tests R1CS Witness satisfaction given the constraints provided by the + // R1CS Matrices. 
+ #[instrument(skip_all, fields(size = witness.len()))] + fn test_witness_satisfaction(&self, witness: &[FieldElement]) -> Result<()> { + ensure!( + witness.len() == self.num_witnesses(), + "Witness size does not match" + ); + + // Verify + let a = self.a() * witness; + let b = self.b() * witness; + let c = self.c() * witness; + for (row, ((a, b), c)) in a + .into_iter() + .zip(b.into_iter()) + .zip(c.into_iter()) + .enumerate() + { + ensure!(a * b == c, "Constraint {row} failed"); + } + Ok(()) + } +} diff --git a/noir-r1cs/src/whir_r1cs.rs b/provekit/prover/src/whir_r1cs.rs similarity index 59% rename from noir-r1cs/src/whir_r1cs.rs rename to provekit/prover/src/whir_r1cs.rs index d39210017..cfe4febbd 100644 --- a/noir-r1cs/src/whir_r1cs.rs +++ b/provekit/prover/src/whir_r1cs.rs @@ -1,116 +1,45 @@ use { - crate::{ - skyscraper::{SkyscraperMerkleConfig, SkyscraperPoW, SkyscraperSponge}, + anyhow::{ensure, Result}, + ark_ff::UniformRand, + ark_std::{One, Zero}, + provekit_common::{ + skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, utils::{ - next_power_of_two, pad_to_power_of_two, serde_hex, + pad_to_power_of_two, sumcheck::{ - calculate_eq, calculate_evaluations_over_boolean_hypercube_for_eq, + calculate_evaluations_over_boolean_hypercube_for_eq, calculate_external_row_of_r1cs_matrices, calculate_witness_bounds, eval_cubic_poly, sumcheck_fold_map_reduce, SumcheckIOPattern, }, zk_utils::{create_masked_polynomial, generate_random_multilinear_polynomial}, HALF, }, - FieldElement, R1CS, + FieldElement, IOPattern, WhirConfig, WhirR1CSProof, WhirR1CSScheme, R1CS, }, - anyhow::{ensure, Context, Result}, - ark_ff::UniformRand, - ark_std::{One, Zero}, - serde::{Deserialize, Serialize}, spongefish::{ - codecs::arkworks_algebra::{FieldToUnitDeserialize, FieldToUnitSerialize, UnitToField}, - DomainSeparator, ProverState, VerifierState, + codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, + ProverState, }, - std::fmt::{Debug, Formatter}, tracing::{info, 
instrument, warn}, whir::{ - parameters::{ - default_max_pow, FoldingFactor, - MultivariateParameters as GenericMultivariateParameters, - ProtocolParameters as GenericProtocolParameters, SoundnessType, - }, poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, whir::{ - committer::{reader::ParsedCommitment, CommitmentReader, CommitmentWriter, Witness}, + committer::{CommitmentWriter, Witness}, domainsep::WhirDomainSeparator, - parameters::WhirConfig as GenericWhirConfig, prover::Prover, statement::{Statement, Weights}, - utils::{HintDeserialize, HintSerialize}, - verifier::Verifier, + utils::HintSerialize, }, }, }; -pub type MultivariateParameters = GenericMultivariateParameters; -pub type ProtocolParameters = GenericProtocolParameters; -pub type WhirConfig = GenericWhirConfig; -pub type IOPattern = DomainSeparator; - -#[derive(Clone, PartialEq, Serialize, Deserialize)] -pub struct WhirR1CSScheme { - pub m: usize, - pub m_0: usize, - pub a_num_terms: usize, - pub whir_witness: WhirConfig, - pub whir_for_hiding_spartan: WhirConfig, -} - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct WhirR1CSProof { - #[serde(with = "serde_hex")] - pub transcript: Vec, -} - -pub struct DataFromSumcheckVerifier { - r: Vec, - alpha: Vec, - last_sumcheck_val: FieldElement, +pub trait WhirR1CSProver { + fn prove(&self, r1cs: &R1CS, witness: Vec) -> Result; } -impl WhirR1CSScheme { - pub fn new_for_r1cs(r1cs: &R1CS) -> Self { - // m is equal to ceiling(log(number of variables in constraint system)). It is - // equal to the log of the width of the matrices. - let m = next_power_of_two(r1cs.num_witnesses()); - - // m_0 is equal to ceiling(log(number_of_constraints)). It is equal to the - // number of variables in the multilinear polynomial we are running our sumcheck - // on. 
- let m_0 = next_power_of_two(r1cs.num_constraints()); - - // Whir parameters - Self { - m: m + 1, - m_0, - a_num_terms: next_power_of_two(r1cs.a().iter().count()), - whir_witness: Self::new_whir_config_for_size(m + 1, 2), - whir_for_hiding_spartan: Self::new_whir_config_for_size( - next_power_of_two(4 * m_0) + 1, - 2, - ), - } - } - - pub fn new_whir_config_for_size(num_variables: usize, batch_size: usize) -> WhirConfig { - let mv_params = MultivariateParameters::new(num_variables); - let whir_params = ProtocolParameters { - initial_statement: true, - security_level: 128, - pow_bits: default_max_pow(num_variables, 1), - folding_factor: FoldingFactor::Constant(4), - leaf_hash_params: (), - two_to_one_params: (), - soundness_type: SoundnessType::ConjectureList, - _pow_parameters: Default::default(), - starting_log_inv_rate: 1, - batch_size, - }; - WhirConfig::new(mv_params, whir_params) - } - +impl WhirR1CSProver for WhirR1CSScheme { #[instrument(skip_all)] - pub fn prove(&self, r1cs: &R1CS, witness: Vec) -> Result { + fn prove(&self, r1cs: &R1CS, witness: Vec) -> Result { ensure!( witness.len() == r1cs.num_witnesses(), "Unexpected witness length for R1CS instance" @@ -169,85 +98,6 @@ impl WhirR1CSScheme { Ok(WhirR1CSProof { transcript }) } - - #[instrument(skip_all)] - #[allow(unused)] // TODO: Fix implementation - pub fn verify(&self, proof: &WhirR1CSProof) -> Result<()> { - // Set up transcript - let io = self.create_io_pattern(); - let mut arthur = io.to_verifier_state(&proof.transcript); - - let commitment_reader = CommitmentReader::new(&self.whir_witness); - let parsed_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); - - let data_from_sumcheck_verifier = run_sumcheck_verifier( - &mut arthur, - self.m_0, - &self.whir_for_hiding_spartan, - // proof.whir_spartan_blinding_values, - ) - .context("while verifying sumcheck")?; - - let whir_query_answer_sum_vectors: (Vec, Vec) = - arthur.hint().unwrap(); - - let whir_query_answer_sums = ( - 
whir_query_answer_sum_vectors.0.try_into().unwrap(), - whir_query_answer_sum_vectors.1.try_into().unwrap(), - ); - - let statement_verifier = prepare_statement_for_witness_verifier::<3>( - self.m, - &parsed_commitment, - &whir_query_answer_sums, - ); - - let (folding_randomness, deferred) = run_whir_pcs_verifier( - &mut arthur, - &parsed_commitment, - &self.whir_witness, - &statement_verifier, - ) - .context("while verifying WHIR proof")?; - - // Check the Spartan sumcheck relation. - ensure!( - data_from_sumcheck_verifier.last_sumcheck_val - == (whir_query_answer_sums.0[0] * whir_query_answer_sums.0[1] - - whir_query_answer_sums.0[2]) - * calculate_eq( - &data_from_sumcheck_verifier.r, - &data_from_sumcheck_verifier.alpha - ), - "last sumcheck value does not match" - ); - - Ok(()) - } - - #[instrument(skip_all)] - pub fn create_io_pattern(&self) -> IOPattern { - let io = IOPattern::new("🌪️") - .commit_statement(&self.whir_witness) - .add_rand(self.m_0) - .commit_statement(&self.whir_for_hiding_spartan) - .add_zk_sumcheck_polynomials(self.m_0) - .add_whir_proof(&self.whir_for_hiding_spartan) - .hint("claimed_evaluations") - .add_whir_proof(&self.whir_witness); - - io - } -} - -// TODO: Implement Debug for WhirConfig and derive. 
-impl Debug for WhirR1CSScheme { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("WhirR1CSScheme") - .field("m", &self.m) - .field("m_0", &self.m_0) - .finish() - } } pub fn compute_blinding_coefficients_for_round( @@ -329,23 +179,6 @@ pub fn sum_over_hypercube(g_univariates: &[[FieldElement; 4]]) -> FieldElement { + eval_cubic_poly(&polynomial_coefficient, &FieldElement::one()) } -fn prepare_statement_for_witness_verifier( - m: usize, - parsed_commitment: &ParsedCommitment, - whir_query_answer_sums: &([FieldElement; N], [FieldElement; N]), -) -> Statement { - let mut statement_verifier = Statement::::new(m); - for i in 0..whir_query_answer_sums.0.len() { - let claimed_sum = whir_query_answer_sums.0[i] - + whir_query_answer_sums.1[i] * parsed_commitment.batching_randomness; - statement_verifier.add_constraint( - Weights::linear(EvaluationsList::new(vec![FieldElement::zero(); 1 << m])), - claimed_sum, - ); - } - statement_verifier -} - pub fn batch_commit_to_polynomial( m: usize, whir_config: &WhirConfig, @@ -602,84 +435,3 @@ pub fn run_zk_whir_pcs_prover( (merlin, randomness, deferred) } - -#[instrument(skip_all)] -pub fn run_sumcheck_verifier( - arthur: &mut VerifierState, - m_0: usize, - whir_for_spartan_blinding_config: &WhirConfig, -) -> Result { - // r is the combination randomness from the 2nd item of the interaction phase - let mut r = vec![FieldElement::zero(); m_0]; - let _ = arthur.fill_challenge_scalars(&mut r); - - let commitment_reader = CommitmentReader::new(whir_for_spartan_blinding_config); - let parsed_commitment = commitment_reader.parse_commitment(arthur).unwrap(); - - let mut sum_g_buf = [FieldElement::zero()]; - arthur.fill_next_scalars(&mut sum_g_buf)?; - - let mut rho_buf = [FieldElement::zero()]; - arthur.fill_challenge_scalars(&mut rho_buf)?; - let rho = rho_buf[0]; - - let mut saved_val_for_sumcheck_equality_assertion = rho * sum_g_buf[0]; - - let mut alpha = vec![FieldElement::zero(); m_0]; - - for item in 
alpha.iter_mut().take(m_0) { - let mut hhat_i = [FieldElement::zero(); 4]; - let mut alpha_i = [FieldElement::zero(); 1]; - let _ = arthur.fill_next_scalars(&mut hhat_i); - let _ = arthur.fill_challenge_scalars(&mut alpha_i); - *item = alpha_i[0]; - let hhat_i_at_zero = eval_cubic_poly(&hhat_i, &FieldElement::zero()); - let hhat_i_at_one = eval_cubic_poly(&hhat_i, &FieldElement::one()); - ensure!( - saved_val_for_sumcheck_equality_assertion == hhat_i_at_zero + hhat_i_at_one, - "Sumcheck equality assertion failed" - ); - saved_val_for_sumcheck_equality_assertion = eval_cubic_poly(&hhat_i, &alpha_i[0]); - } - let mut values_of_polynomial_sums = [FieldElement::zero(); 2]; - let _ = arthur.fill_next_scalars(&mut values_of_polynomial_sums); - - let statement_verifier = prepare_statement_for_witness_verifier::<1>( - whir_for_spartan_blinding_config.mv_parameters.num_variables, - &parsed_commitment, - &([values_of_polynomial_sums[0]], [ - values_of_polynomial_sums[1] - ]), - ); - run_whir_pcs_verifier( - arthur, - &parsed_commitment, - whir_for_spartan_blinding_config, - &statement_verifier, - ) - .context("while verifying WHIR")?; - - let f_at_alpha = saved_val_for_sumcheck_equality_assertion - rho * values_of_polynomial_sums[0]; - - Ok(DataFromSumcheckVerifier { - r, - alpha, - last_sumcheck_val: f_at_alpha, - }) -} - -#[instrument(skip_all)] -pub fn run_whir_pcs_verifier( - arthur: &mut VerifierState, - parsed_commitment: &ParsedCommitment, - params: &WhirConfig, - statement_verifier: &Statement, -) -> Result<(MultilinearPoint, Vec)> { - let verifier = Verifier::new(params); - - let (folding_randomness, deferred) = verifier - .verify(arthur, parsed_commitment, statement_verifier) - .context("while verifying WHIR")?; - - Ok((folding_randomness, deferred)) -} diff --git a/provekit/prover/src/witness/digits.rs b/provekit/prover/src/witness/digits.rs new file mode 100644 index 000000000..afa12e0a6 --- /dev/null +++ b/provekit/prover/src/witness/digits.rs @@ -0,0 +1,113 @@ 
+use { + ark_ff::{BigInteger, PrimeField}, + ark_std::Zero, + provekit_common::{witness::DigitalDecompositionWitnesses, FieldElement}, +}; + +pub(crate) trait DigitalDecompositionWitnessesSolver { + fn solve(&self, witness: &mut [Option]); +} + +impl DigitalDecompositionWitnessesSolver for DigitalDecompositionWitnesses { + fn solve(&self, witness: &mut [Option]) { + self.witnesses_to_decompose + .iter() + .enumerate() + .for_each(|(i, value_witness_idx)| { + let value = witness[*value_witness_idx].unwrap(); + let digits = decompose_into_digits(value, &self.log_bases); + digits + .iter() + .enumerate() + .for_each(|(digit_place, digit_value)| { + witness[self.first_witness_idx + + digit_place * self.witnesses_to_decompose.len() + + i] = Some(*digit_value); + }); + }); + } +} + +/// Compute a mixed-base decomposition of a field element into its digits, using +/// the given log bases. Decomposition is little-endian. +/// Panics if the value provided can not be represented in the given bases. +pub(crate) fn decompose_into_digits(value: FieldElement, log_bases: &[usize]) -> Vec { + let num_digits = log_bases.len(); + let mut digits = vec![FieldElement::zero(); num_digits]; + let value_bits = field_to_le_bits(value); + // Grab the bits of the element that we need for each digit, and turn them back + // into field elements. + let mut start_bit = 0; + for digit_idx in 0..num_digits { + let log_base = log_bases[digit_idx]; + let digit_bits = &value_bits[start_bit..start_bit + log_base]; + let digit_value = le_bits_to_field(digit_bits); + digits[digit_idx] = digit_value; + start_bit += log_base; + } + let remaining_bits = &value_bits[start_bit..]; + assert!( + remaining_bits.iter().all(|&bit| !bit), + "Higher order bits are not zero" + ); + digits +} + +/// Decomposes a field element into its bits, in little-endian order. 
+pub(crate) fn field_to_le_bits(value: FieldElement) -> Vec { + value.into_bigint().to_bits_le() +} + +/// Given the binary representation of a field element in little-endian order, +/// convert it to a field element. The input is padded to the next multiple of 8 +/// bits. +pub(crate) fn le_bits_to_field(bits: &[bool]) -> FieldElement { + let next_multiple_of_8 = bits.len().div_ceil(8) * 8; + let padding_amt = next_multiple_of_8 - bits.len(); + let mut padded_bits_le = vec![false; next_multiple_of_8]; + padded_bits_le[..(next_multiple_of_8 - padding_amt)].copy_from_slice(bits); + let be_byte_vec: Vec = padded_bits_le + .chunks(8) + .map(|chunk_in_bits| { + chunk_in_bits + .iter() + .enumerate() + .fold(0u8, |acc, (i, bit)| acc | ((*bit as u8) << i)) + }) + .rev() + .collect(); + FieldElement::from_be_bytes_mod_order(&be_byte_vec) +} + +#[cfg(test)] +#[test] +fn test_decompose_into_digits() { + let value = FieldElement::from(3 + 2u32 * 256 + 256 * 256); + let log_bases = vec![8, 8, 4]; + let digits = decompose_into_digits(value, &log_bases); + assert_eq!(digits.len(), log_bases.len()); + assert_eq!(digits[0], FieldElement::from(3u32)); + assert_eq!(digits[1], FieldElement::from(2u32)); + assert_eq!(digits[2], FieldElement::from(1u32)); +} + +#[cfg(test)] +#[test] +fn test_field_to_le_bits() { + let value = FieldElement::from(5u32); + let bits = field_to_le_bits(value); + assert_eq!(bits.len(), 256); + assert!(bits[0]); + assert!(!bits[1]); + assert!(bits[2]); + assert!(!bits[254]); + assert!(!bits[255]); +} + +#[cfg(test)] +#[test] +fn test_le_bits_to_field() { + let bits = vec![true, false, true, false, false]; + let value = le_bits_to_field(&bits); + assert_eq!(value.into_bigint().0[0], 5); +} diff --git a/provekit/prover/src/witness/mod.rs b/provekit/prover/src/witness/mod.rs new file mode 100644 index 000000000..2bbcfceaa --- /dev/null +++ b/provekit/prover/src/witness/mod.rs @@ -0,0 +1,30 @@ +use { + anyhow::Result, + provekit_common::FieldElement, + 
rand::{rng, Rng}, + tracing::{info, instrument}, +}; + +mod digits; +mod ram; +pub(crate) mod witness_builder; +pub(crate) mod witness_io_pattern; + +/// Complete a partial witness with random values. +#[instrument(skip_all, fields(size = witness.len()))] +pub(crate) fn fill_witness(witness: Vec>) -> Result> { + // TODO: Use better entropy source and proper sampling. + let mut rng = rng(); + let mut count = 0; + let witness = witness + .iter() + .map(|f| { + f.unwrap_or_else(|| { + count += 1; + FieldElement::from(rng.random::()) + }) + }) + .collect::>(); + info!("Filled witness with {count} random values"); + Ok(witness) +} diff --git a/provekit/prover/src/witness/ram.rs b/provekit/prover/src/witness/ram.rs new file mode 100644 index 000000000..71e7c0c2e --- /dev/null +++ b/provekit/prover/src/witness/ram.rs @@ -0,0 +1,47 @@ +use { + ark_ff::PrimeField, + provekit_common::{ + witness::{SpiceMemoryOperation, SpiceWitnesses}, + FieldElement, + }, +}; + +pub(crate) trait SpiceWitnessesSolver { + fn solve(&self, witness: &mut [Option]); +} + +impl SpiceWitnessesSolver for SpiceWitnesses { + fn solve(&self, witness: &mut [Option]) { + let mut rv_final = witness + [self.initial_values_start..self.initial_values_start + self.memory_length] + .to_vec(); + let mut rt_final = vec![0; self.memory_length]; + for (mem_op_index, mem_op) in self.memory_operations.iter().enumerate() { + match mem_op { + SpiceMemoryOperation::Load(addr, value, read_timestamp) => { + let addr = witness[*addr].unwrap(); + let addr_as_usize = addr.into_bigint().0[0] as usize; + witness[*read_timestamp] = + Some(FieldElement::from(rt_final[addr_as_usize] as u64)); + rv_final[addr_as_usize] = witness[*value]; + rt_final[addr_as_usize] = mem_op_index + 1; + } + SpiceMemoryOperation::Store(addr, old_value, new_value, read_timestamp) => { + let addr = witness[*addr].unwrap(); + let addr_as_usize = addr.into_bigint().0[0] as usize; + witness[*old_value] = rv_final[addr_as_usize]; + 
witness[*read_timestamp] = + Some(FieldElement::from(rt_final[addr_as_usize] as u64)); + let new_value = witness[*new_value]; + rv_final[addr_as_usize] = new_value; + rt_final[addr_as_usize] = mem_op_index + 1; + } + } + } + // Copy the final values and read timestamps into the witness vector + for i in 0..self.memory_length { + witness[self.rv_final_start + i] = rv_final[i]; + witness[self.rt_final_start + i] = Some(FieldElement::from(rt_final[i] as u64)); + } + } +} diff --git a/noir-r1cs/src/r1cs_solver.rs b/provekit/prover/src/witness/witness_builder.rs similarity index 51% rename from noir-r1cs/src/r1cs_solver.rs rename to provekit/prover/src/witness/witness_builder.rs index 648dd0387..a18baca2e 100644 --- a/noir-r1cs/src/r1cs_solver.rs +++ b/provekit/prover/src/witness/witness_builder.rs @@ -1,164 +1,33 @@ use { - crate::{ - binops::BINOP_ATOMIC_BITS, - digits::DigitalDecompositionWitnesses, - noir_to_r1cs::ConstantOrR1CSWitness, - ram::SpiceWitnesses, - skyscraper::SkyscraperSponge, - utils::{noir_to_native, serde_ark, serde_ark_option}, - FieldElement, - }, - acir::{native_types::WitnessMap, FieldElement as NoirFieldElement}, + crate::witness::{digits::DigitalDecompositionWitnessesSolver, ram::SpiceWitnessesSolver}, + acir::native_types::WitnessMap, ark_ff::{Field, PrimeField}, ark_std::Zero, - serde::{Deserialize, Serialize}, + provekit_common::{ + skyscraper::SkyscraperSponge, + utils::noir_to_native, + witness::{ + ConstantOrR1CSWitness, ConstantTerm, ProductLinearTerm, SumTerm, WitnessBuilder, + WitnessCoefficient, BINOP_ATOMIC_BITS, + }, + FieldElement, NoirElement, + }, spongefish::{codecs::arkworks_algebra::UnitToField, ProverState}, }; -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct SumTerm( - #[serde(with = "serde_ark_option")] pub Option, - pub usize, -); - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct ConstantTerm(pub usize, #[serde(with = "serde_ark")] pub FieldElement); - -#[derive(Debug, Clone, 
PartialEq, Serialize, Deserialize)] -pub struct WitnessCoefficient(#[serde(with = "serde_ark")] pub FieldElement, pub usize); - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct ProductLinearTerm( - pub usize, - #[serde(with = "serde_ark")] pub FieldElement, - #[serde(with = "serde_ark")] pub FieldElement, -); - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -/// Indicates how to solve for a collection of R1CS witnesses in terms of -/// earlier (i.e. already solved for) R1CS witnesses and/or ACIR witness values. -pub enum WitnessBuilder { - /// Constant value, used for the constant one witness & e.g. static lookups - /// (witness index, constant value) - Constant(ConstantTerm), - /// A witness value carried over from the ACIR circuit (at the specified - /// ACIR witness index) (includes ACIR inputs and outputs) - /// (witness index, ACIR witness index) - Acir(usize, usize), - /// A linear combination of witness values, where the coefficients are field - /// elements. First argument is the witness index of the sum. - /// Vector consists of (optional coefficient, witness index) tuples, one for - /// each summand. The coefficient is optional, and if it is None, the - /// coefficient is 1. - Sum(usize, Vec), - /// The product of the values at two specified witness indices - /// (witness index, operand witness index a, operand witness index b) - Product(usize, usize, usize), - /// Solves for the number of times that each memory address occurs in - /// read-only memory. Arguments: (first witness index, range size, - /// vector of all witness indices for values purported to be in the range) - MultiplicitiesForRange(usize, usize, Vec), - /// A Fiat-Shamir challenge value - /// (witness index) - Challenge(usize), - /// For solving for the denominator of an indexed lookup. - /// Fields are (witness index, sz_challenge, (index_coeff, index), - /// rs_challenge, value). 
- IndexedLogUpDenominator(usize, usize, WitnessCoefficient, usize, usize), - /// The inverse of the value at a specified witness index - /// (witness index, operand witness index) - Inverse(usize, usize), - /// Products with linear operations on the witness indices. - /// Fields are ProductLinearOperation(witness_idx, (index, a, b), (index, c, - /// d)) such that we wish to compute (ax + b) * (cx + d). - ProductLinearOperation(usize, ProductLinearTerm, ProductLinearTerm), - /// For solving for the denominator of a lookup (non-indexed). - /// Field are (witness index, sz_challenge, (value_coeff, value)). - LogUpDenominator(usize, usize, WitnessCoefficient), - /// Builds the witnesses values required for the mixed base digital - /// decomposition of other witness values. - DigitalDecomposition(DigitalDecompositionWitnesses), - /// A factor of the multiset check used in read/write memory checking. - /// Values: (witness index, sz_challenge, rs_challenge, (addr, - /// addr_witness), value, (timer, timer_witness)) where sz_challenge, - /// rs_challenge, addr_witness, timer_witness are witness indices. - /// Solver computes: - /// sz_challenge - (addr * addr_witness + rs_challenge * value + - /// rs_challenge * rs_challenge * timer * timer_witness) - SpiceMultisetFactor( - usize, - usize, - usize, - WitnessCoefficient, - usize, - WitnessCoefficient, - ), - /// Builds the witnesses values required for the Spice memory model. - /// (Note that some witness values are already solved for by the ACIR - /// solver.) - SpiceWitnesses(SpiceWitnesses), - /// A witness value for the denominator of a bin op lookup. - /// Arguments: `(witness index, sz_challenge, rs_challenge, - /// rs_challenge_sqrd, lhs, rhs, output)`, where `lhs`, `rhs`, and - /// `output` are either constant or witness values. 
- BinOpLookupDenominator( - usize, - usize, - usize, - usize, - ConstantOrR1CSWitness, - ConstantOrR1CSWitness, - ConstantOrR1CSWitness, - ), - /// Witness values for the number of times that each pair of input values - /// occurs in the bin op. - MultiplicitiesForBinOp(usize, Vec<(ConstantOrR1CSWitness, ConstantOrR1CSWitness)>), +pub trait WitnessBuilderSolver { + fn solve( + &self, + acir_witness_idx_to_value_map: &WitnessMap, + witness: &mut [Option], + transcript: &mut ProverState, + ); } -impl WitnessBuilder { - /// The number of witness values that this builder writes to the witness - /// vector. - pub fn num_witnesses(&self) -> usize { - match self { - WitnessBuilder::MultiplicitiesForRange(_, range_size, _) => *range_size, - WitnessBuilder::DigitalDecomposition(dd_struct) => dd_struct.num_witnesses, - WitnessBuilder::SpiceWitnesses(spice_witnesses_struct) => { - spice_witnesses_struct.num_witnesses - } - WitnessBuilder::MultiplicitiesForBinOp(..) => 2usize.pow(2 * BINOP_ATOMIC_BITS as u32), - _ => 1, - } - } - - /// Return the index of the first witness value that this builder writes to. - pub fn first_witness_idx(&self) -> usize { - match self { - WitnessBuilder::Constant(ConstantTerm(start_idx, _)) => *start_idx, - WitnessBuilder::Acir(start_idx, _) => *start_idx, - WitnessBuilder::Sum(start_idx, _) => *start_idx, - WitnessBuilder::Product(start_idx, ..) => *start_idx, - WitnessBuilder::MultiplicitiesForRange(start_idx, ..) => *start_idx, - WitnessBuilder::IndexedLogUpDenominator(start_idx, ..) => *start_idx, - WitnessBuilder::Challenge(start_idx) => *start_idx, - WitnessBuilder::Inverse(start_idx, _) => *start_idx, - WitnessBuilder::LogUpDenominator(start_idx, ..) => *start_idx, - WitnessBuilder::ProductLinearOperation(start_idx, ..) => *start_idx, - WitnessBuilder::DigitalDecomposition(dd_struct) => dd_struct.first_witness_idx, - WitnessBuilder::SpiceMultisetFactor(start_idx, ..) 
=> *start_idx, - WitnessBuilder::SpiceWitnesses(spice_witnesses_struct) => { - spice_witnesses_struct.first_witness_idx - } - - WitnessBuilder::BinOpLookupDenominator(start_idx, ..) => *start_idx, - WitnessBuilder::MultiplicitiesForBinOp(start_idx, _) => *start_idx, - } - } - - /// Solves for the witness value(s) specified by this builder and writes - /// them to the witness vector. - pub fn solve( +impl WitnessBuilderSolver for WitnessBuilder { + fn solve( &self, - acir_witness_idx_to_value_map: &WitnessMap, + acir_witness_idx_to_value_map: &WitnessMap, witness: &mut [Option], transcript: &mut ProverState, ) { diff --git a/provekit/prover/src/witness/witness_io_pattern.rs b/provekit/prover/src/witness/witness_io_pattern.rs new file mode 100644 index 000000000..e260b114a --- /dev/null +++ b/provekit/prover/src/witness/witness_io_pattern.rs @@ -0,0 +1,40 @@ +use {provekit_common::FieldElement, spongefish::codecs::arkworks_algebra::FieldDomainSeparator}; + +/// Trait which is used to add witness RNG for IOPattern +pub trait WitnessIOPattern { + /// Schedule absorption of circuit shape (2 scalars): (num_constraints, + /// num_witnesses). + fn add_shape(self) -> Self; + + /// Schedule absorption of `num_pub_inputs` public input scalars. + fn add_public_inputs(self, num_pub_inputs: usize) -> Self; + + /// Schedule absorption of `num_challenges` Fiat–Shamir challenges for + /// LogUp/Spice. 
+ fn add_logup_challenges(self, num_challenges: usize) -> Self; +} + +impl WitnessIOPattern for IOPattern +where + IOPattern: FieldDomainSeparator, +{ + fn add_shape(self) -> Self { + self.add_scalars(2, "shape") + } + + fn add_public_inputs(self, num_pub_inputs: usize) -> Self { + if num_pub_inputs > 0 { + self.add_scalars(num_pub_inputs, "pub_inputs") + } else { + self + } + } + + fn add_logup_challenges(self, num_challenges: usize) -> Self { + if num_challenges > 0 { + self.challenge_scalars(num_challenges, "wb:challenges") + } else { + self + } + } +} diff --git a/provekit/r1cs-compiler/Cargo.toml b/provekit/r1cs-compiler/Cargo.toml new file mode 100644 index 000000000..ab99798b9 --- /dev/null +++ b/provekit/r1cs-compiler/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "provekit-r1cs-compiler" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +# Workspace crates +provekit-common.workspace = true + +# Noir language +acir.workspace = true +noirc_abi.workspace = true +noirc_artifacts.workspace = true + +# Cryptography and proof systems +ark-ff.workspace = true +ark-std.workspace = true +whir.workspace = true + +# 3rd party +anyhow.workspace = true +postcard.workspace = true +serde.workspace = true +serde_json.workspace = true +tracing.workspace = true + +[lints] +workspace = true diff --git a/noir-r1cs/src/binops.rs b/provekit/r1cs-compiler/src/binops.rs similarity index 93% rename from noir-r1cs/src/binops.rs rename to provekit/r1cs-compiler/src/binops.rs index cf901a2c0..3006b8457 100644 --- a/noir-r1cs/src/binops.rs +++ b/provekit/r1cs-compiler/src/binops.rs @@ -1,11 +1,15 @@ use { crate::{ - digits::{add_digital_decomposition, decompose_into_digits}, - noir_to_r1cs::{ConstantOrR1CSWitness, NoirToR1CSCompiler}, - r1cs_solver::{SumTerm, WitnessBuilder}, + digits::{ + add_digital_decomposition, 
decompose_into_digits, DigitalDecompositionWitnessesBuilder, + }, + noir_to_r1cs::NoirToR1CSCompiler, + }, + ark_std::One, + provekit_common::{ + witness::{ConstantOrR1CSWitness, SumTerm, WitnessBuilder, BINOP_ATOMIC_BITS, NUM_DIGITS}, FieldElement, }, - ark_ff::One, std::ops::Neg, }; @@ -15,16 +19,6 @@ pub enum BinOp { Xor, } -/// The number of bits that ACIR uses for the inputs and output of the binop. -pub const BINOP_BITS: usize = 32; - -/// The number of bits that used by us for the inputs and output of the binop. -/// 2x this number of bits is used for the lookup table. -pub const BINOP_ATOMIC_BITS: usize = 8; - -/// Each operand is decomposed into this many digits. -pub const NUM_DIGITS: usize = BINOP_BITS / BINOP_ATOMIC_BITS; - /// Add the witnesses and constraints for a [BinOp] (i.e. AND, XOR). Uses a /// digital decomposition of the operands and output into [NUM_DIGITS] digits of /// [BINOP_ATOMIC_BITS] bits each, followed by a lookup table of size 2x diff --git a/noir-r1cs/src/digits.rs b/provekit/r1cs-compiler/src/digits.rs similarity index 73% rename from noir-r1cs/src/digits.rs rename to provekit/r1cs-compiler/src/digits.rs index bb33d3dcf..f53e0aa74 100644 --- a/noir-r1cs/src/digits.rs +++ b/provekit/r1cs-compiler/src/digits.rs @@ -1,31 +1,25 @@ use { - crate::{noir_to_r1cs::NoirToR1CSCompiler, r1cs_solver::WitnessBuilder, FieldElement}, - ark_ff::{BigInteger, One, PrimeField, Zero}, - serde::{Deserialize, Serialize}, + crate::noir_to_r1cs::NoirToR1CSCompiler, + ark_ff::{BigInteger, PrimeField}, + ark_std::{One, Zero}, + provekit_common::{ + witness::{DigitalDecompositionWitnesses, WitnessBuilder}, + FieldElement, + }, }; -/// Allocates witnesses for the digital decomposition of the given witnesses -/// into its digits in the given bases. A log base is specified for each digit -/// (permitting mixed base decompositions). The order of bases is little-endian. 
-/// Witnesses are grouped by digital place, in the order of the bases, -/// where each group of witnesses is in 1:1 correspondence with -/// witnesses_to_decompose. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct DigitalDecompositionWitnesses { - /// The log base of each digit (in little-endian order) - pub log_bases: Vec, - /// The number of witnesses to decompose - pub num_witnesses_to_decompose: usize, - /// Witness indices of the values to be decomposed - pub witnesses_to_decompose: Vec, - /// The index of the first witness written to - pub first_witness_idx: usize, - /// The number of witnesses written to - pub num_witnesses: usize, +pub trait DigitalDecompositionWitnessesBuilder { + fn new( + next_witness_idx: usize, + log_bases: Vec, + witnesses_to_decompose: Vec, + ) -> Self; + + fn get_digit_witness_index(&self, digit_place: usize, value_offset: usize) -> usize; } -impl DigitalDecompositionWitnesses { - pub fn new( +impl DigitalDecompositionWitnessesBuilder for DigitalDecompositionWitnesses { + fn new( next_witness_idx: usize, log_bases: Vec, witnesses_to_decompose: Vec, @@ -45,30 +39,11 @@ impl DigitalDecompositionWitnesses { /// `digit_place`-th digit. Note that `value_offset` is the index of the /// witness in the original list of witnesses (not itself a witness /// index). - pub fn get_digit_witness_index(&self, digit_place: usize, value_offset: usize) -> usize { + fn get_digit_witness_index(&self, digit_place: usize, value_offset: usize) -> usize { debug_assert!(digit_place < self.log_bases.len()); debug_assert!(value_offset < self.num_witnesses_to_decompose); self.first_witness_idx + digit_place * self.num_witnesses_to_decompose + value_offset } - - /// Solve for the witness values allocated to the digital decomposition. 
- pub fn solve(&self, witness: &mut [Option]) { - self.witnesses_to_decompose - .iter() - .enumerate() - .for_each(|(i, value_witness_idx)| { - let value = witness[*value_witness_idx].unwrap(); - let digits = decompose_into_digits(value, &self.log_bases); - digits - .iter() - .enumerate() - .for_each(|(digit_place, digit_value)| { - witness[self.first_witness_idx - + digit_place * self.witnesses_to_decompose.len() - + i] = Some(*digit_value); - }); - }); - } } /// Adds the witnesses and constraints for the digital decomposition of the diff --git a/provekit/r1cs-compiler/src/lib.rs b/provekit/r1cs-compiler/src/lib.rs new file mode 100644 index 000000000..eb7f42472 --- /dev/null +++ b/provekit/r1cs-compiler/src/lib.rs @@ -0,0 +1,16 @@ +mod binops; +mod digits; +mod memory; +mod noir_proof_scheme; +mod noir_to_r1cs; +mod range_check; +mod whir_r1cs; +mod witness_generator; + +pub use { + noir_proof_scheme::NoirProofSchemeBuilder, noir_to_r1cs::noir_to_r1cs, + whir_r1cs::WhirR1CSSchemeBuilder, +}; + +#[cfg(test)] +mod tests {} diff --git a/noir-r1cs/src/memory.rs b/provekit/r1cs-compiler/src/memory/mod.rs similarity index 92% rename from noir-r1cs/src/memory.rs rename to provekit/r1cs-compiler/src/memory/mod.rs index bba4f4973..b3322d758 100644 --- a/noir-r1cs/src/memory.rs +++ b/provekit/r1cs-compiler/src/memory/mod.rs @@ -1,3 +1,16 @@ +mod ram; +mod rom; + +pub(crate) use {ram::add_ram_checking, rom::add_rom_checking}; + +#[derive(Debug, Clone)] +pub enum MemoryOperation { + /// (R1CS witness index of address, R1CS witness index of value read) + Load(usize, usize), + /// (R1CS witness index of address, R1CS witness index of value to write) + Store(usize, usize), +} + #[derive(Debug, Clone)] /// Used for tracking operations on a memory block. 
pub struct MemoryBlock { @@ -22,11 +35,3 @@ impl MemoryBlock { }) } } - -#[derive(Debug, Clone)] -pub enum MemoryOperation { - /// (R1CS witness index of address, R1CS witness index of value read) - Load(usize, usize), - /// (R1CS witness index of address, R1CS witness index of value to write) - Store(usize, usize), -} diff --git a/noir-r1cs/src/ram.rs b/provekit/r1cs-compiler/src/memory/ram.rs similarity index 76% rename from noir-r1cs/src/ram.rs rename to provekit/r1cs-compiler/src/memory/ram.rs index 6afb7cf61..2c34b9f9a 100644 --- a/noir-r1cs/src/ram.rs +++ b/provekit/r1cs-compiler/src/memory/ram.rs @@ -2,53 +2,28 @@ use { crate::{ memory::{MemoryBlock, MemoryOperation}, noir_to_r1cs::NoirToR1CSCompiler, - r1cs_solver::{SumTerm, WitnessBuilder, WitnessCoefficient}, + }, + ark_ff::{One, Zero}, + provekit_common::{ + witness::{ + SpiceMemoryOperation, SpiceWitnesses, SumTerm, WitnessBuilder, WitnessCoefficient, + }, FieldElement, }, - ark_ff::{One, PrimeField, Zero}, - serde::{Deserialize, Serialize}, std::ops::Neg, }; -/// Like [MemoryOperation], but with the indices of the additional witnesses -/// needed by Spice. -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub enum SpiceMemoryOperation { - /// Load operation. Arguments are R1CS witness indices: - /// (address, value read, read timestamp) - /// `address` is already solved for by the ACIR solver. - Load(usize, usize, usize), - /// Store operation. Arguments are R1CS witness indices: - /// (address, old value, new value, read timestamp) - /// `address`, `old value`, `new value` are already solved for by the ACIR - /// solver. 
- Store(usize, usize, usize, usize), -} - -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] -pub struct SpiceWitnesses { - /// The length of the memory block - pub memory_length: usize, - /// The witness index of the first initial value (they are stored - /// contiguously) (Not written to) - pub initial_values_start: usize, - /// The memory operations, in the order that they occur; each - /// SpiceMemoryOperation contains witness indices that will be written to) - pub memory_operations: Vec, - /// The witness index of the first of the memory_length final read values - /// (stored contiguously) (these witnesses are written to) - pub rv_final_start: usize, - /// The witness index of the first of the memory_length final read - /// timestamps (stored contiguously) (these witnesses are written to) - pub rt_final_start: usize, - /// The index of the first witness written to by the SpiceWitnesses struct - pub first_witness_idx: usize, - /// The number of witnesses written to by the SpiceWitnesses struct - pub num_witnesses: usize, +pub trait SpiceWitnessesBuilder { + fn new( + next_witness_idx: usize, + memory_length: usize, + initial_values_start: usize, + memory_operations: Vec, + ) -> Self; } -impl SpiceWitnesses { - pub fn new( +impl SpiceWitnessesBuilder for SpiceWitnesses { + fn new( mut next_witness_idx: usize, memory_length: usize, initial_values_start: usize, @@ -89,41 +64,6 @@ impl SpiceWitnesses { num_witnesses, } } - - /// Solve for the values of the Spice witnesses. 
- pub fn solve(&self, witness: &mut [Option]) { - let mut rv_final = witness - [self.initial_values_start..self.initial_values_start + self.memory_length] - .to_vec(); - let mut rt_final = vec![0; self.memory_length]; - for (mem_op_index, mem_op) in self.memory_operations.iter().enumerate() { - match mem_op { - SpiceMemoryOperation::Load(addr, value, read_timestamp) => { - let addr = witness[*addr].unwrap(); - let addr_as_usize = addr.into_bigint().0[0] as usize; - witness[*read_timestamp] = - Some(FieldElement::from(rt_final[addr_as_usize] as u64)); - rv_final[addr_as_usize] = witness[*value]; - rt_final[addr_as_usize] = mem_op_index + 1; - } - SpiceMemoryOperation::Store(addr, old_value, new_value, read_timestamp) => { - let addr = witness[*addr].unwrap(); - let addr_as_usize = addr.into_bigint().0[0] as usize; - witness[*old_value] = rv_final[addr_as_usize]; - witness[*read_timestamp] = - Some(FieldElement::from(rt_final[addr_as_usize] as u64)); - let new_value = witness[*new_value]; - rv_final[addr_as_usize] = new_value; - rt_final[addr_as_usize] = mem_op_index + 1; - } - } - } - // Copy the final values and read timestamps into the witness vector - for i in 0..self.memory_length { - witness[self.rv_final_start + i] = rv_final[i]; - witness[self.rt_final_start + i] = Some(FieldElement::from(rt_final[i] as u64)); - } - } } /// Add witnesses and constraints ensuring the integrity of read/write diff --git a/noir-r1cs/src/rom.rs b/provekit/r1cs-compiler/src/memory/rom.rs similarity index 97% rename from noir-r1cs/src/rom.rs rename to provekit/r1cs-compiler/src/memory/rom.rs index 9754e0b01..8430e78b6 100644 --- a/noir-r1cs/src/rom.rs +++ b/provekit/r1cs-compiler/src/memory/rom.rs @@ -2,11 +2,12 @@ use { crate::{ memory::{MemoryBlock, MemoryOperation}, noir_to_r1cs::NoirToR1CSCompiler, - r1cs_solver::{SumTerm, WitnessBuilder, WitnessCoefficient}, + }, + ark_std::{ops::Neg, One}, + provekit_common::{ + witness::{SumTerm, WitnessBuilder, WitnessCoefficient}, 
FieldElement, }, - ark_std::One, - std::ops::Neg, }; /// Add witnesses and constraints enforcing the integrity of read operations diff --git a/provekit/r1cs-compiler/src/noir_proof_scheme.rs b/provekit/r1cs-compiler/src/noir_proof_scheme.rs new file mode 100644 index 000000000..9cd4a372e --- /dev/null +++ b/provekit/r1cs-compiler/src/noir_proof_scheme.rs @@ -0,0 +1,128 @@ +use { + crate::{ + noir_to_r1cs, whir_r1cs::WhirR1CSSchemeBuilder, + witness_generator::NoirWitnessGeneratorBuilder, + }, + anyhow::{ensure, Context as _, Result}, + noirc_artifacts::program::ProgramArtifact, + provekit_common::{ + utils::PrintAbi, witness::NoirWitnessGenerator, NoirProofScheme, WhirR1CSScheme, + }, + std::{fs::File, path::Path}, + tracing::{info, instrument}, +}; + +pub trait NoirProofSchemeBuilder { + fn from_file(path: impl AsRef + std::fmt::Debug) -> Result + where + Self: Sized; + + fn from_program(program: ProgramArtifact) -> Result + where + Self: Sized; +} + +impl NoirProofSchemeBuilder for NoirProofScheme { + #[instrument(fields(size = path.as_ref().metadata().map(|m| m.len()).ok()))] + fn from_file(path: impl AsRef + std::fmt::Debug) -> Result { + let file = File::open(path).context("while opening Noir program")?; + let program = serde_json::from_reader(file).context("while reading Noir program")?; + + Self::from_program(program) + } + + #[instrument(skip_all)] + fn from_program(program: ProgramArtifact) -> Result { + info!("Program noir version: {}", program.noir_version); + info!("Program entry point: fn main{};", PrintAbi(&program.abi)); + ensure!( + program.bytecode.functions.len() == 1, + "Program must have one entry point." + ); + + // Extract bits from Program Artifact. 
+ let main = &program.bytecode.functions[0]; + info!( + "ACIR: {} witnesses, {} opcodes.", + main.current_witness_index, + main.opcodes.len() + ); + + // Compile to R1CS schemes + let (r1cs, witness_map, witness_builders) = noir_to_r1cs(main)?; + info!( + "R1CS {} constraints, {} witnesses, A {} entries, B {} entries, C {} entries", + r1cs.num_constraints(), + r1cs.num_witnesses(), + r1cs.a.num_entries(), + r1cs.b.num_entries(), + r1cs.c.num_entries() + ); + + // Configure witness generator + let witness_generator = + NoirWitnessGenerator::new(&program, witness_map, r1cs.num_witnesses()); + + // Configure Whir + let whir_for_witness = WhirR1CSScheme::new_for_r1cs(&r1cs); + + Ok(Self { + program: program.bytecode, + r1cs, + witness_builders, + witness_generator, + whir_for_witness, + }) + } +} + +#[cfg(test)] +mod tests { + use { + crate::NoirProofSchemeBuilder, + ark_std::One, + provekit_common::{ + witness::{ConstantTerm, SumTerm, WitnessBuilder}, + FieldElement, NoirProofScheme, + }, + serde::{Deserialize, Serialize}, + std::path::PathBuf, + }; + + #[track_caller] + fn test_serde(value: &T) + where + T: std::fmt::Debug + PartialEq + Serialize + for<'a> Deserialize<'a>, + { + // Test JSON + let json = serde_json::to_string(value).unwrap(); + let deserialized = serde_json::from_str(&json).unwrap(); + assert_eq!(value, &deserialized); + + // Test Postcard + let bin = postcard::to_allocvec(value).unwrap(); + let deserialized = postcard::from_bytes(&bin).unwrap(); + assert_eq!(value, &deserialized); + } + + #[test] + fn test_noir_proof_scheme_serde() { + let path = PathBuf::from("../../tooling/provekit-bench/benches/poseidon_rounds.json"); + let proof_schema = NoirProofScheme::from_file(path).unwrap(); + + test_serde(&proof_schema.r1cs); + test_serde(&proof_schema.witness_builders); + test_serde(&proof_schema.witness_generator); + test_serde(&proof_schema.whir_for_witness); + } + + #[test] + fn test_witness_builder_serde() { + let sum_term = 
SumTerm(Some(FieldElement::one()), 2); + test_serde(&sum_term); + let constant_term = ConstantTerm(2, FieldElement::one()); + test_serde(&constant_term); + let witness_builder = WitnessBuilder::Constant(constant_term); + test_serde(&witness_builder); + } +} diff --git a/noir-r1cs/src/noir_to_r1cs.rs b/provekit/r1cs-compiler/src/noir_to_r1cs.rs similarity index 95% rename from noir-r1cs/src/noir_to_r1cs.rs rename to provekit/r1cs-compiler/src/noir_to_r1cs.rs index 5160a1333..f764346bb 100644 --- a/noir-r1cs/src/noir_to_r1cs.rs +++ b/provekit/r1cs-compiler/src/noir_to_r1cs.rs @@ -1,14 +1,8 @@ use { crate::{ binops::{add_binop, BinOp}, - memory::{MemoryBlock, MemoryOperation}, - r1cs_solver::{ConstantTerm, SumTerm, WitnessBuilder}, - ram::add_ram_checking, + memory::{add_ram_checking, add_rom_checking, MemoryBlock, MemoryOperation}, range_check::add_range_checks, - rom::add_rom_checking, - serde_ark, - utils::noir_to_native, - FieldElement, NoirElement, R1CS, }, acir::{ circuit::{ @@ -21,11 +15,13 @@ use { }, anyhow::{bail, Result}, ark_std::One, - serde::{Deserialize, Serialize}, + provekit_common::{ + utils::noir_to_native, + witness::{ConstantOrR1CSWitness, ConstantTerm, SumTerm, WitnessBuilder}, + FieldElement, NoirElement, R1CS, + }, std::{collections::BTreeMap, num::NonZeroU32, ops::Neg}, }; -/// The index of the constant 1 witness in the R1CS instance -pub const WITNESS_ONE_IDX: usize = 0; /// Compiles an ACIR circuit into an [R1CS] instance, comprising of the A, B, /// and C R1CS matrices, along with the witness vector. 
@@ -412,18 +408,3 @@ impl NoirToR1CSCompiler { Ok(()) } } - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] -pub enum ConstantOrR1CSWitness { - Constant(#[serde(with = "serde_ark")] FieldElement), - Witness(usize), -} - -impl ConstantOrR1CSWitness { - pub fn to_tuple(&self) -> (FieldElement, usize) { - match self { - ConstantOrR1CSWitness::Constant(c) => (*c, WITNESS_ONE_IDX), - ConstantOrR1CSWitness::Witness(w) => (FieldElement::one(), *w), - } - } -} diff --git a/noir-r1cs/src/range_check.rs b/provekit/r1cs-compiler/src/range_check.rs similarity index 97% rename from noir-r1cs/src/range_check.rs rename to provekit/r1cs-compiler/src/range_check.rs index f239fb167..0f7603251 100644 --- a/noir-r1cs/src/range_check.rs +++ b/provekit/r1cs-compiler/src/range_check.rs @@ -1,11 +1,13 @@ use { crate::{ - digits::add_digital_decomposition, + digits::{add_digital_decomposition, DigitalDecompositionWitnessesBuilder}, noir_to_r1cs::NoirToR1CSCompiler, - r1cs_solver::{ProductLinearTerm, SumTerm, WitnessBuilder, WitnessCoefficient}, + }, + ark_std::{One, Zero}, + provekit_common::{ + witness::{ProductLinearTerm, SumTerm, WitnessBuilder, WitnessCoefficient}, FieldElement, }, - ark_ff::{One, Zero}, std::{collections::BTreeMap, ops::Neg}, }; diff --git a/provekit/r1cs-compiler/src/whir_r1cs.rs b/provekit/r1cs-compiler/src/whir_r1cs.rs new file mode 100644 index 000000000..500036700 --- /dev/null +++ b/provekit/r1cs-compiler/src/whir_r1cs.rs @@ -0,0 +1,54 @@ +use { + provekit_common::{utils::next_power_of_two, WhirConfig, WhirR1CSScheme, R1CS}, + whir::parameters::{ + default_max_pow, FoldingFactor, MultivariateParameters, ProtocolParameters, SoundnessType, + }, +}; + +pub trait WhirR1CSSchemeBuilder { + fn new_for_r1cs(r1cs: &R1CS) -> Self; + + fn new_whir_config_for_size(num_variables: usize, batch_size: usize) -> WhirConfig; +} + +impl WhirR1CSSchemeBuilder for WhirR1CSScheme { + fn new_for_r1cs(r1cs: &R1CS) -> Self { + // m is equal to ceiling(log(number of 
variables in constraint system)). It is + // equal to the log of the width of the matrices. + let m = next_power_of_two(r1cs.num_witnesses()); + + // m_0 is equal to ceiling(log(number_of_constraints)). It is equal to the + // number of variables in the multilinear polynomial we are running our sumcheck + // on. + let m_0 = next_power_of_two(r1cs.num_constraints()); + + // Whir parameters + Self { + m: m + 1, + m_0, + a_num_terms: next_power_of_two(r1cs.a().iter().count()), + whir_witness: Self::new_whir_config_for_size(m + 1, 2), + whir_for_hiding_spartan: Self::new_whir_config_for_size( + next_power_of_two(4 * m_0) + 1, + 2, + ), + } + } + + fn new_whir_config_for_size(num_variables: usize, batch_size: usize) -> WhirConfig { + let mv_params = MultivariateParameters::new(num_variables); + let whir_params = ProtocolParameters { + initial_statement: true, + security_level: 128, + pow_bits: default_max_pow(num_variables, 1), + folding_factor: FoldingFactor::Constant(4), + leaf_hash_params: (), + two_to_one_params: (), + soundness_type: SoundnessType::ConjectureList, + _pow_parameters: Default::default(), + starting_log_inv_rate: 1, + batch_size, + }; + WhirConfig::new(mv_params, whir_params) + } +} diff --git a/noir-r1cs/src/noir_witness.rs b/provekit/r1cs-compiler/src/witness_generator.rs similarity index 60% rename from noir-r1cs/src/noir_witness.rs rename to provekit/r1cs-compiler/src/witness_generator.rs index f72a0d0b7..b135588cb 100644 --- a/noir-r1cs/src/noir_witness.rs +++ b/provekit/r1cs-compiler/src/witness_generator.rs @@ -1,8 +1,4 @@ use { - crate::{ - utils::{noir_to_native, serde_jsonify}, - FieldElement, - }, anyhow::{anyhow, bail, ensure, Context, Result}, ark_ff::PrimeField, noirc_abi::{ @@ -10,30 +6,27 @@ use { Abi, AbiType, }, noirc_artifacts::program::ProgramArtifact, - serde::{Deserialize, Serialize}, - spongefish::codecs::arkworks_algebra::FieldDomainSeparator, + provekit_common::{utils::noir_to_native, witness::NoirWitnessGenerator, 
FieldElement}, std::num::NonZeroU32, tracing::instrument, }; -// TODO: Handling of the return value for the verifier. +pub trait NoirWitnessGeneratorBuilder { + fn new( + program: &ProgramArtifact, + witness_map: Vec>, + r1cs_witnesses: usize, + ) -> Self; -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct NoirWitnessGenerator { - // Note: Abi uses an [internally tagged] enum format in Serde, which is not compatible - // with some schemaless formats like Postcard. - // [internally-tagged]: https://serde.rs/enum-representations.html - // TODO: serializes the ABI as a json string. Something like CBOR might be better. - #[serde(with = "serde_jsonify")] - abi: Abi, + fn witness_map(&self) -> &[Option]; - /// ACIR witness index to R1CS witness index - /// Index zero is reserved for constant one, so we can use `NonZeroU32` - witness_map: Vec>, + fn abi(&self) -> &Abi; + + fn input_from_toml(&self, toml: &str) -> Result>; } -impl NoirWitnessGenerator { - pub fn new( +impl NoirWitnessGeneratorBuilder for NoirWitnessGenerator { + fn new( program: &ProgramArtifact, mut witness_map: Vec>, r1cs_witnesses: usize, @@ -50,17 +43,17 @@ impl NoirWitnessGenerator { Self { abi, witness_map } } - pub fn witness_map(&self) -> &[Option] { + fn witness_map(&self) -> &[Option] { &self.witness_map } - pub fn abi(&self) -> &Abi { + fn abi(&self) -> &Abi { &self.abi } /// Noir inputs are in order at the start of the witness vector #[instrument(skip_all, fields(size = toml.len()))] - pub fn input_from_toml(&self, toml: &str) -> Result> { + fn input_from_toml(&self, toml: &str) -> Result> { // Parse toml to name -> value map let mut input = Format::Toml .parse(toml, &self.abi) @@ -87,13 +80,6 @@ impl NoirWitnessGenerator { } } -impl PartialEq for NoirWitnessGenerator { - fn eq(&self, other: &Self) -> bool { - format!("{:?}", self.abi) == format!("{:?}", other.abi) - && self.witness_map == other.witness_map - } -} - /// Recursively encode Noir ABI input to a witness vector /// See 
[`noirc_abi::Abi::encode`] for the Noir ABI specification. fn encode_input( @@ -140,42 +126,3 @@ fn encode_input( } Ok(()) } - -/// Trait which is used to add witness RNG for IOPattern -pub trait WitnessIOPattern { - /// Schedule absorption of circuit shape (2 scalars): (num_constraints, - /// num_witnesses). - fn add_shape(self) -> Self; - - /// Schedule absorption of `num_pub_inputs` public input scalars. - fn add_public_inputs(self, num_pub_inputs: usize) -> Self; - - /// Schedule absorption of `num_challenges` Fiat–Shamir challenges for - /// LogUp/Spice. - fn add_logup_challenges(self, num_challenges: usize) -> Self; -} - -impl WitnessIOPattern for IOPattern -where - IOPattern: FieldDomainSeparator, -{ - fn add_shape(self) -> Self { - self.add_scalars(2, "shape") - } - - fn add_public_inputs(self, num_pub_inputs: usize) -> Self { - if num_pub_inputs > 0 { - self.add_scalars(num_pub_inputs, "pub_inputs") - } else { - self - } - } - - fn add_logup_challenges(self, num_challenges: usize) -> Self { - if num_challenges > 0 { - self.challenge_scalars(num_challenges, "wb:challenges") - } else { - self - } - } -} diff --git a/provekit/verifier/.gitignore b/provekit/verifier/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ b/provekit/verifier/.gitignore @@ -0,0 +1 @@ +/target diff --git a/provekit/verifier/Cargo.toml b/provekit/verifier/Cargo.toml new file mode 100644 index 000000000..90847847f --- /dev/null +++ b/provekit/verifier/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "provekit-verifier" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +# Workspace crates +provekit-common.workspace = true +provekit-prover.workspace = true + +# Cryptography and proof systems +ark-std.workspace = true +spongefish.workspace = true +whir.workspace = true + +# 3rd party +anyhow.workspace = true 
+tracing.workspace = true + +[lints] +workspace = true diff --git a/provekit/verifier/src/lib.rs b/provekit/verifier/src/lib.rs new file mode 100644 index 000000000..8f39094e1 --- /dev/null +++ b/provekit/verifier/src/lib.rs @@ -0,0 +1,7 @@ +mod noir_proof_scheme; +mod whir_r1cs; + +pub use noir_proof_scheme::NoirProofSchemeVerifier; + +#[cfg(test)] +mod tests {} diff --git a/provekit/verifier/src/noir_proof_scheme.rs b/provekit/verifier/src/noir_proof_scheme.rs new file mode 100644 index 000000000..f1ce8b998 --- /dev/null +++ b/provekit/verifier/src/noir_proof_scheme.rs @@ -0,0 +1,18 @@ +use { + crate::whir_r1cs::WhirR1CSVerifier, + anyhow::Result, + provekit_common::{NoirProof, NoirProofScheme}, + tracing::instrument, +}; + +pub trait NoirProofSchemeVerifier { + fn verify(&self, proof: &NoirProof) -> Result<()>; +} + +impl NoirProofSchemeVerifier for NoirProofScheme { + #[instrument(skip_all)] + fn verify(&self, proof: &NoirProof) -> Result<()> { + self.whir_for_witness.verify(&proof.whir_r1cs_proof)?; + Ok(()) + } +} diff --git a/provekit/verifier/src/whir_r1cs.rs b/provekit/verifier/src/whir_r1cs.rs new file mode 100644 index 000000000..e56bbb6b9 --- /dev/null +++ b/provekit/verifier/src/whir_r1cs.rs @@ -0,0 +1,188 @@ +use { + anyhow::{ensure, Context, Result}, + ark_std::{One, Zero}, + provekit_common::{ + skyscraper::SkyscraperSponge, + utils::sumcheck::{calculate_eq, eval_cubic_poly}, + FieldElement, WhirConfig, WhirR1CSProof, WhirR1CSScheme, + }, + spongefish::{ + codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, + VerifierState, + }, + tracing::instrument, + whir::{ + poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, + whir::{ + committer::{reader::ParsedCommitment, CommitmentReader}, + statement::{Statement, Weights}, + utils::HintDeserialize, + verifier::Verifier, + }, + }, +}; + +pub struct DataFromSumcheckVerifier { + r: Vec, + alpha: Vec, + last_sumcheck_val: FieldElement, +} + +pub trait WhirR1CSVerifier { + fn 
verify(&self, proof: &WhirR1CSProof) -> Result<()>; +} + +impl WhirR1CSVerifier for WhirR1CSScheme { + #[instrument(skip_all)] + #[allow(unused)] // TODO: Fix implementation + fn verify(&self, proof: &WhirR1CSProof) -> Result<()> { + // Set up transcript + let io = self.create_io_pattern(); + let mut arthur = io.to_verifier_state(&proof.transcript); + + let commitment_reader = CommitmentReader::new(&self.whir_witness); + let parsed_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); + + let data_from_sumcheck_verifier = run_sumcheck_verifier( + &mut arthur, + self.m_0, + &self.whir_for_hiding_spartan, + // proof.whir_spartan_blinding_values, + ) + .context("while verifying sumcheck")?; + + let whir_query_answer_sum_vectors: (Vec, Vec) = + arthur.hint().unwrap(); + + let whir_query_answer_sums = ( + whir_query_answer_sum_vectors.0.try_into().unwrap(), + whir_query_answer_sum_vectors.1.try_into().unwrap(), + ); + + let statement_verifier = prepare_statement_for_witness_verifier::<3>( + self.m, + &parsed_commitment, + &whir_query_answer_sums, + ); + + let (folding_randomness, deferred) = run_whir_pcs_verifier( + &mut arthur, + &parsed_commitment, + &self.whir_witness, + &statement_verifier, + ) + .context("while verifying WHIR proof")?; + + // Check the Spartan sumcheck relation. 
+ ensure!( + data_from_sumcheck_verifier.last_sumcheck_val + == (whir_query_answer_sums.0[0] * whir_query_answer_sums.0[1] + - whir_query_answer_sums.0[2]) + * calculate_eq( + &data_from_sumcheck_verifier.r, + &data_from_sumcheck_verifier.alpha + ), + "last sumcheck value does not match" + ); + + Ok(()) + } +} + +fn prepare_statement_for_witness_verifier( + m: usize, + parsed_commitment: &ParsedCommitment, + whir_query_answer_sums: &([FieldElement; N], [FieldElement; N]), +) -> Statement { + let mut statement_verifier = Statement::::new(m); + for i in 0..whir_query_answer_sums.0.len() { + let claimed_sum = whir_query_answer_sums.0[i] + + whir_query_answer_sums.1[i] * parsed_commitment.batching_randomness; + statement_verifier.add_constraint( + Weights::linear(EvaluationsList::new(vec![FieldElement::zero(); 1 << m])), + claimed_sum, + ); + } + statement_verifier +} + +#[instrument(skip_all)] +pub fn run_sumcheck_verifier( + arthur: &mut VerifierState, + m_0: usize, + whir_for_spartan_blinding_config: &WhirConfig, +) -> Result { + // r is the combination randomness from the 2nd item of the interaction phase + let mut r = vec![FieldElement::zero(); m_0]; + let _ = arthur.fill_challenge_scalars(&mut r); + + let commitment_reader = CommitmentReader::new(whir_for_spartan_blinding_config); + let parsed_commitment = commitment_reader.parse_commitment(arthur).unwrap(); + + let mut sum_g_buf = [FieldElement::zero()]; + arthur.fill_next_scalars(&mut sum_g_buf)?; + + let mut rho_buf = [FieldElement::zero()]; + arthur.fill_challenge_scalars(&mut rho_buf)?; + let rho = rho_buf[0]; + + let mut saved_val_for_sumcheck_equality_assertion = rho * sum_g_buf[0]; + + let mut alpha = vec![FieldElement::zero(); m_0]; + + for item in alpha.iter_mut().take(m_0) { + let mut hhat_i = [FieldElement::zero(); 4]; + let mut alpha_i = [FieldElement::zero(); 1]; + let _ = arthur.fill_next_scalars(&mut hhat_i); + let _ = arthur.fill_challenge_scalars(&mut alpha_i); + *item = alpha_i[0]; + let 
hhat_i_at_zero = eval_cubic_poly(&hhat_i, &FieldElement::zero()); + let hhat_i_at_one = eval_cubic_poly(&hhat_i, &FieldElement::one()); + ensure!( + saved_val_for_sumcheck_equality_assertion == hhat_i_at_zero + hhat_i_at_one, + "Sumcheck equality assertion failed" + ); + saved_val_for_sumcheck_equality_assertion = eval_cubic_poly(&hhat_i, &alpha_i[0]); + } + let mut values_of_polynomial_sums = [FieldElement::zero(); 2]; + let _ = arthur.fill_next_scalars(&mut values_of_polynomial_sums); + + let statement_verifier = prepare_statement_for_witness_verifier::<1>( + whir_for_spartan_blinding_config.mv_parameters.num_variables, + &parsed_commitment, + &([values_of_polynomial_sums[0]], [ + values_of_polynomial_sums[1] + ]), + ); + run_whir_pcs_verifier( + arthur, + &parsed_commitment, + whir_for_spartan_blinding_config, + &statement_verifier, + ) + .context("while verifying WHIR")?; + + let f_at_alpha = saved_val_for_sumcheck_equality_assertion - rho * values_of_polynomial_sums[0]; + + Ok(DataFromSumcheckVerifier { + r, + alpha, + last_sumcheck_val: f_at_alpha, + }) +} + +#[instrument(skip_all)] +pub fn run_whir_pcs_verifier( + arthur: &mut VerifierState, + parsed_commitment: &ParsedCommitment, + params: &WhirConfig, + statement_verifier: &Statement, +) -> Result<(MultilinearPoint, Vec)> { + let verifier = Verifier::new(params); + + let (folding_randomness, deferred) = verifier + .verify(arthur, parsed_commitment, statement_verifier) + .context("while verifying WHIR")?; + + Ok((folding_randomness, deferred)) +} diff --git a/skyscraper/block-multiplier-codegen/Cargo.toml b/skyscraper/block-multiplier-codegen/Cargo.toml index 1e18c9160..946f023d2 100644 --- a/skyscraper/block-multiplier-codegen/Cargo.toml +++ b/skyscraper/block-multiplier-codegen/Cargo.toml @@ -1,7 +1,16 @@ [package] name = "block-multiplier-codegen" version = "0.1.0" -edition = "2024" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true 
+homepage.workspace = true +repository.workspace = true [dependencies] +# Workspace crates hla.workspace = true + +[lints] +workspace = true diff --git a/skyscraper/block-multiplier-codegen/src/main.rs b/skyscraper/block-multiplier-codegen/src/main.rs index f3e375010..7437e3217 100644 --- a/skyscraper/block-multiplier-codegen/src/main.rs +++ b/skyscraper/block-multiplier-codegen/src/main.rs @@ -1,6 +1,6 @@ use { block_multiplier_codegen::{scalar, simd}, - hla::builder::{Interleaving, build_includable}, + hla::builder::{build_includable, Interleaving}, }; fn main() { diff --git a/skyscraper/block-multiplier/Cargo.toml b/skyscraper/block-multiplier/Cargo.toml index 915b2a751..469c97d6b 100644 --- a/skyscraper/block-multiplier/Cargo.toml +++ b/skyscraper/block-multiplier/Cargo.toml @@ -1,24 +1,39 @@ [package] name = "block-multiplier" version = "0.1.0" -edition = "2024" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true [dependencies] -seq-macro.workspace = true +# Workspace crates fp-rounding.workspace = true +# 3rd party +seq-macro.workspace = true + [dev-dependencies] -rand.workspace = true +# Cryptography and proof systems +ark-bn254.workspace = true +ark-ff.workspace = true + +# 3rd party divan.workspace = true primitive-types.workspace = true -ark-ff.workspace = true -ark-bn254.workspace = true proptest.workspace = true +rand.workspace = true [build-dependencies] +# Workspace crates block-multiplier-codegen.workspace = true hla.workspace = true +[lints] +workspace = true + [[bench]] name = "bench" harness = false diff --git a/skyscraper/block-multiplier/benches/bench.rs b/skyscraper/block-multiplier/benches/bench.rs index 1d37cc8a4..3e5c6f177 100644 --- a/skyscraper/block-multiplier/benches/bench.rs +++ b/skyscraper/block-multiplier/benches/bench.rs @@ -4,7 +4,7 @@ use { core::{array, simd::u64x2}, divan::Bencher, fp_rounding::with_rounding_mode, - 
rand::{Rng, rng}, + rand::{rng, Rng}, }; // #[divan::bench_group] diff --git a/skyscraper/block-multiplier/build.rs b/skyscraper/block-multiplier/build.rs index 5e4f49335..7623a247d 100644 --- a/skyscraper/block-multiplier/build.rs +++ b/skyscraper/block-multiplier/build.rs @@ -1,6 +1,6 @@ use { block_multiplier_codegen::{scalar, simd}, - hla::builder::{Interleaving, build_includable}, + hla::builder::{build_includable, Interleaving}, std::path::Path, }; diff --git a/skyscraper/block-multiplier/src/block_simd.rs b/skyscraper/block-multiplier/src/block_simd.rs index ca271b0c7..fc2cb21d3 100644 --- a/skyscraper/block-multiplier/src/block_simd.rs +++ b/skyscraper/block-multiplier/src/block_simd.rs @@ -11,7 +11,7 @@ use { core::{ arch::aarch64::vcvtq_f64_u64, ops::BitAnd, - simd::{Simd, num::SimdFloat}, + simd::{num::SimdFloat, Simd}, }, fp_rounding::{RoundingGuard, Zero}, std::simd::StdFloat, diff --git a/skyscraper/block-multiplier/src/constants.rs b/skyscraper/block-multiplier/src/constants.rs index 9a0718c82..171273f55 100644 --- a/skyscraper/block-multiplier/src/constants.rs +++ b/skyscraper/block-multiplier/src/constants.rs @@ -131,8 +131,8 @@ pub const RHO_4: [u64; 5] = [ pub const C1: f64 = pow_2(104); // 2.0^104 pub const C2: f64 = pow_2(104) + pow_2(52); // 2.0^104 + 2.0^52 -// const C3: f64 = pow_2(52); // 2.0^52 -// ------------------------------------------------------------------------------------------------- + // const C3: f64 = pow_2(52); // 2.0^52 + // ------------------------------------------------------------------------------------------------- const fn pow_2(n: u32) -> f64 { // Unfortunately we can't use f64::powi in const fn yet diff --git a/skyscraper/block-multiplier/src/portable_simd.rs b/skyscraper/block-multiplier/src/portable_simd.rs index ebfb4bc03..513eb982f 100644 --- a/skyscraper/block-multiplier/src/portable_simd.rs +++ b/skyscraper/block-multiplier/src/portable_simd.rs @@ -9,7 +9,7 @@ use { std::{ arch::aarch64::vcvtq_f64_u64, 
ops::BitAnd, - simd::{Simd, StdFloat, num::SimdFloat}, + simd::{num::SimdFloat, Simd, StdFloat}, }, }; diff --git a/skyscraper/block-multiplier/src/scalar.rs b/skyscraper/block-multiplier/src/scalar.rs index d9be3f7ea..ff7250ecb 100644 --- a/skyscraper/block-multiplier/src/scalar.rs +++ b/skyscraper/block-multiplier/src/scalar.rs @@ -140,7 +140,7 @@ mod tests { ark_ff::BigInt, primitive_types::U256, proptest::proptest, - rand::{Rng, SeedableRng, rngs}, + rand::{rngs, Rng, SeedableRng}, }; #[test] diff --git a/skyscraper/block-multiplier/src/test_utils.rs b/skyscraper/block-multiplier/src/test_utils.rs index 3518d895f..e46b3f25b 100644 --- a/skyscraper/block-multiplier/src/test_utils.rs +++ b/skyscraper/block-multiplier/src/test_utils.rs @@ -6,7 +6,7 @@ use { ark_ff::{BigInt, Field}, proptest::{ collection, - prelude::{Strategy, any}, + prelude::{any, Strategy}, proptest, }, }; diff --git a/skyscraper/block-multiplier/src/utils.rs b/skyscraper/block-multiplier/src/utils.rs index 029eb3909..774d54bf1 100644 --- a/skyscraper/block-multiplier/src/utils.rs +++ b/skyscraper/block-multiplier/src/utils.rs @@ -5,9 +5,9 @@ use { array, ops::BitAnd, simd::{ - Simd, StdFloat, cmp::SimdPartialEq, num::{SimdFloat, SimdInt, SimdUint}, + Simd, StdFloat, }, }, }; diff --git a/skyscraper/core/Cargo.toml b/skyscraper/core/Cargo.toml index fe298b6c0..25df09d32 100644 --- a/skyscraper/core/Cargo.toml +++ b/skyscraper/core/Cargo.toml @@ -9,19 +9,27 @@ homepage.workspace = true repository.workspace = true [dependencies] +# Workspace crates block-multiplier.workspace = true fp-rounding.workspace = true -ark-ff.workspace = true + +# Cryptography and proof systems ark-bn254.workspace = true -zerocopy.workspace = true -seq-macro.workspace = true +ark-ff.workspace = true + +# 3rd party proptest.workspace = true rayon.workspace = true +seq-macro.workspace = true +zerocopy.workspace = true [dev-dependencies] -rand.workspace = true divan.workspace = true primitive-types.workspace = true 
+rand.workspace = true + +[lints] +workspace = true [[bench]] name = "bench" diff --git a/skyscraper/fp-rounding/Cargo.toml b/skyscraper/fp-rounding/Cargo.toml index f72f7e378..44e8094bd 100644 --- a/skyscraper/fp-rounding/Cargo.toml +++ b/skyscraper/fp-rounding/Cargo.toml @@ -8,12 +8,12 @@ license.workspace = true homepage.workspace = true repository.workspace = true -[lints] -workspace = true - [dev-dependencies] divan.workspace = true +[lints] +workspace = true + [[bench]] name = "main" harness = false diff --git a/skyscraper/hla/Cargo.toml b/skyscraper/hla/Cargo.toml index c24631453..c2651146e 100644 --- a/skyscraper/hla/Cargo.toml +++ b/skyscraper/hla/Cargo.toml @@ -1,11 +1,20 @@ [package] name = "hla" version = "0.1.0" -edition = "2024" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true [dependencies] +# 3rd party paste.workspace = true [dev-dependencies] quickcheck.workspace = true quickcheck_macros.workspace = true + +[lints] +workspace = true diff --git a/skyscraper/hla/src/backend.rs b/skyscraper/hla/src/backend.rs index 5dc876031..774e68b82 100644 --- a/skyscraper/hla/src/backend.rs +++ b/skyscraper/hla/src/backend.rs @@ -1,9 +1,9 @@ use { crate::{ - FreshVariable, ir::{FreshRegister, HardwareRegister, Instruction, TypedHardwareRegister, Variable}, liveness::{Lifetime, Lifetimes}, reification::{Index, RegisterType, ReifiedRegister}, + FreshVariable, }, std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque}, }; diff --git a/skyscraper/hla/src/builder.rs b/skyscraper/hla/src/builder.rs index b833db3e7..c0f3a196d 100644 --- a/skyscraper/hla/src/builder.rs +++ b/skyscraper/hla/src/builder.rs @@ -1,9 +1,8 @@ use { crate::{ - AtomicInstructionBlock, backend::{ - AllocatedVariable, RegisterBank, RegisterMapping, allocate_input_variable, - hardware_register_allocation, reserve_output_variable, + allocate_input_variable, 
hardware_register_allocation, reserve_output_variable, + AllocatedVariable, RegisterBank, RegisterMapping, }, codegen::{ generate_rust_global_asm, generate_rust_includable_asm, generate_rust_inline_asm, @@ -11,9 +10,10 @@ use { frontend::{Assembler, FreshAllocator, FreshVariable}, ir::{HardwareRegister, Instruction, Variable}, liveness::liveness_analysis, + AtomicInstructionBlock, }, std::{ - collections::{HashMap, hash_map::Entry}, + collections::{hash_map::Entry, HashMap}, path::Path, }, }; diff --git a/skyscraper/hla/src/codegen.rs b/skyscraper/hla/src/codegen.rs index 6edf030bc..d23b44925 100644 --- a/skyscraper/hla/src/codegen.rs +++ b/skyscraper/hla/src/codegen.rs @@ -218,7 +218,8 @@ fn format_operands(variables: &[AllocatedVariable], direction: &str) -> String { ) }) .collect::>() - .join(", ") // Collect registers within a group with comma separators + .join(", ") // Collect registers within a group with comma + // separators } else { format!( "{direction}(\"{}\") {}", diff --git a/skyscraper/hla/src/instructions.rs b/skyscraper/hla/src/instructions.rs index f48992baf..d12466385 100644 --- a/skyscraper/hla/src/instructions.rs +++ b/skyscraper/hla/src/instructions.rs @@ -32,7 +32,7 @@ use { crate::{ - frontend::{Assembler, D, FreshAllocator, PointerReg, Reg, SIMD, Simd, SizedIdx}, + frontend::{Assembler, FreshAllocator, PointerReg, Reg, Simd, SizedIdx, D, SIMD}, ir::{FreshRegister, Instruction, Modifier}, reification::ReifyRegister, }, diff --git a/skyscraper/hla/src/liveness.rs b/skyscraper/hla/src/liveness.rs index 0e961f9d2..3f79e19e5 100644 --- a/skyscraper/hla/src/liveness.rs +++ b/skyscraper/hla/src/liveness.rs @@ -1,9 +1,9 @@ use { crate::{ - FreshAllocator, frontend::FreshVariable, ir::{FreshRegister, Instruction}, reification::ReifiedRegister, + FreshAllocator, }, std::{ collections::{HashSet, VecDeque}, diff --git a/tooling/cli/.gitignore b/tooling/cli/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ 
b/tooling/cli/.gitignore @@ -0,0 +1 @@ +/target diff --git a/tooling/cli/Cargo.toml b/tooling/cli/Cargo.toml new file mode 100644 index 000000000..11ae43801 --- /dev/null +++ b/tooling/cli/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "provekit-cli" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +# Workspace crates +provekit-common.workspace = true +provekit-gnark.workspace = true +provekit-prover.workspace = true +provekit-r1cs-compiler.workspace = true +provekit-verifier.workspace = true + +# Noir language +acir.workspace = true + +# 3rd party +anyhow.workspace = true +argh.workspace = true +base64.workspace = true +serde.workspace = true +serde_json.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true + +[lints] +workspace = true diff --git a/noir-r1cs/src/cli/cmd/circuit_stats.rs b/tooling/cli/src/cmd/circuit_stats.rs similarity index 100% rename from noir-r1cs/src/cli/cmd/circuit_stats.rs rename to tooling/cli/src/cmd/circuit_stats.rs diff --git a/noir-r1cs/src/cli/cmd/generate_gnark_inputs.rs b/tooling/cli/src/cmd/generate_gnark_inputs.rs similarity index 94% rename from noir-r1cs/src/cli/cmd/generate_gnark_inputs.rs rename to tooling/cli/src/cmd/generate_gnark_inputs.rs index 4624e97da..3afc41067 100644 --- a/noir-r1cs/src/cli/cmd/generate_gnark_inputs.rs +++ b/tooling/cli/src/cmd/generate_gnark_inputs.rs @@ -2,7 +2,8 @@ use { crate::Command, anyhow::{Context, Result}, argh::FromArgs, - noir_r1cs::{read, write_gnark_parameters_to_file, NoirProof, NoirProofScheme}, + provekit_common::{file::read, NoirProof, NoirProofScheme}, + provekit_gnark::write_gnark_parameters_to_file, std::{fs::File, io::Write, path::PathBuf}, tracing::{info, instrument}, }; diff --git a/noir-r1cs/src/cli/cmd/mod.rs b/tooling/cli/src/cmd/mod.rs similarity index 100% rename from noir-r1cs/src/cli/cmd/mod.rs 
rename to tooling/cli/src/cmd/mod.rs diff --git a/noir-r1cs/src/cli/cmd/prepare.rs b/tooling/cli/src/cmd/prepare.rs similarity index 89% rename from noir-r1cs/src/cli/cmd/prepare.rs rename to tooling/cli/src/cmd/prepare.rs index a6e16e5e2..5e6c22e01 100644 --- a/noir-r1cs/src/cli/cmd/prepare.rs +++ b/tooling/cli/src/cmd/prepare.rs @@ -2,7 +2,8 @@ use { super::Command, anyhow::{Context, Result}, argh::FromArgs, - noir_r1cs::{write, NoirProofScheme}, + provekit_common::{file::write, NoirProofScheme}, + provekit_r1cs_compiler::NoirProofSchemeBuilder, std::path::PathBuf, tracing::instrument, }; diff --git a/noir-r1cs/src/cli/cmd/prove.rs b/tooling/cli/src/cmd/prove.rs similarity index 90% rename from noir-r1cs/src/cli/cmd/prove.rs rename to tooling/cli/src/cmd/prove.rs index 1269c8df4..bd6480990 100644 --- a/noir-r1cs/src/cli/cmd/prove.rs +++ b/tooling/cli/src/cmd/prove.rs @@ -1,8 +1,14 @@ +#[cfg(test)] +use provekit_verifier::NoirProofSchemeVerifier; use { super::Command, anyhow::{Context, Result}, argh::FromArgs, - noir_r1cs::{self, read, write, NoirProofScheme}, + provekit_common::{ + file::{read, write}, + NoirProofScheme, + }, + provekit_prover::NoirProofSchemeProver, std::path::PathBuf, tracing::{info, instrument}, }; diff --git a/noir-r1cs/src/cli/cmd/verify.rs b/tooling/cli/src/cmd/verify.rs similarity index 91% rename from noir-r1cs/src/cli/cmd/verify.rs rename to tooling/cli/src/cmd/verify.rs index 28c2de8d7..ad611faee 100644 --- a/noir-r1cs/src/cli/cmd/verify.rs +++ b/tooling/cli/src/cmd/verify.rs @@ -2,7 +2,8 @@ use { super::Command, anyhow::{Context, Result}, argh::FromArgs, - noir_r1cs::{self, read, NoirProofScheme}, + provekit_common::{file::read, NoirProofScheme}, + provekit_verifier::NoirProofSchemeVerifier, std::path::PathBuf, tracing::{info, instrument}, }; diff --git a/noir-r1cs/src/cli/main.rs b/tooling/cli/src/main.rs similarity index 100% rename from noir-r1cs/src/cli/main.rs rename to tooling/cli/src/main.rs diff --git 
a/noir-r1cs/src/cli/measuring_alloc.rs b/tooling/cli/src/measuring_alloc.rs similarity index 100% rename from noir-r1cs/src/cli/measuring_alloc.rs rename to tooling/cli/src/measuring_alloc.rs diff --git a/noir-r1cs/src/cli/span_stats.rs b/tooling/cli/src/span_stats.rs similarity index 99% rename from noir-r1cs/src/cli/span_stats.rs rename to tooling/cli/src/span_stats.rs index edc50b961..47f279e13 100644 --- a/noir-r1cs/src/cli/span_stats.rs +++ b/tooling/cli/src/span_stats.rs @@ -3,7 +3,7 @@ //! NOTE: This module is only included in the bin, not in the lib. use { crate::ALLOC, - noir_r1cs::human, + provekit_common::utils::human, std::{ cmp::max, fmt::{self, Write as _}, diff --git a/tooling/provekit-bench/Cargo.toml b/tooling/provekit-bench/Cargo.toml new file mode 100644 index 000000000..b90f5c9ae --- /dev/null +++ b/tooling/provekit-bench/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "provekit-bench" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +# Workspace crates +provekit-common.workspace = true +provekit-prover.workspace = true +provekit-r1cs-compiler.workspace = true +provekit-verifier.workspace = true + +# Noir language +nargo.workspace = true +nargo_cli.workspace = true +nargo_toml.workspace = true +noirc_driver.workspace = true +noirc_artifacts.workspace = true + +# 3rd party +anyhow.workspace = true +divan.workspace = true +serde.workspace = true +test-case.workspace = true +toml.workspace = true + +[lints] +workspace = true + +[[bench]] +name = "bench" +harness = false diff --git a/noir-r1cs/benches/bench.rs b/tooling/provekit-bench/benches/bench.rs similarity index 87% rename from noir-r1cs/benches/bench.rs rename to tooling/provekit-bench/benches/bench.rs index 1c339bb78..4903bc0ca 100644 --- a/noir-r1cs/benches/bench.rs +++ b/tooling/provekit-bench/benches/bench.rs @@ -3,7 +3,9 @@ use { 
anyhow::Context, core::hint::black_box, divan::Bencher, - noir_r1cs::{read, NoirProof, NoirProofScheme}, + provekit_common::{file::read, NoirProof, NoirProofScheme}, + provekit_prover::NoirProofSchemeProver, + provekit_verifier::NoirProofSchemeVerifier, std::path::Path, }; @@ -20,7 +22,7 @@ fn prove_poseidon_1000(bencher: Bencher) { .with_context(|| format!("Reading {}", path.display())) .expect("Reading proof scheme"); - let crate_dir: &Path = "../noir-examples/poseidon-rounds".as_ref(); + let crate_dir: &Path = "../../noir-examples/poseidon-rounds".as_ref(); let witness_path = crate_dir.join("Prover.toml"); @@ -35,7 +37,7 @@ fn prove_poseidon_1000(bencher: Bencher) { fn prove_poseidon_1000_with_io(bencher: Bencher) { let path: &Path = "benches/poseidon-1000.nps".as_ref(); - let crate_dir: &Path = "../noir-examples/poseidon-rounds".as_ref(); + let crate_dir: &Path = "../../noir-examples/poseidon-rounds".as_ref(); let witness_path = crate_dir.join("Prover.toml"); bencher.bench(|| { diff --git a/noir-r1cs/benches/poseidon-1000.np b/tooling/provekit-bench/benches/poseidon-1000.np similarity index 100% rename from noir-r1cs/benches/poseidon-1000.np rename to tooling/provekit-bench/benches/poseidon-1000.np diff --git a/noir-r1cs/benches/poseidon-1000.nps b/tooling/provekit-bench/benches/poseidon-1000.nps similarity index 100% rename from noir-r1cs/benches/poseidon-1000.nps rename to tooling/provekit-bench/benches/poseidon-1000.nps diff --git a/noir-r1cs/benches/poseidon-1000.toml b/tooling/provekit-bench/benches/poseidon-1000.toml similarity index 100% rename from noir-r1cs/benches/poseidon-1000.toml rename to tooling/provekit-bench/benches/poseidon-1000.toml diff --git a/noir-r1cs/benches/poseidon_rounds.json b/tooling/provekit-bench/benches/poseidon_rounds.json similarity index 100% rename from noir-r1cs/benches/poseidon_rounds.json rename to tooling/provekit-bench/benches/poseidon_rounds.json diff --git a/tooling/provekit-bench/tests/compiler.rs 
b/tooling/provekit-bench/tests/compiler.rs new file mode 100644 index 000000000..106c7c944 --- /dev/null +++ b/tooling/provekit-bench/tests/compiler.rs @@ -0,0 +1,87 @@ +use { + anyhow::Result, + nargo::workspace::Workspace, + nargo_cli::cli::compile_cmd::compile_workspace_full, + nargo_toml::{resolve_workspace_from_toml, PackageSelection}, + noirc_driver::CompileOptions, + provekit_common::NoirProofScheme, + provekit_prover::NoirProofSchemeProver, + provekit_r1cs_compiler::NoirProofSchemeBuilder, + provekit_verifier::NoirProofSchemeVerifier, + serde::Deserialize, + std::path::Path, + test_case::test_case, +}; + +#[derive(Debug, Deserialize)] +struct NargoToml { + package: NargoTomlPackage, +} + +#[derive(Debug, Deserialize)] +struct NargoTomlPackage { + name: String, +} + +fn test_compiler(test_case_path: impl AsRef) { + let test_case_path = test_case_path.as_ref(); + + compile_workspace(test_case_path).expect("Compiling workspace"); + + let nargo_toml_path = test_case_path.join("Nargo.toml"); + + let nargo_toml = std::fs::read_to_string(&nargo_toml_path).expect("Reading Nargo.toml"); + let nargo_toml: NargoToml = toml::from_str(&nargo_toml).expect("Deserializing Nargo.toml"); + + let package_name = nargo_toml.package.name; + + let circuit_path = test_case_path.join(format!("target/{package_name}.json")); + let witness_file_path = test_case_path.join("Prover.toml"); + + let proof_schema = NoirProofScheme::from_file(&circuit_path).expect("Reading proof scheme"); + let input_map = proof_schema + .read_witness(&witness_file_path) + .expect("Reading witness data"); + + let proof = proof_schema + .prove(&input_map) + .expect("While proving Noir program statement"); + + proof_schema.verify(&proof).expect("Verifying proof"); +} + +pub fn compile_workspace(workspace_path: impl AsRef) -> Result { + let workspace_path = workspace_path.as_ref(); + let workspace_path = if workspace_path.ends_with("Nargo.toml") { + workspace_path.to_owned() + } else { + 
workspace_path.join("Nargo.toml") + }; + + // `resolve_workspace_from_toml` calls .normalize() under the hood which messes + // up path resolution + let workspace_path = workspace_path.canonicalize()?; + + let workspace = + resolve_workspace_from_toml(&workspace_path, PackageSelection::DefaultOrAll, None)?; + let compile_options = CompileOptions::default(); + + compile_workspace_full(&workspace, &compile_options, None)?; + + Ok(workspace) +} + +#[test_case("../../noir-examples/noir-r1cs-test-programs/acir_assert_zero")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/simplest-read-only-memory")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/read-only-memory")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/range-check-u8")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/range-check-u16")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/range-check-mixed-bases")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/read-write-memory")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/conditional-write")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/bin-opcode")] +#[test_case("../../noir-examples/noir-r1cs-test-programs/small-sha")] +#[test_case("../../noir-examples/noir-passport-examples/complete_age_check"; "complete_age_check")] +fn case(path: &str) { + test_compiler(path); +} diff --git a/tooling/provekit-gnark/.gitignore b/tooling/provekit-gnark/.gitignore new file mode 100644 index 000000000..ea8c4bf7f --- /dev/null +++ b/tooling/provekit-gnark/.gitignore @@ -0,0 +1 @@ +/target diff --git a/noir-tools/Cargo.toml b/tooling/provekit-gnark/Cargo.toml similarity index 50% rename from noir-tools/Cargo.toml rename to tooling/provekit-gnark/Cargo.toml index 4c21edcfb..7558f054c 100644 --- a/noir-tools/Cargo.toml +++ b/tooling/provekit-gnark/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "noir-tools" +name = "provekit-gnark" version = "0.1.0" edition.workspace = true rust-version.workspace = 
true @@ -9,12 +9,16 @@ homepage.workspace = true repository.workspace = true [dependencies] -anyhow.workspace = true +# Workspace crates +provekit-common.workspace = true -nargo.workspace = true -nargo_toml.workspace = true -noirc_driver.workspace = true -nargo_cli.workspace = true +# Cryptography and proof systems +ark-poly.workspace = true + +# 3rd party +serde.workspace = true +serde_json.workspace = true +tracing.workspace = true [lints] workspace = true diff --git a/noir-r1cs/src/gnark_config.rs b/tooling/provekit-gnark/src/gnark_config.rs similarity index 98% rename from noir-r1cs/src/gnark_config.rs rename to tooling/provekit-gnark/src/gnark_config.rs index 3793693a5..968ca2838 100644 --- a/noir-r1cs/src/gnark_config.rs +++ b/tooling/provekit-gnark/src/gnark_config.rs @@ -1,8 +1,8 @@ use { - crate::whir_r1cs::{IOPattern, WhirConfig}, ark_poly::EvaluationDomain, + provekit_common::{IOPattern, WhirConfig}, serde::{Deserialize, Serialize}, - std::{fs::File, io::Write as _}, + std::{fs::File, io::Write}, tracing::instrument, }; diff --git a/tooling/provekit-gnark/src/lib.rs b/tooling/provekit-gnark/src/lib.rs new file mode 100644 index 000000000..a230bcb34 --- /dev/null +++ b/tooling/provekit-gnark/src/lib.rs @@ -0,0 +1,6 @@ +mod gnark_config; + +pub use gnark_config::write_gnark_parameters_to_file; + +#[cfg(test)] +mod tests {}