From b8bff468024afd074177e47cc343c06b0c2562b3 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Thu, 22 Dec 2022 12:25:34 +0200 Subject: [PATCH 01/64] first version of the sqrt PST without the MIPP --- .cargo/config | 5 +- .github/workflows/testudo.yml | 2 +- README.md | 422 +--------------------------------- src/constraints.rs | 8 +- src/dense_mlpoly.rs | 22 +- src/lib.rs | 1 + src/poseidon_transcript.rs | 2 + src/r1csproof.rs | 39 +++- src/sqrt_pst.rs | 273 ++++++++++++++++++++++ 9 files changed, 325 insertions(+), 449 deletions(-) create mode 100644 src/sqrt_pst.rs diff --git a/.cargo/config b/.cargo/config index 3a420e91..8b137891 100644 --- a/.cargo/config +++ b/.cargo/config @@ -1,4 +1 @@ -[build] -rustflags = [ - "-C", "target-cpu=native", -] \ No newline at end of file + diff --git a/.github/workflows/testudo.yml b/.github/workflows/testudo.yml index 781bd986..bce620a4 100644 --- a/.github/workflows/testudo.yml +++ b/.github/workflows/testudo.yml @@ -26,7 +26,7 @@ jobs: - name: Build run: cargo build --verbose - name: Run tests - run: cargo test --verbose + run: cargo test --release --all-features --verbose - name: Build examples run: cargo build --examples --verbose - name: Check Rustfmt Code Style diff --git a/README.md b/README.md index 0eed4a7e..b194ce2b 100644 --- a/README.md +++ b/README.md @@ -1,421 +1,3 @@ -# Spartan: High-speed zkSNARKs without trusted setup +# Testudo: Spartan + Groth16 -![Rust](https://github.com/microsoft/Spartan/workflows/Rust/badge.svg) -[![](https://img.shields.io/crates/v/spartan.svg)](<(https://crates.io/crates/spartan)>) - -Spartan is a high-speed zero-knowledge proof system, a cryptographic primitive that enables a prover to prove a mathematical statement to a verifier without revealing anything besides the validity of the statement. 
This repository provides `libspartan,` a Rust library that implements a zero-knowledge succinct non-interactive argument of knowledge (zkSNARK), which is a type of zero-knowledge proof system with short proofs and fast verification times. The details of the Spartan proof system are described in our [paper](https://eprint.iacr.org/2019/550) published at [CRYPTO 2020](https://crypto.iacr.org/2020/). The security of the Spartan variant implemented in this library is based on the discrete logarithm problem in the random oracle model. - -A simple example application is proving the knowledge of a secret s such that H(s) == d for a public d, where H is a cryptographic hash function (e.g., SHA-256, Keccak). A more complex application is a database-backed cloud service that produces proofs of correct state machine transitions for auditability. See this [paper](https://eprint.iacr.org/2020/758.pdf) for an overview and this [paper](https://eprint.iacr.org/2018/907.pdf) for details. - -Note that this library has _not_ received a security review or audit. - -## Highlights - -We now highlight Spartan's distinctive features. - -- **No "toxic" waste:** Spartan is a _transparent_ zkSNARK and does not require a trusted setup. So, it does not involve any trapdoors that must be kept secret or require a multi-party ceremony to produce public parameters. - -- **General-purpose:** Spartan produces proofs for arbitrary NP statements. `libspartan` supports NP statements expressed as rank-1 constraint satisfiability (R1CS) instances, a popular language for which there exists efficient transformations and compiler toolchains from high-level programs of interest. - -- **Sub-linear verification costs:** Spartan is the first transparent proof system with sub-linear verification costs for arbitrary NP statements (e.g., R1CS). - -- **Standardized security:** Spartan's security relies on the hardness of computing discrete logarithms (a standard cryptographic assumption) in the random oracle model. 
`libspartan` uses `ristretto255`, a prime-order group abstraction atop `curve25519` (a high-speed elliptic curve). We use [`curve25519-dalek`](https://docs.rs/curve25519-dalek) for arithmetic over `ristretto255`. - -- **State-of-the-art performance:** - Among transparent SNARKs, Spartan offers the fastest prover with speedups of 36–152× depending on the baseline, produces proofs that are shorter by 1.2–416×, and incurs the lowest verification times with speedups of 3.6–1326×. The only exception is proof sizes under Bulletproofs, but Bulletproofs incurs slower verification both asymptotically and concretely. When compared to the state-of-the-art zkSNARK with trusted setup, Spartan’s prover is 2× faster for arbitrary R1CS instances and 16× faster for data-parallel workloads. - -### Implementation details - -`libspartan` uses [`merlin`](https://docs.rs/merlin/) to automate the Fiat-Shamir transform. We also introduce a new type called `RandomTape` that extends a `Transcript` in `merlin` to allow the prover's internal methods to produce private randomness using its private transcript without having to create `OsRng` objects throughout the code. An object of type `RandomTape` is initialized with a new random seed from `OsRng` for each proof produced by the library. - -## Examples - -To import `libspartan` into your Rust project, add the following dependency to `Cargo.toml`: - -```text -spartan = "0.7.1" -``` - -The following example shows how to use `libspartan` to create and verify a SNARK proof. -Some of our public APIs' style is inspired by the underlying crates we use. 
- -```rust -# extern crate libspartan; -# extern crate merlin; -# use libspartan::{Instance, SNARKGens, SNARK}; -# use libspartan::poseidon_transcript::PoseidonTranscript; -# use libspartan::parameters::poseidon_params; -# fn main() { - // specify the size of an R1CS instance - let num_vars = 1024; - let num_cons = 1024; - let num_inputs = 10; - let num_non_zero_entries = 1024; - - // produce public parameters - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_non_zero_entries); - - // ask the library to produce a synthentic R1CS instance - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - - // create a commitment to the R1CS instance - let (comm, decomm) = SNARK::encode(&inst, &gens); - - let params = poseidon_params(); - - // produce a proof of satisfiability - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SNARK::prove(&inst, &comm, &decomm, vars, &inputs, &gens, &mut prover_transcript); - - // verify the proof of satisfiability - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&comm, &inputs, &mut verifier_transcript, &gens) - .is_ok()); - println!("proof verification successful!"); -# } -``` - -Here is another example to use the NIZK variant of the Spartan proof system: - -```rust -# extern crate libspartan; -# extern crate merlin; -# use libspartan::{Instance, NIZKGens, NIZK}; -# use libspartan::poseidon_transcript::PoseidonTranscript; -# use libspartan::parameters::poseidon_params; -# fn main() { - // specify the size of an R1CS instance - let num_vars = 1024; - let num_cons = 1024; - let num_inputs = 10; - - // produce public parameters - let gens = NIZKGens::new(num_cons, num_vars, num_inputs); - - // ask the library to produce a synthentic R1CS instance - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - - let params = poseidon_params(); - - // produce a proof of satisfiability - let mut 
prover_transcript = PoseidonTranscript::new(¶ms); - let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript); - - // verify the proof of satisfiability - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&inst, &inputs, &mut verifier_transcript, &gens) - .is_ok()); - println!("proof verification successful!"); -# } -``` - -Finally, we provide an example that specifies a custom R1CS instance instead of using a synthetic instance - -```rust -#![allow(non_snake_case)] -# extern crate ark_std; -# extern crate libspartan; -# extern crate merlin; -# mod scalar; -# use scalar::Scalar; -# use libspartan::parameters::poseidon_params; -# use libspartan::{InputsAssignment, Instance, SNARKGens, VarsAssignment, SNARK}; -# use libspartan::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; -# -# use ark_ff::{PrimeField, Field, BigInteger}; -# use ark_std::{One, Zero, UniformRand}; -# fn main() { - // produce a tiny instance - let ( - num_cons, - num_vars, - num_inputs, - num_non_zero_entries, - inst, - assignment_vars, - assignment_inputs, - ) = produce_tiny_r1cs(); - - // produce public parameters - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_non_zero_entries); - - // create a commitment to the R1CS instance - let (comm, decomm) = SNARK::encode(&inst, &gens); - let params = poseidon_params(); - - // produce a proof of satisfiability - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SNARK::prove( - &inst, - &comm, - &decomm, - assignment_vars, - &assignment_inputs, - &gens, - &mut prover_transcript, - ); - - // verify the proof of satisfiability - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens) - .is_ok()); - println!("proof verification successful!"); -# } - -# fn produce_tiny_r1cs() -> ( -# usize, -# usize, -# usize, -# usize, -# Instance, -# VarsAssignment, -# 
InputsAssignment, -# ) { - // We will use the following example, but one could construct any R1CS instance. - // Our R1CS instance is three constraints over five variables and two public inputs - // (Z0 + Z1) * I0 - Z2 = 0 - // (Z0 + I1) * Z2 - Z3 = 0 - // Z4 * 1 - 0 = 0 - - // parameters of the R1CS instance rounded to the nearest power of two - let num_cons = 4; - let num_vars = 5; - let num_inputs = 2; - let num_non_zero_entries = 5; - - // We will encode the above constraints into three matrices, where - // the coefficients in the matrix are in the little-endian byte order - let mut A: Vec<(usize, usize, Vec)> = Vec::new(); - let mut B: Vec<(usize, usize, Vec)> = Vec::new(); - let mut C: Vec<(usize, usize, Vec)> = Vec::new(); - - // The constraint system is defined over a finite field, which in our case is - // the scalar field of ristreeto255/curve25519 i.e., p = 2^{252}+27742317777372353535851937790883648493 - // To construct these matrices, we will use `curve25519-dalek` but one can use any other method. - - // a variable that holds a byte representation of 1 - let one = Scalar::one().into_repr().to_bytes_le(); - - // R1CS is a set of three sparse matrices A B C, where is a row for every - // constraint and a column for every entry in z = (vars, 1, inputs) - // An R1CS instance is satisfiable iff: - // Az \circ Bz = Cz, where z = (vars, 1, inputs) - - // constraint 0 entries in (A,B,C) - // constraint 0 is (Z0 + Z1) * I0 - Z2 = 0. 
- // We set 1 in matrix A for columns that correspond to Z0 and Z1 - // We set 1 in matrix B for column that corresponds to I0 - // We set 1 in matrix C for column that corresponds to Z2 - A.push((0, 0, one.clone())); - A.push((0, 1, one.clone())); - B.push((0, num_vars + 1, one.clone())); - C.push((0, 2, one.clone())); - - // constraint 1 entries in (A,B,C) - A.push((1, 0, one.clone())); - A.push((1, num_vars + 2, one.clone())); - B.push((1, 2, one.clone())); - C.push((1, 3, one.clone())); - - // constraint 3 entries in (A,B,C) - A.push((2, 4, one.clone())); - B.push((2, num_vars, one.clone())); - - let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap(); - - // compute a satisfying assignment -let mut rng = ark_std::rand::thread_rng(); - let i0 = Scalar::rand(&mut rng); - let i1 = Scalar::rand(&mut rng); - let z0 = Scalar::rand(&mut rng); - let z1 = Scalar::rand(&mut rng); - let z2 = (z0 + z1) * i0; // constraint 0 - let z3 = (z0 + i1) * z2; // constraint 1 - let z4 = Scalar::zero(); //constraint 2 - - // create a VarsAssignment - let mut vars = vec![Scalar::zero().into_repr().to_bytes_le(); num_vars]; - vars[0] = z0.into_repr().to_bytes_le(); - vars[1] = z1.into_repr().to_bytes_le(); - vars[2] = z2.into_repr().to_bytes_le(); - vars[3] = z3.into_repr().to_bytes_le(); - vars[4] = z4.into_repr().to_bytes_le(); - let assignment_vars = VarsAssignment::new(&vars).unwrap(); - - // create an InputsAssignment - let mut inputs = vec![Scalar::zero().into_repr().to_bytes_le(); num_inputs]; - inputs[0] = i0.into_repr().to_bytes_le(); - inputs[1] = i1.into_repr().to_bytes_le(); - let assignment_inputs = InputsAssignment::new(&inputs).unwrap(); - - // check if the instance we created is satisfiable - let res = inst.is_sat(&assignment_vars, &assignment_inputs); - assert_eq!(res.unwrap(), true); - - ( - num_cons, - num_vars, - num_inputs, - num_non_zero_entries, - inst, - assignment_vars, - assignment_inputs, - ) -# } -``` - -For more examples, see 
[`examples/`](examples) directory in this repo. - -## Building `libspartan` - -Install [`rustup`](https://rustup.rs/) - -Switch to nightly Rust using `rustup`: - -```text -rustup default nightly -``` - -Clone the repository: - -```text -git clone https://github.com/Microsoft/Spartan -cd Spartan -``` - -To build docs for public APIs of `libspartan`: - -```text -cargo doc -``` - -To run tests: - -```text -RUSTFLAGS="-C target_cpu=native" cargo test -``` - -To build `libspartan`: - -```text -RUSTFLAGS="-C target_cpu=native" cargo build --release -``` - -> NOTE: We enable SIMD instructions in `curve25519-dalek` by default, so if it fails to build remove the "simd_backend" feature argument in `Cargo.toml`. - -### Supported features - -- `profile`: enables fine-grained profiling information (see below for its use) - -## Performance - -### End-to-end benchmarks - -`libspartan` includes two benches: `benches/nizk.rs` and `benches/snark.rs`. If you report the performance of Spartan in a research paper, we recommend using these benches for higher accuracy instead of fine-grained profiling (listed below). - -To run end-to-end benchmarks: - -```text -RUSTFLAGS="-C target_cpu=native" cargo bench -``` - -### Fine-grained profiling - -Build `libspartan` with `profile` feature enabled. It creates two profilers: `./target/release/snark` and `./target/release/nizk`. - -These profilers report performance as depicted below (for varying R1CS instance sizes). The reported -performance is from running the profilers on a Microsoft Surface Laptop 3 on a single CPU core of Intel Core i7-1065G7 running Ubuntu 20.04 (atop WSL2 on Windows 10). -See Section 9 in our [paper](https://eprint.iacr.org/2019/550) to see how this compares with other zkSNARKs in the literature. 
- -```text -$ ./target/release/snark -Profiler:: SNARK - * number_of_constraints 1048576 - * number_of_variables 1048576 - * number_of_inputs 10 - * number_non-zero_entries_A 1048576 - * number_non-zero_entries_B 1048576 - * number_non-zero_entries_C 1048576 - * SNARK::encode - * SNARK::encode 14.2644201s - * SNARK::prove - * R1CSProof::prove - * polycommit - * polycommit 2.7175848s - * prove_sc_phase_one - * prove_sc_phase_one 683.7481ms - * prove_sc_phase_two - * prove_sc_phase_two 846.1056ms - * polyeval - * polyeval 193.4216ms - * R1CSProof::prove 4.4416193s - * len_r1cs_sat_proof 47024 - * eval_sparse_polys - * eval_sparse_polys 377.357ms - * R1CSEvalProof::prove - * commit_nondet_witness - * commit_nondet_witness 14.4507331s - * build_layered_network - * build_layered_network 3.4360521s - * evalproof_layered_network - * len_product_layer_proof 64712 - * evalproof_layered_network 15.5708066s - * R1CSEvalProof::prove 34.2930559s - * len_r1cs_eval_proof 133720 - * SNARK::prove 39.1297568s - * SNARK::proof_compressed_len 141768 - * SNARK::verify - * verify_sat_proof - * verify_sat_proof 20.0828ms - * verify_eval_proof - * verify_polyeval_proof - * verify_prod_proof - * verify_prod_proof 1.1847ms - * verify_hash_proof - * verify_hash_proof 81.06ms - * verify_polyeval_proof 82.3583ms - * verify_eval_proof 82.8937ms - * SNARK::verify 103.0536ms -``` - -```text -$ ./target/release/nizk -Profiler:: NIZK - * number_of_constraints 1048576 - * number_of_variables 1048576 - * number_of_inputs 10 - * number_non-zero_entries_A 1048576 - * number_non-zero_entries_B 1048576 - * number_non-zero_entries_C 1048576 - * NIZK::prove - * R1CSProof::prove - * polycommit - * polycommit 2.7220635s - * prove_sc_phase_one - * prove_sc_phase_one 722.5487ms - * prove_sc_phase_two - * prove_sc_phase_two 862.6796ms - * polyeval - * polyeval 190.2233ms - * R1CSProof::prove 4.4982305s - * len_r1cs_sat_proof 47024 - * NIZK::prove 4.5139888s - * NIZK::proof_compressed_len 48134 - * NIZK::verify 
- * eval_sparse_polys - * eval_sparse_polys 395.0847ms - * verify_sat_proof - * verify_sat_proof 19.286ms - * NIZK::verify 414.5102ms -``` - -## LICENSE - -See [LICENSE](./LICENSE) - -## Contributing - -See [CONTRIBUTING](./CONTRIBUTING.md) +TODO documentation diff --git a/src/constraints.rs b/src/constraints.rs index edd54533..8ffb6fb3 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -7,7 +7,9 @@ use crate::{ sparse_mlpoly::{SparsePolyEntry, SparsePolynomial}, unipoly::UniPoly, }; + use ark_bls12_377::{constraints::PairingVar as IV, Bls12_377 as I, Fr}; + use ark_crypto_primitives::{ snark::BooleanInputVar, CircuitSpecificSetupSNARK, SNARKGadget, SNARK, }; @@ -18,10 +20,7 @@ use ark_groth16::{ Groth16, PreparedVerifyingKey, Proof as GrothProof, }; -use ark_poly_commit::multilinear_pc::{ - data_structures::{Commitment, Proof, VerifierKey}, - MultilinearPC, -}; +use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof, VerifierKey}; use ark_r1cs_std::{ alloc::{AllocVar, AllocationMode}, fields::fp::FpVar, @@ -409,6 +408,7 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { #[derive(Clone)] pub struct VerifierConfig { + pub comm: Commitment, pub num_vars: usize, pub num_cons: usize, pub input: Vec, diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index ae92f800..47517ebf 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -1,6 +1,7 @@ #![allow(clippy::too_many_arguments)] use crate::group::Fr; use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; +use crate::timer::Timer; use super::commitments::{Commitments, MultiCommitGens}; use super::errors::ProofVerifyError; @@ -13,18 +14,19 @@ use super::nizk::{DotProductProofGens, DotProductProofLog}; use super::random::RandomTape; use super::scalar::Scalar; use super::transcript::{AppendToTranscript, ProofTranscript}; -use ark_bls12_377::Bls12_377 as I; -use ark_ff::{One, UniformRand, Zero}; +use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use 
ark_ec::msm::VariableBaseMSM; +use ark_ec::{PairingEngine, ProjectiveCurve}; +use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; use ark_poly_commit::multilinear_pc::data_structures::{ - CommitterKey, UniversalParams, VerifierKey, + Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::*; use core::ops::Index; use merlin::Transcript; use std::ops::{Add, AddAssign, Neg, Sub, SubAssign}; -use std::process::abort; #[cfg(feature = "multicore")] use rayon::prelude::*; @@ -32,9 +34,9 @@ use rayon::prelude::*; // TODO: integrate the DenseMultilinearExtension(and Sparse) https://github.com/arkworks-rs/algebra/tree/master/poly/src/evaluations/multivariate/multilinear from arkworks into Spartan. This requires moving the specific Spartan functionalities in separate traits. #[derive(Debug, Clone, Eq, PartialEq, Hash, CanonicalDeserialize, CanonicalSerialize)] pub struct DensePolynomial { - num_vars: usize, // the number of variables in the multilinear polynomial - len: usize, - Z: Vec, // evaluations of the polynomial in all the 2^num_vars Boolean inputs + pub num_vars: usize, // the number of variables in the multilinear polynomial + pub len: usize, + pub Z: Vec, // evaluations of the polynomial in all the 2^num_vars Boolean inputs } impl MultilinearExtension for DensePolynomial { @@ -201,8 +203,8 @@ impl PolyCommitmentGens { // Generates the SRS and trims it based on the number of variables in the // multilinear polynomial. 
let mut rng = ark_std::test_rng(); - let pst_gens = MultilinearPC::::setup(num_vars, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&pst_gens, num_vars); + let pst_gens = MultilinearPC::::setup(num_vars / 2, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&pst_gens, num_vars / 2); PolyCommitmentGens { gens, ck, vk } } @@ -591,6 +593,8 @@ impl PolyEvalProof { #[cfg(test)] mod tests { + use std::num; + use crate::parameters::poseidon_params; use super::*; diff --git a/src/lib.rs b/src/lib.rs index af4d4ff8..2dd548a8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -31,6 +31,7 @@ mod r1csproof; mod random; mod scalar; mod sparse_mlpoly; +mod sqrt_pst; mod sumcheck; mod timer; mod transcript; diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 577e2ded..f4440226 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -36,6 +36,8 @@ impl PoseidonTranscript { self.sponge.absorb(&x); } + // pub fn append_usize + pub fn append_bytes(&mut self, x: &Vec) { self.sponge.absorb(x); } diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 5d9b139e..a2b3ede8 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -4,9 +4,11 @@ use crate::group::{Fq, Fr}; use crate::math::Math; use crate::parameters::poseidon_params; use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; +use crate::sqrt_pst::PolyList; use crate::sumcheck::SumcheckInstanceProof; use ark_bls12_377::Bls12_377 as I; use ark_bw6_761::BW6_761 as P; +use ark_ec::PairingEngine; use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; @@ -42,6 +44,7 @@ pub struct R1CSProof { ry: Vec, // The transcript state after the satisfiability proof was computed. 
pub transcript_sat_state: Scalar, + pub t: ::Fqk, } #[derive(Clone)] pub struct R1CSSumcheckGens { @@ -142,12 +145,19 @@ impl R1CSProof { assert!(input.len() < vars.len()); // create the multilinear witness polynomial from the satisfying assiment - let poly_vars = DensePolynomial::new(vars.clone()); + // expressed as the list of sqrt-sized polynomials + let pl = PolyList::new(&vars.clone()); let timer_commit = Timer::new("polycommit"); - // commitment to the satisfying witness polynomial - let comm = MultilinearPC::::commit(&gens.gens_pc.ck, &poly_vars); - comm.append_to_poseidon(transcript); + + // commitment list to the satisfying witness polynomial list + let (comm_list, t) = PolyList::commit(&pl, &gens.gens_pc.ck); + + let mut bytes = Vec::new(); + t.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); + + // comm.append_to_poseidon(transcript); timer_commit.stop(); let c = transcript.challenge_scalar(); @@ -234,15 +244,18 @@ impl R1CSProof { let timmer_opening = Timer::new("polyopening"); let mut dummy = ry[1..].to_vec().clone(); dummy.reverse(); - let proof_eval_vars_at_ry = MultilinearPC::::open(&gens.gens_pc.ck, &poly_vars, &dummy); + let q = pl.get_q(&dummy); + + let (comm, proof_eval_vars_at_ry) = PolyList::open_q(comm_list, &gens.gens_pc.ck, &q, &dummy); println!( "proof size (no of quotients): {:?}", proof_eval_vars_at_ry.proofs.len() ); + // comm.append_to_poseidon(transcript); timmer_opening.stop(); let timer_polyeval = Timer::new("polyeval"); - let eval_vars_at_ry = poly_vars.evaluate(&ry[1..]); + let eval_vars_at_ry = PolyList::eval_q(q.clone(), &dummy); timer_polyeval.stop(); timer_prove.stop(); @@ -260,6 +273,7 @@ impl R1CSProof { rx: rx.clone(), ry: ry.clone(), transcript_sat_state: c, + t: t, }, rx, ry, @@ -275,7 +289,10 @@ impl R1CSProof { transcript: &mut PoseidonTranscript, gens: &R1CSGens, ) -> Result<(u128, u128, u128), ProofVerifyError> { - self.comm.append_to_poseidon(transcript); + // serialise and add the IPP commitment to 
the transcript + let mut bytes = Vec::new(); + self.t.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); let c = transcript.challenge_scalar(); @@ -303,7 +320,7 @@ impl R1CSProof { polys_sc2: self.sc_proof_phase2.polys.clone(), eval_vars_at_ry: self.eval_vars_at_ry, input_as_sparse_poly, - // rx: self.rx.clone(), + comm: self.comm.clone(), ry: self.ry.clone(), transcript_sat_state: self.transcript_sat_state, }; @@ -339,7 +356,7 @@ impl R1CSProof { // Verifies the proof of opening against the result of evaluating the // witness polynomial at point ry. - let res = MultilinearPC::::check( + let res = PolyList::verify_q( &gens.gens_pc.vk, &self.comm, &dummy, @@ -365,7 +382,7 @@ impl R1CSProof { transcript: &mut PoseidonTranscript, gens: &R1CSGens, ) -> Result { - self.comm.append_to_poseidon(transcript); + // self.comm.append_to_poseidon(transcript); let c = transcript.challenge_scalar(); @@ -393,8 +410,8 @@ impl R1CSProof { polys_sc2: self.sc_proof_phase2.polys.clone(), eval_vars_at_ry: self.eval_vars_at_ry, input_as_sparse_poly, - // rx: self.rx.clone(), ry: self.ry.clone(), + comm: self.comm.clone(), transcript_sat_state: self.transcript_sat_state, }; diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs new file mode 100644 index 00000000..17be7997 --- /dev/null +++ b/src/sqrt_pst.rs @@ -0,0 +1,273 @@ +use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; +use ark_ff::{One, PrimeField}; +use ark_poly_commit::multilinear_pc::{ + data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, + MultilinearPC, +}; +use rayon::prelude::{ + IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, +}; + +use super::scalar::Scalar; +use crate::{dense_mlpoly::DensePolynomial, math::Math, timer::Timer}; + +pub struct PolyList { + m: usize, + polys: Vec, +} + +impl PolyList { + // Given the evaluations over the boolean hypercube of a polynomial p of size + // 2*m compute 
the sqrt-sized polynomials p_i as + // p_i(Y) = \sum_{j \in \{0,1\}^m} p(j, i) * chi_j(Y) + // where p(X,Y) = \sum_{i \in \{0,\1}^m} chi_i(X) * p_i(Y) + pub fn new(Z: &[Scalar]) -> Self { + let pl_timer = Timer::new("poly_list_build"); + let m = Z.len().log_2() / 2; + let pow_m = 2_usize.pow(m as u32); + let polys: Vec = (0..pow_m) + .into_par_iter() + .map(|i| { + let z: Vec = (0..pow_m) + .into_par_iter() + .map(|j| Z[(j << m) | i]) + .collect(); + DensePolynomial::new(z) + }) + .collect(); + assert!(polys.len() == pow_m); + pl_timer.stop(); + Self { m, polys } + } + + // Given point = (\vec{a}, \vec{b}), compute the polynomial q as + // q(Y) = + // \sum_{j \in \{0,1\}^m}(\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(a)) * chi_j(Y) + // and p(a,b) = q(b) where p is the initial polynomial + + pub fn get_q(&self, point: &[Scalar]) -> DensePolynomial { + let q_timer = Timer::new("build_q"); + assert!(point.len() == 2 * self.m); + let a = &point[0..self.m]; + let pow_m = 2_usize.pow(self.m as u32); + + let chis: Vec = (0..pow_m) + .into_par_iter() + .map(|i| Self::get_chi_i(a, i)) + .collect(); + + let z_q: Vec = (0..pow_m) + .into_par_iter() + .map(|j| (0..pow_m).map(|i| self.polys[i].Z[j] * chis[i]).sum()) + .collect(); + q_timer.stop(); + + DensePolynomial::new(z_q) + } + + // Given point = (\vec{a}, \vec{b}) used to construct q + // compute q(b) = p(a,b). 
+ pub fn eval_q(q: DensePolynomial, point: &[Scalar]) -> Scalar { + let b = &point[point.len() / 2..point.len()]; + let prods = (0..q.Z.len()) + .into_par_iter() + .map(|j| q.Z[j] * PolyList::get_chi_i(b, j)); + + prods.sum() + } + + pub fn commit( + poly_list: &PolyList, + ck: &CommitterKey, + ) -> (Vec>, ::Fqk) { + let timer_commit = Timer::new("sqrt_commit"); + + let timer_list = Timer::new("comm_list"); + + // commit to each of the sqrt sized p_i + let comm_list: Vec> = poly_list + .polys + .par_iter() + .map(|p| MultilinearPC::::commit(&ck.clone(), p)) + .collect(); + timer_list.stop(); + + let h_vec = ck.powers_of_h[0].clone(); + assert!(comm_list.len() == h_vec.len()); + + let ipp_timer = Timer::new("ipp"); + let pairings: Vec<_> = comm_list + .clone() + .into_par_iter() + .map(|c| ::G1Prepared::from(c.g_product)) + .zip( + h_vec + .into_par_iter() + .map(|h| ::G2Prepared::from(h)), + ) + .collect(); + + // computer the IPP commitment + let t = I::product_of_pairings(pairings.iter()); + ipp_timer.stop(); + + timer_commit.stop(); + + (comm_list, t) + } + + pub fn get_chi_i(b: &[Scalar], i: usize) -> Scalar { + let m = b.len(); + let mut prod = Scalar::one(); + for j in 0..m { + let b_j = b[j]; + if i >> j & 1 == 1 { + prod = prod * b_j; + } else { + prod = prod * (Scalar::one() - b_j) + }; + } + prod + } + + pub fn open_q( + comm_list: Vec>, + ck: &CommitterKey, + q: &DensePolynomial, + point: &[Scalar], + ) -> (Commitment, Proof) { + let m = point.len() / 2; + let a = &point[0..m]; + let b = &point[m..2 * m]; + + let timer_open = Timer::new("sqrt_open"); + + // Compute the PST commitment to q obtained as the inner products of the + // commitments to the polynomials p_i and chi_i(a) for i ranging over the + // boolean hypercube of size m. 
+ let m = a.len(); + let pow_m = 2_usize.pow(m as u32); + let timer_msm = Timer::new("msm"); + let chis: Vec<_> = (0..pow_m) + .into_par_iter() + .map(|i| Self::get_chi_i(a, i).into_repr()) + .collect(); + assert!(chis.len() == comm_list.len()); + + let c_u = VariableBaseMSM::multi_scalar_mul( + comm_list + .par_iter() + .map(|c| c.g_product) + .collect::>() + .as_slice(), + chis.as_slice(), + ) + .into_affine(); + + let U: Commitment = Commitment { + nv: q.num_vars, + g_product: c_u, + }; + timer_msm.stop(); + + let comm = MultilinearPC::::commit(ck, q); + assert!(c_u == comm.g_product); + + // TODO: MIPP proof that U is the inner product of the opening + // vector A to T and the vector y + + // PST proof for opening q at b + let timer_proof = Timer::new("open"); + let pst_proof = MultilinearPC::::open(ck, q, &b); + timer_proof.stop(); + + timer_open.stop(); + + // TODO: add MIPP proof as return value + (U, pst_proof) + } + + pub fn verify_q( + vk: &VerifierKey, + U: &Commitment, + point: &[Scalar], + v: Scalar, + pst_proof: &Proof, + // TODO: add MIPP proof as argument + ) -> bool { + // TODO: MIPP verification + + let len = point.len(); + let b = &point[len / 2..len]; + + let timer_verify = Timer::new("sqrt_verify"); + let res = MultilinearPC::::check(vk, U, b, v, pst_proof); + timer_verify.stop(); + res + } +} + +#[cfg(test)] +mod tests { + use std::clone; + + use super::*; + use ark_ff::Zero; + use ark_std::UniformRand; + #[test] + fn check_sqrt_poly_eval() { + let mut rng = ark_std::test_rng(); + let num_vars = 8; + let len = 2_usize.pow(num_vars); + let Z: Vec = (0..len) + .into_iter() + .map(|_| Scalar::rand(&mut rng)) + .collect(); + let r: Vec = (0..num_vars) + .into_iter() + .map(|_| Scalar::rand(&mut rng)) + .collect(); + + let p = DensePolynomial::new(Z.clone()); + let res1 = p.evaluate(&r); + + let mut r_new = r.to_vec(); + r_new.reverse(); + let pl = PolyList::new(&Z.clone()); + let q = pl.get_q(&r_new); + let res2 = PolyList::eval_q(q.clone(), 
&r_new); + + assert!(res1 == res2); + } + + #[test] + fn check_new_poly_commit() { + let mut rng = ark_std::test_rng(); + let num_vars = 26; + let len = 2_usize.pow(num_vars); + let Z: Vec = (0..len) + .into_iter() + .map(|_| Scalar::rand(&mut rng)) + .collect(); + let r: Vec = (0..num_vars) + .into_iter() + .map(|_| Scalar::rand(&mut rng)) + .collect(); + + let gens = MultilinearPC::::setup(13, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&gens, 13); + + let pl = PolyList::new(&Z.clone()); + let q = pl.get_q(&r); + + let v = PolyList::eval_q(q.clone(), &r); + + let (comm_list, t) = PolyList::commit(&pl, &ck); + + let (u, pst_proof) = PolyList::open_q(comm_list, &ck, &q, &r); + + let res = PolyList::verify_q(&vk, &u, &r, v, &pst_proof); + assert!(res == true); + } +} From a29888db24f9a6a42180072cf425ab3f7b678faa Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Thu, 19 Jan 2023 14:42:00 +0000 Subject: [PATCH 02/64] snarkpack integration --- Cargo.toml | 5 +- src/constraints.rs | 2 - src/lib.rs | 8 +-- src/poseidon_transcript.rs | 36 ++++++++++- src/r1csproof.rs | 42 +++++++------ src/sqrt_pst.rs | 119 ++++++++++++++++++++++++++----------- 6 files changed, 149 insertions(+), 63 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index da7c8353..94c4aa0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,6 +42,8 @@ ark-groth16 = { version = "^0.3.0", features = ["r1cs"] } ark-bw6-761 = { version = "^0.3.0" } ark-poly-commit = { version = "^0.3.0" } ark-poly = {version = "^0.3.0"} +snarkpack = { git = "https://github.com/maramihali/snarkpack", branch = "testudo"} + lazy_static = "1.4.0" rand = { version = "0.8", features = [ "std", "std_rng" ] } @@ -91,5 +93,6 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri [patch.crates-io] ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"} -ark-poly-commit = {git = "https://github.com/maramihali/poly-commit"} +ark-poly-commit = {git 
= "https://github.com/maramihali/poly-commit", branch="pst_g2"} + diff --git a/src/constraints.rs b/src/constraints.rs index 8ffb6fb3..71436c7e 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -393,7 +393,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let expected_claim_post_phase2_var = eval_Z_at_ry_var * scalar_var; claim_post_phase2_var.enforce_equal(&expected_claim_post_phase2_var)?; - let expected_transcript_state_var = transcript_var.challenge()?; let claimed_transcript_state_var = FpVar::::new_input(cs, || Ok(self.claimed_transcript_sat_state))?; @@ -401,7 +400,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { // Ensure that the prover and verifier transcipt views are consistent at // the end of the satisfiability proof. expected_transcript_state_var.enforce_equal(&claimed_transcript_state_var)?; - Ok(()) } } diff --git a/src/lib.rs b/src/lib.rs index 2dd548a8..001db7cc 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -407,7 +407,7 @@ impl SNARK { // side all the previous updates are done on the transcript // circuit variable and the transcript outside the circuit will be // inconsistent wrt to the prover's. - transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check @@ -480,7 +480,7 @@ impl SNARK { // TODO: find a way to retrieve this state from the circuit. Currently // the API for generating constraints doesn't support returning values // computed inside the circuit. 
- transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); let (Ar, Br, Cr) = &self.inst_evals; transcript.append_scalar(&Ar); @@ -598,10 +598,10 @@ impl NIZK { // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check - let timer_eval = Timer::new("eval_sparse_polys"); + // let timer_eval = Timer::new("eval_sparse_polys"); let (claimed_rx, claimed_ry) = &self.r; let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); - timer_eval.stop(); + // timer_eval.stop(); let timer_sat_proof = Timer::new("verify_sat_proof"); assert_eq!(input.assignment.len(), inst.inst.get_num_inputs()); diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index f4440226..247b8c35 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,14 +1,16 @@ use crate::group::{CompressedGroup, Fr}; use super::scalar::Scalar; -use ark_bls12_377::Bls12_377 as I; +use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_ec::PairingEngine; +use ark_ff::{Field, PrimeField}; use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; -// use ark_r1cs_std::prelude::*; use ark_sponge::{ poseidon::{PoseidonParameters, PoseidonSponge}, CryptographicSponge, }; +use snarkpack::Transcript; #[derive(Clone)] /// TODO @@ -17,6 +19,22 @@ pub struct PoseidonTranscript { params: PoseidonParameters, } +impl Transcript for PoseidonTranscript { + fn domain_sep(&mut self) { + self.sponge.absorb(&b"testudo".to_vec()); + } + + fn append(&mut self, label: &'static [u8], point: &S) { + let mut buf = Vec::new(); + point.serialize(&mut buf).expect("serialization failed"); + self.sponge.absorb(&buf); + } + + fn challenge_scalar(&mut self, label: &'static [u8]) -> F { + self.sponge.squeeze_field_elements(1).remove(0) + } +} + impl PoseidonTranscript { /// create a new transcript pub fn new(params: 
&PoseidonParameters) -> Self { @@ -56,6 +74,12 @@ impl PoseidonTranscript { } } + pub fn append_gt(&mut self, g_t: &::Fqk) { + let mut bytes = Vec::new(); + g_t.serialize(&mut bytes).unwrap(); + self.append_bytes(&bytes); + } + pub fn challenge_scalar(&mut self) -> Scalar { self.sponge.squeeze_field_elements(1).remove(0) } @@ -82,3 +106,11 @@ impl AppendToPoseidon for Commitment { transcript.append_bytes(&bytes); } } + +impl AppendToPoseidon for G1Affine { + fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { + let mut bytes = Vec::new(); + self.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); + } +} diff --git a/src/r1csproof.rs b/src/r1csproof.rs index a2b3ede8..abcc75fc 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -12,6 +12,7 @@ use ark_ec::PairingEngine; use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; +use snarkpack::mipp::MippProof; use super::commitments::MultiCommitGens; use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; @@ -45,6 +46,7 @@ pub struct R1CSProof { // The transcript state after the satisfiability proof was computed. 
pub transcript_sat_state: Scalar, pub t: ::Fqk, + pub mipp_proof: MippProof, } #[derive(Clone)] pub struct R1CSSumcheckGens { @@ -237,31 +239,29 @@ impl R1CSProof { transcript, ); timer_sc_proof_phase2.stop(); + let c = transcript.challenge_scalar(); + transcript.new_from_state(&c); // TODO: modify the polynomial evaluation in Spartan to be consistent // with the evaluation in ark-poly-commit so that reversing is not needed // anymore let timmer_opening = Timer::new("polyopening"); - let mut dummy = ry[1..].to_vec().clone(); - dummy.reverse(); - let q = pl.get_q(&dummy); + let q = pl.get_q(&ry[1..]); + timer_prove.stop(); - let (comm, proof_eval_vars_at_ry) = PolyList::open_q(comm_list, &gens.gens_pc.ck, &q, &dummy); + let (comm, proof_eval_vars_at_ry, mipp_proof) = + PolyList::open_q(transcript, comm_list, &gens.gens_pc.ck, &q, &ry[1..], &t); println!( "proof size (no of quotients): {:?}", proof_eval_vars_at_ry.proofs.len() ); - // comm.append_to_poseidon(transcript); + timmer_opening.stop(); let timer_polyeval = Timer::new("polyeval"); - let eval_vars_at_ry = PolyList::eval_q(q.clone(), &dummy); + let eval_vars_at_ry = PolyList::eval_q(q.clone(), &ry[1..]); timer_polyeval.stop(); - timer_prove.stop(); - - let c = transcript.challenge_scalar(); - ( R1CSProof { comm, @@ -273,7 +273,8 @@ impl R1CSProof { rx: rx.clone(), ry: ry.clone(), transcript_sat_state: c, - t: t, + t, + mipp_proof, }, rx, ry, @@ -333,6 +334,7 @@ impl R1CSProof { let dp1 = start.elapsed().as_millis(); prove_inner.stop(); + // this is universal, we don't measure it let start = Instant::now(); let (pk, vk) = Groth16::

::setup(circuit.clone(), &mut rng).unwrap(); let ds = start.elapsed().as_millis(); @@ -344,24 +346,25 @@ impl R1CSProof { prove_outer.stop(); let start = Instant::now(); + let verifier_time = Timer::new("groth16_verification"); let is_verified = Groth16::

::verify(&vk, &[], &proof).unwrap(); assert!(is_verified); + verifier_time.stop(); let timer_verification = Timer::new("commitverification"); - let mut dummy = self.ry[1..].to_vec(); - // TODO: ensure ark-poly-commit and Spartan produce consistent results - // when evaluating a polynomial at a given point so this reverse is not - // needed. - dummy.reverse(); + transcript.new_from_state(&self.transcript_sat_state); // Verifies the proof of opening against the result of evaluating the // witness polynomial at point ry. let res = PolyList::verify_q( + transcript, &gens.gens_pc.vk, &self.comm, - &dummy, + &self.ry[1..], self.eval_vars_at_ry, &self.proof_eval_vars_at_ry, + &self.mipp_proof, + &self.t, ); timer_verification.stop(); @@ -382,7 +385,10 @@ impl R1CSProof { transcript: &mut PoseidonTranscript, gens: &R1CSGens, ) -> Result { - // self.comm.append_to_poseidon(transcript); + // serialise and add the IPP commitment to the transcript + let mut bytes = Vec::new(); + self.t.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); let c = transcript.challenge_scalar(); diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 17be7997..a138a851 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,16 +1,24 @@ -use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_bls12_377::{Bls12_377 as I, G1Affine, G2Affine}; use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; -use ark_ff::{One, PrimeField}; +use ark_ff::{BigInteger256, One, PrimeField}; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, }; +use ark_serialize::CanonicalSerialize; use rayon::prelude::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, }; +use snarkpack::mipp::MippProof; use super::scalar::Scalar; -use crate::{dense_mlpoly::DensePolynomial, math::Math, timer::Timer}; +use crate::{ + dense_mlpoly::DensePolynomial, + math::Math, + poseidon_transcript::{AppendToPoseidon, 
PoseidonTranscript}, + timer::Timer, + transcript, +}; pub struct PolyList { m: usize, @@ -50,11 +58,12 @@ impl PolyList { let q_timer = Timer::new("build_q"); assert!(point.len() == 2 * self.m); let a = &point[0..self.m]; + let b = &point[self.m..2 * self.m]; let pow_m = 2_usize.pow(self.m as u32); let chis: Vec = (0..pow_m) .into_par_iter() - .map(|i| Self::get_chi_i(a, i)) + .map(|i| Self::get_chi_i(b, i)) .collect(); let z_q: Vec = (0..pow_m) @@ -69,10 +78,11 @@ impl PolyList { // Given point = (\vec{a}, \vec{b}) used to construct q // compute q(b) = p(a,b). pub fn eval_q(q: DensePolynomial, point: &[Scalar]) -> Scalar { + let a = &point[0..point.len() / 2]; let b = &point[point.len() / 2..point.len()]; let prods = (0..q.Z.len()) .into_par_iter() - .map(|j| q.Z[j] * PolyList::get_chi_i(b, j)); + .map(|j| q.Z[j] * PolyList::get_chi_i(&a, j)); prods.sum() } @@ -108,7 +118,7 @@ impl PolyList { ) .collect(); - // computer the IPP commitment + // compute the IPP commitment let t = I::product_of_pairings(pairings.iter()); ipp_timer.stop(); @@ -122,7 +132,7 @@ impl PolyList { let mut prod = Scalar::one(); for j in 0..m { let b_j = b[j]; - if i >> j & 1 == 1 { + if i >> (m - j - 1) & 1 == 1 { prod = prod * b_j; } else { prod = prod * (Scalar::one() - b_j) @@ -132,11 +142,13 @@ impl PolyList { } pub fn open_q( + transcript: &mut PoseidonTranscript, comm_list: Vec>, ck: &CommitterKey, q: &DensePolynomial, point: &[Scalar], - ) -> (Commitment, Proof) { + t: &::Fqk, + ) -> (Commitment, Proof, MippProof) { let m = point.len() / 2; let a = &point[0..m]; let b = &point[m..2 * m]; @@ -151,19 +163,14 @@ impl PolyList { let timer_msm = Timer::new("msm"); let chis: Vec<_> = (0..pow_m) .into_par_iter() - .map(|i| Self::get_chi_i(a, i).into_repr()) + .map(|i| Self::get_chi_i(b, i)) .collect(); + let chis_repr: Vec = chis.par_iter().map(|y| y.into_repr()).collect(); assert!(chis.len() == comm_list.len()); + let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); - 
let c_u = VariableBaseMSM::multi_scalar_mul( - comm_list - .par_iter() - .map(|c| c.g_product) - .collect::>() - .as_slice(), - chis.as_slice(), - ) - .into_affine(); + let c_u = + VariableBaseMSM::multi_scalar_mul(a_vec.as_slice(), chis_repr.as_slice()).into_affine(); let U: Commitment = Commitment { nv: q.num_vars, @@ -174,36 +181,59 @@ impl PolyList { let comm = MultilinearPC::::commit(ck, q); assert!(c_u == comm.g_product); - // TODO: MIPP proof that U is the inner product of the opening - // vector A to T and the vector y + let h_vec = ck.powers_of_h[0].clone(); + // TODO: MIPP proof that U is the inner product of the vector A + // and the vector y, where A is the opening vector to T + + let timer_mipp_proof = Timer::new("mipp_prove"); + let mipp_proof = + MippProof::::prove::(transcript, ck, a_vec, chis, h_vec, &c_u, t) + .unwrap(); + timer_mipp_proof.stop(); // PST proof for opening q at b - let timer_proof = Timer::new("open"); - let pst_proof = MultilinearPC::::open(ck, q, &b); + let timer_proof = Timer::new("pst_open"); + let mut a_rev = a.to_vec().clone(); + a_rev.reverse(); + let pst_proof = MultilinearPC::::open(ck, q, &a_rev); timer_proof.stop(); timer_open.stop(); // TODO: add MIPP proof as return value - (U, pst_proof) + (U, pst_proof, mipp_proof) } pub fn verify_q( + transcript: &mut PoseidonTranscript, vk: &VerifierKey, U: &Commitment, point: &[Scalar], v: Scalar, pst_proof: &Proof, - // TODO: add MIPP proof as argument + mipp_proof: &MippProof, + T: &::Fqk, ) -> bool { - // TODO: MIPP verification - let len = point.len(); + let a = &point[0..len / 2]; let b = &point[len / 2..len]; - - let timer_verify = Timer::new("sqrt_verify"); - let res = MultilinearPC::::check(vk, U, b, v, pst_proof); - timer_verify.stop(); + let timer_mipp_verify = Timer::new("mipp_verify"); + let res_mipp = MippProof::::verify::( + vk, + transcript, + mipp_proof, + b.to_vec(), + &U.g_product, + T, + ); + assert!(res_mipp == true); + timer_mipp_verify.stop(); + + let mut 
a_rev = a.to_vec().clone(); + a_rev.reverse(); + let timer_pst_verify = Timer::new("pst_verify"); + let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); + timer_pst_verify.stop(); res } } @@ -212,6 +242,8 @@ impl PolyList { mod tests { use std::clone; + use crate::parameters::poseidon_params; + use super::*; use ark_ff::Zero; use ark_std::UniformRand; @@ -233,7 +265,7 @@ mod tests { let res1 = p.evaluate(&r); let mut r_new = r.to_vec(); - r_new.reverse(); + // r_new.reverse(); let pl = PolyList::new(&Z.clone()); let q = pl.get_q(&r_new); let res2 = PolyList::eval_q(q.clone(), &r_new); @@ -244,7 +276,7 @@ mod tests { #[test] fn check_new_poly_commit() { let mut rng = ark_std::test_rng(); - let num_vars = 26; + let num_vars = 4; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len) .into_iter() @@ -255,8 +287,8 @@ mod tests { .map(|_| Scalar::rand(&mut rng)) .collect(); - let gens = MultilinearPC::::setup(13, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&gens, 13); + let gens = MultilinearPC::::setup(2, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&gens, 2); let pl = PolyList::new(&Z.clone()); let q = pl.get_q(&r); @@ -265,9 +297,24 @@ mod tests { let (comm_list, t) = PolyList::commit(&pl, &ck); - let (u, pst_proof) = PolyList::open_q(comm_list, &ck, &q, &r); + let params = poseidon_params(); + let mut prover_transcript = PoseidonTranscript::new(¶ms); + + let (u, pst_proof, mipp_proof) = + PolyList::open_q(&mut prover_transcript, comm_list, &ck, &q, &r, &t); + + let mut verifier_transcript = PoseidonTranscript::new(¶ms); - let res = PolyList::verify_q(&vk, &u, &r, v, &pst_proof); + let res = PolyList::verify_q( + &mut verifier_transcript, + &vk, + &u, + &r, + v, + &pst_proof, + &mipp_proof, + &t, + ); assert!(res == true); } } From 1d7b27da0291633205a0da2ba2999e4a2ddb7679 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Thu, 19 Jan 2023 14:42:00 +0000 Subject: [PATCH 03/64] snarkpack integration --- Cargo.toml | 5 +- src/constraints.rs | 2 - 
src/lib.rs | 8 +-- src/poseidon_transcript.rs | 36 ++++++++++- src/r1csproof.rs | 42 +++++++------ src/sqrt_pst.rs | 119 ++++++++++++++++++++++++++----------- 6 files changed, 149 insertions(+), 63 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index da7c8353..94c4aa0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,6 +42,8 @@ ark-groth16 = { version = "^0.3.0", features = ["r1cs"] } ark-bw6-761 = { version = "^0.3.0" } ark-poly-commit = { version = "^0.3.0" } ark-poly = {version = "^0.3.0"} +snarkpack = { git = "https://github.com/maramihali/snarkpack", branch = "testudo"} + lazy_static = "1.4.0" rand = { version = "0.8", features = [ "std", "std_rng" ] } @@ -91,5 +93,6 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri [patch.crates-io] ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"} -ark-poly-commit = {git = "https://github.com/maramihali/poly-commit"} +ark-poly-commit = {git = "https://github.com/maramihali/poly-commit", branch="pst_g2"} + diff --git a/src/constraints.rs b/src/constraints.rs index 8ffb6fb3..71436c7e 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -393,7 +393,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let expected_claim_post_phase2_var = eval_Z_at_ry_var * scalar_var; claim_post_phase2_var.enforce_equal(&expected_claim_post_phase2_var)?; - let expected_transcript_state_var = transcript_var.challenge()?; let claimed_transcript_state_var = FpVar::::new_input(cs, || Ok(self.claimed_transcript_sat_state))?; @@ -401,7 +400,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { // Ensure that the prover and verifier transcipt views are consistent at // the end of the satisfiability proof. 
expected_transcript_state_var.enforce_equal(&claimed_transcript_state_var)?; - Ok(()) } } diff --git a/src/lib.rs b/src/lib.rs index 2dd548a8..001db7cc 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -407,7 +407,7 @@ impl SNARK { // side all the previous updates are done on the transcript // circuit variable and the transcript outside the circuit will be // inconsistent wrt to the prover's. - transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check @@ -480,7 +480,7 @@ impl SNARK { // TODO: find a way to retrieve this state from the circuit. Currently // the API for generating constraints doesn't support returning values // computed inside the circuit. - transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); let (Ar, Br, Cr) = &self.inst_evals; transcript.append_scalar(&Ar); @@ -598,10 +598,10 @@ impl NIZK { // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check - let timer_eval = Timer::new("eval_sparse_polys"); + // let timer_eval = Timer::new("eval_sparse_polys"); let (claimed_rx, claimed_ry) = &self.r; let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); - timer_eval.stop(); + // timer_eval.stop(); let timer_sat_proof = Timer::new("verify_sat_proof"); assert_eq!(input.assignment.len(), inst.inst.get_num_inputs()); diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index f4440226..247b8c35 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,14 +1,16 @@ use crate::group::{CompressedGroup, Fr}; use super::scalar::Scalar; -use ark_bls12_377::Bls12_377 as I; +use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_ec::PairingEngine; +use ark_ff::{Field, PrimeField}; use 
ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; -// use ark_r1cs_std::prelude::*; use ark_sponge::{ poseidon::{PoseidonParameters, PoseidonSponge}, CryptographicSponge, }; +use snarkpack::Transcript; #[derive(Clone)] /// TODO @@ -17,6 +19,22 @@ pub struct PoseidonTranscript { params: PoseidonParameters, } +impl Transcript for PoseidonTranscript { + fn domain_sep(&mut self) { + self.sponge.absorb(&b"testudo".to_vec()); + } + + fn append(&mut self, label: &'static [u8], point: &S) { + let mut buf = Vec::new(); + point.serialize(&mut buf).expect("serialization failed"); + self.sponge.absorb(&buf); + } + + fn challenge_scalar(&mut self, label: &'static [u8]) -> F { + self.sponge.squeeze_field_elements(1).remove(0) + } +} + impl PoseidonTranscript { /// create a new transcript pub fn new(params: &PoseidonParameters) -> Self { @@ -56,6 +74,12 @@ impl PoseidonTranscript { } } + pub fn append_gt(&mut self, g_t: &::Fqk) { + let mut bytes = Vec::new(); + g_t.serialize(&mut bytes).unwrap(); + self.append_bytes(&bytes); + } + pub fn challenge_scalar(&mut self) -> Scalar { self.sponge.squeeze_field_elements(1).remove(0) } @@ -82,3 +106,11 @@ impl AppendToPoseidon for Commitment { transcript.append_bytes(&bytes); } } + +impl AppendToPoseidon for G1Affine { + fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { + let mut bytes = Vec::new(); + self.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); + } +} diff --git a/src/r1csproof.rs b/src/r1csproof.rs index a2b3ede8..abcc75fc 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -12,6 +12,7 @@ use ark_ec::PairingEngine; use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; +use snarkpack::mipp::MippProof; use super::commitments::MultiCommitGens; use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; @@ -45,6 +46,7 
@@ pub struct R1CSProof { // The transcript state after the satisfiability proof was computed. pub transcript_sat_state: Scalar, pub t: ::Fqk, + pub mipp_proof: MippProof, } #[derive(Clone)] pub struct R1CSSumcheckGens { @@ -237,31 +239,29 @@ impl R1CSProof { transcript, ); timer_sc_proof_phase2.stop(); + let c = transcript.challenge_scalar(); + transcript.new_from_state(&c); // TODO: modify the polynomial evaluation in Spartan to be consistent // with the evaluation in ark-poly-commit so that reversing is not needed // anymore let timmer_opening = Timer::new("polyopening"); - let mut dummy = ry[1..].to_vec().clone(); - dummy.reverse(); - let q = pl.get_q(&dummy); + let q = pl.get_q(&ry[1..]); + timer_prove.stop(); - let (comm, proof_eval_vars_at_ry) = PolyList::open_q(comm_list, &gens.gens_pc.ck, &q, &dummy); + let (comm, proof_eval_vars_at_ry, mipp_proof) = + PolyList::open_q(transcript, comm_list, &gens.gens_pc.ck, &q, &ry[1..], &t); println!( "proof size (no of quotients): {:?}", proof_eval_vars_at_ry.proofs.len() ); - // comm.append_to_poseidon(transcript); + timmer_opening.stop(); let timer_polyeval = Timer::new("polyeval"); - let eval_vars_at_ry = PolyList::eval_q(q.clone(), &dummy); + let eval_vars_at_ry = PolyList::eval_q(q.clone(), &ry[1..]); timer_polyeval.stop(); - timer_prove.stop(); - - let c = transcript.challenge_scalar(); - ( R1CSProof { comm, @@ -273,7 +273,8 @@ impl R1CSProof { rx: rx.clone(), ry: ry.clone(), transcript_sat_state: c, - t: t, + t, + mipp_proof, }, rx, ry, @@ -333,6 +334,7 @@ impl R1CSProof { let dp1 = start.elapsed().as_millis(); prove_inner.stop(); + // this is universal, we don't measure it let start = Instant::now(); let (pk, vk) = Groth16::

::setup(circuit.clone(), &mut rng).unwrap(); let ds = start.elapsed().as_millis(); @@ -344,24 +346,25 @@ impl R1CSProof { prove_outer.stop(); let start = Instant::now(); + let verifier_time = Timer::new("groth16_verification"); let is_verified = Groth16::

::verify(&vk, &[], &proof).unwrap(); assert!(is_verified); + verifier_time.stop(); let timer_verification = Timer::new("commitverification"); - let mut dummy = self.ry[1..].to_vec(); - // TODO: ensure ark-poly-commit and Spartan produce consistent results - // when evaluating a polynomial at a given point so this reverse is not - // needed. - dummy.reverse(); + transcript.new_from_state(&self.transcript_sat_state); // Verifies the proof of opening against the result of evaluating the // witness polynomial at point ry. let res = PolyList::verify_q( + transcript, &gens.gens_pc.vk, &self.comm, - &dummy, + &self.ry[1..], self.eval_vars_at_ry, &self.proof_eval_vars_at_ry, + &self.mipp_proof, + &self.t, ); timer_verification.stop(); @@ -382,7 +385,10 @@ impl R1CSProof { transcript: &mut PoseidonTranscript, gens: &R1CSGens, ) -> Result { - // self.comm.append_to_poseidon(transcript); + // serialise and add the IPP commitment to the transcript + let mut bytes = Vec::new(); + self.t.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); let c = transcript.challenge_scalar(); diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 17be7997..a138a851 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,16 +1,24 @@ -use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_bls12_377::{Bls12_377 as I, G1Affine, G2Affine}; use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; -use ark_ff::{One, PrimeField}; +use ark_ff::{BigInteger256, One, PrimeField}; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, }; +use ark_serialize::CanonicalSerialize; use rayon::prelude::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, }; +use snarkpack::mipp::MippProof; use super::scalar::Scalar; -use crate::{dense_mlpoly::DensePolynomial, math::Math, timer::Timer}; +use crate::{ + dense_mlpoly::DensePolynomial, + math::Math, + poseidon_transcript::{AppendToPoseidon, 
PoseidonTranscript}, + timer::Timer, + transcript, +}; pub struct PolyList { m: usize, @@ -50,11 +58,12 @@ impl PolyList { let q_timer = Timer::new("build_q"); assert!(point.len() == 2 * self.m); let a = &point[0..self.m]; + let b = &point[self.m..2 * self.m]; let pow_m = 2_usize.pow(self.m as u32); let chis: Vec = (0..pow_m) .into_par_iter() - .map(|i| Self::get_chi_i(a, i)) + .map(|i| Self::get_chi_i(b, i)) .collect(); let z_q: Vec = (0..pow_m) @@ -69,10 +78,11 @@ impl PolyList { // Given point = (\vec{a}, \vec{b}) used to construct q // compute q(b) = p(a,b). pub fn eval_q(q: DensePolynomial, point: &[Scalar]) -> Scalar { + let a = &point[0..point.len() / 2]; let b = &point[point.len() / 2..point.len()]; let prods = (0..q.Z.len()) .into_par_iter() - .map(|j| q.Z[j] * PolyList::get_chi_i(b, j)); + .map(|j| q.Z[j] * PolyList::get_chi_i(&a, j)); prods.sum() } @@ -108,7 +118,7 @@ impl PolyList { ) .collect(); - // computer the IPP commitment + // compute the IPP commitment let t = I::product_of_pairings(pairings.iter()); ipp_timer.stop(); @@ -122,7 +132,7 @@ impl PolyList { let mut prod = Scalar::one(); for j in 0..m { let b_j = b[j]; - if i >> j & 1 == 1 { + if i >> (m - j - 1) & 1 == 1 { prod = prod * b_j; } else { prod = prod * (Scalar::one() - b_j) @@ -132,11 +142,13 @@ impl PolyList { } pub fn open_q( + transcript: &mut PoseidonTranscript, comm_list: Vec>, ck: &CommitterKey, q: &DensePolynomial, point: &[Scalar], - ) -> (Commitment, Proof) { + t: &::Fqk, + ) -> (Commitment, Proof, MippProof) { let m = point.len() / 2; let a = &point[0..m]; let b = &point[m..2 * m]; @@ -151,19 +163,14 @@ impl PolyList { let timer_msm = Timer::new("msm"); let chis: Vec<_> = (0..pow_m) .into_par_iter() - .map(|i| Self::get_chi_i(a, i).into_repr()) + .map(|i| Self::get_chi_i(b, i)) .collect(); + let chis_repr: Vec = chis.par_iter().map(|y| y.into_repr()).collect(); assert!(chis.len() == comm_list.len()); + let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); - 
let c_u = VariableBaseMSM::multi_scalar_mul( - comm_list - .par_iter() - .map(|c| c.g_product) - .collect::>() - .as_slice(), - chis.as_slice(), - ) - .into_affine(); + let c_u = + VariableBaseMSM::multi_scalar_mul(a_vec.as_slice(), chis_repr.as_slice()).into_affine(); let U: Commitment = Commitment { nv: q.num_vars, @@ -174,36 +181,59 @@ impl PolyList { let comm = MultilinearPC::::commit(ck, q); assert!(c_u == comm.g_product); - // TODO: MIPP proof that U is the inner product of the opening - // vector A to T and the vector y + let h_vec = ck.powers_of_h[0].clone(); + // TODO: MIPP proof that U is the inner product of the vector A + // and the vector y, where A is the opening vector to T + + let timer_mipp_proof = Timer::new("mipp_prove"); + let mipp_proof = + MippProof::::prove::(transcript, ck, a_vec, chis, h_vec, &c_u, t) + .unwrap(); + timer_mipp_proof.stop(); // PST proof for opening q at b - let timer_proof = Timer::new("open"); - let pst_proof = MultilinearPC::::open(ck, q, &b); + let timer_proof = Timer::new("pst_open"); + let mut a_rev = a.to_vec().clone(); + a_rev.reverse(); + let pst_proof = MultilinearPC::::open(ck, q, &a_rev); timer_proof.stop(); timer_open.stop(); // TODO: add MIPP proof as return value - (U, pst_proof) + (U, pst_proof, mipp_proof) } pub fn verify_q( + transcript: &mut PoseidonTranscript, vk: &VerifierKey, U: &Commitment, point: &[Scalar], v: Scalar, pst_proof: &Proof, - // TODO: add MIPP proof as argument + mipp_proof: &MippProof, + T: &::Fqk, ) -> bool { - // TODO: MIPP verification - let len = point.len(); + let a = &point[0..len / 2]; let b = &point[len / 2..len]; - - let timer_verify = Timer::new("sqrt_verify"); - let res = MultilinearPC::::check(vk, U, b, v, pst_proof); - timer_verify.stop(); + let timer_mipp_verify = Timer::new("mipp_verify"); + let res_mipp = MippProof::::verify::( + vk, + transcript, + mipp_proof, + b.to_vec(), + &U.g_product, + T, + ); + assert!(res_mipp == true); + timer_mipp_verify.stop(); + + let mut 
a_rev = a.to_vec().clone(); + a_rev.reverse(); + let timer_pst_verify = Timer::new("pst_verify"); + let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); + timer_pst_verify.stop(); res } } @@ -212,6 +242,8 @@ impl PolyList { mod tests { use std::clone; + use crate::parameters::poseidon_params; + use super::*; use ark_ff::Zero; use ark_std::UniformRand; @@ -233,7 +265,7 @@ mod tests { let res1 = p.evaluate(&r); let mut r_new = r.to_vec(); - r_new.reverse(); + // r_new.reverse(); let pl = PolyList::new(&Z.clone()); let q = pl.get_q(&r_new); let res2 = PolyList::eval_q(q.clone(), &r_new); @@ -244,7 +276,7 @@ mod tests { #[test] fn check_new_poly_commit() { let mut rng = ark_std::test_rng(); - let num_vars = 26; + let num_vars = 4; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len) .into_iter() @@ -255,8 +287,8 @@ mod tests { .map(|_| Scalar::rand(&mut rng)) .collect(); - let gens = MultilinearPC::::setup(13, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&gens, 13); + let gens = MultilinearPC::::setup(2, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&gens, 2); let pl = PolyList::new(&Z.clone()); let q = pl.get_q(&r); @@ -265,9 +297,24 @@ mod tests { let (comm_list, t) = PolyList::commit(&pl, &ck); - let (u, pst_proof) = PolyList::open_q(comm_list, &ck, &q, &r); + let params = poseidon_params(); + let mut prover_transcript = PoseidonTranscript::new(¶ms); + + let (u, pst_proof, mipp_proof) = + PolyList::open_q(&mut prover_transcript, comm_list, &ck, &q, &r, &t); + + let mut verifier_transcript = PoseidonTranscript::new(¶ms); - let res = PolyList::verify_q(&vk, &u, &r, v, &pst_proof); + let res = PolyList::verify_q( + &mut verifier_transcript, + &vk, + &u, + &r, + v, + &pst_proof, + &mipp_proof, + &t, + ); assert!(res == true); } } From 7a7707e41e0ab84845b811e3b77ebefde659ad52 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Mon, 6 Feb 2023 10:43:33 +0100 Subject: [PATCH 04/64] adding mipp as submodule directly --- src/lib.rs | 3 + src/macros.rs | 72 
+++++++ src/mipp.rs | 395 +++++++++++++++++++++++++++++++++++++ src/poseidon_transcript.rs | 2 +- src/r1csproof.rs | 2 +- src/sqrt_pst.rs | 2 +- src/timer.rs | 1 + src/transcript.rs | 2 + 8 files changed, 476 insertions(+), 3 deletions(-) create mode 100644 src/macros.rs create mode 100644 src/mipp.rs diff --git a/src/lib.rs b/src/lib.rs index 001db7cc..ebb4e88a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -23,7 +23,10 @@ mod commitments; mod dense_mlpoly; mod errors; mod group; +#[macro_use] +pub(crate) mod macros; mod math; +pub(crate) mod mipp; mod nizk; mod product_tree; mod r1csinstance; diff --git a/src/macros.rs b/src/macros.rs new file mode 100644 index 00000000..d6fc14a0 --- /dev/null +++ b/src/macros.rs @@ -0,0 +1,72 @@ +macro_rules! try_par { + ($(let $name:ident = $f:expr),+) => { + $( + let mut $name = None; + )+ + rayon::scope(|s| { + $( + let $name = &mut $name; + s.spawn(move |_| { + *$name = Some($f); + });)+ + }); + $( + let $name = $name.unwrap()?; + )+ + }; +} + +macro_rules! par { + ($(let $name:ident = $f:expr),+) => { + $( + let mut $name = None; + )+ + rayon::scope(|s| { + $( + let $name = &mut $name; + s.spawn(move |_| { + *$name = Some($f); + });)+ + }); + $( + let $name = $name.unwrap(); + )+ + }; + + ($(let ($name1:ident, $name2:ident) = $f:block),+) => { + $( + let mut $name1 = None; + let mut $name2 = None; + )+ + rayon::scope(|s| { + $( + let $name1 = &mut $name1; + let $name2 = &mut $name2; + s.spawn(move |_| { + let (a, b) = $f; + *$name1 = Some(a); + *$name2 = Some(b); + });)+ + }); + $( + let $name1 = $name1.unwrap(); + let $name2 = $name2.unwrap(); + )+ + } +} + +macro_rules! mul { + ($a:expr, $b:expr) => {{ + let mut a = $a; + a.mul_assign($b); + a + }}; +} + +macro_rules! 
sub { + ($a:expr, $b:expr) => {{ + let mut a = $a; + a.sub_assign($b); + a + }}; +} diff --git a/src/mipp.rs b/src/mipp.rs new file mode 100644 index 00000000..61fb0a97 --- /dev/null +++ b/src/mipp.rs @@ -0,0 +1,395 @@ +use super::macros::*; +use ark_ec::msm::VariableBaseMSM; +use ark_ec::ProjectiveCurve; +use ark_ec::{AffineCurve, PairingEngine}; +use ark_ff::{Field, PrimeField}; +use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; +use ark_poly_commit::multilinear_pc::data_structures::{ + Commitment_G2, CommitterKey, Proof, Proof_G1, VerifierKey, +}; +use ark_poly_commit::multilinear_pc::MultilinearPC; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; +use ark_std::cfg_iter; +use ark_std::One; +use ark_std::Zero; +use rayon::iter::ParallelIterator; +use rayon::prelude::IntoParallelIterator; +use rayon::prelude::*; +use std::ops::{Add, Mul, MulAssign, SubAssign}; +use thiserror::Error; + +#[derive(Debug, Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct MippProof { + pub comms_t: Vec<(::Fqk, ::Fqk)>, + pub comms_u: Vec<(E::G1Affine, E::G1Affine)>, + pub final_a: E::G1Affine, + pub final_h: E::G2Affine, + pub pst_proof_h: Proof_G1, +} + +impl MippProof { + pub fn prove( + transcript: &mut impl Transcript, + ck: &CommitterKey, + a: Vec, + y: Vec, + h: Vec, + U: &E::G1Affine, + T: &::Fqk, + ) -> Result, Error> { + // the values of vectors C and bits rescaled at each step of the loop + // these are A and y + let (mut m_a, mut m_y) = (a.clone(), y.clone()); + // the values of the commitment keys rescaled at each step of the loop + // these are the h for me + let mut m_h = h.clone(); + + // storing the values for including in the proof + // these are T_l and T_r + let mut comms_t = Vec::new(); + // these are U_l and U_r + let mut comms_u = Vec::new(); + // these are the x-es + let mut xs: Vec = Vec::new(); + let mut xs_inv: Vec = Vec::new(); + + // we already appended t + transcript.append(b"U", U); 
+ while m_a.len() > 1 { + // recursive step + // Recurse with problem of half size + let split = m_a.len() / 2; + + // MIPP /// + // c[:n'] c[n':] + let (a_l, a_r) = m_a.split_at_mut(split); + // r[:n'] r[:n'] + let (y_l, y_r) = m_y.split_at_mut(split); + + let (h_l, h_r) = m_h.split_at_mut(split); + + // since we do this in parallel we take reference first so it can be + // moved within the macro's rayon scope. + let (rh_l, rh_r) = (&h_l, &h_r); + let (ra_l, ra_r) = (&a_l, &a_r); + let (ry_l, ry_r) = (&y_l, &y_r); + // See section 3.3 for paper version with equivalent names + try_par! { + // MIPP part + // Compute cross commitment C^r + // z_l = c[n':] ^ r[:n'] + // TODO to replace by bitsf_multiexp + let comm_u_l = multiexponentiation(ra_l, &ry_r), + // Z_r = c[:n'] ^ r[n':] + let comm_u_r = multiexponentiation(ra_r, &ry_l) + + }; + // Compute C commitment over the distinct halfs of C + // u_l = c[n':] * v[:n'] + let comm_t_l = pairings_product::(&ra_l, rh_r); + // u_r = c[:n'] * v[n':] + let comm_t_r = pairings_product::(&ra_r, rh_l); + + // Fiat-Shamir challenge + transcript.append(b"comm_u_l", &comm_u_l); + transcript.append(b"comm_u_r", &comm_u_r); + transcript.append(b"comm_t_l", &comm_t_l); + transcript.append(b"comm_t_r", &comm_t_r); + let c_inv = transcript.challenge_scalar::(b"challenge_i"); + + // Optimization for multiexponentiation to rescale G2 elements with + // 128-bit challenge Swap 'c' and 'c_inv' since can't control bit size + // of c_inv + let c = c_inv.inverse().unwrap(); + + // Set up values for next step of recursion + // c[:n'] + c[n':]^x + compress(&mut m_a, split, &c); + compress_field(&mut m_y, split, &c_inv); + + // v_left + v_right^x^-1 + compress(&mut m_h, split, &c_inv); + + comms_t.push((comm_t_l, comm_t_r)); + comms_u.push((comm_u_l.into_affine(), comm_u_r.into_affine())); + xs.push(c); + xs_inv.push(c_inv); + } + + assert!(m_a.len() == 1 && m_y.len() == 1 && m_h.len() == 1); + + let final_a = m_a[0]; + let final_h = m_h[0]; + + // 
println!("before evaluations"); + // get polynomial + let poly = DenseMultilinearExtension::::from_evaluations_vec( + xs_inv.len(), + Self::polynomial_evaluations_from_transcript::(&xs_inv), + ); + let c = MultilinearPC::::commit_g2(ck, &poly); + assert!(c.h_product == final_h); + + // create proof that h is indeed correct + let mut point: Vec = (0..poly.num_vars) + .into_iter() + .map(|_| transcript.challenge_scalar::(b"random_point")) + .collect(); + + let pst_proof_h = MultilinearPC::::open_g1(ck, &poly, &point); + + println!("PROVER: last challenge {}", xs.last().unwrap()); + println!("PROVER: last y {}", m_y.last().unwrap()); + println!("PROVER: last final c {:?}", m_a.last().unwrap()); + + Ok( + (MippProof { + comms_t, + comms_u, + final_a, + final_h: final_h, + pst_proof_h, + }), + ) + } + + fn polynomial_evaluations_from_transcript(cs_inv: &[F]) -> Vec { + let m = cs_inv.len(); + let pow_m = 2_usize.pow(m as u32); + + let evals = (0..pow_m) + .into_par_iter() + .map(|i| { + let mut res = F::one(); + for j in 0..m { + if (i >> j) & 1 == 1 { + res *= cs_inv[m - j - 1]; + } + } + res + }) + .collect(); + evals + } + + pub fn verify( + vk: &VerifierKey, + transcript: &mut impl Transcript, + proof: &MippProof, + point: Vec, + U: &E::G1Affine, + T: &::Fqk, + ) -> bool { + let comms_u = proof.comms_u.clone(); + let comms_t = proof.comms_t.clone(); + + let mut xs = Vec::new(); + let mut xs_inv = Vec::new(); + let mut final_y = E::Fr::one(); + let mut u_prime = U.clone().into_projective(); + let mut t_prime = T.clone(); + + transcript.append(b"U", U); + assert!(comms_u.len() == point.len()); + for (i, (comm_u, comm_t)) in comms_u.iter().zip(comms_t.iter()).enumerate() { + let (comm_u_l, comm_u_r) = comm_u; + let (comm_t_l, comm_t_r) = comm_t; + + // Fiat-Shamir challenge + transcript.append(b"comm_u_l", comm_u_l); + transcript.append(b"comm_u_r", comm_u_r); + transcript.append(b"comm_t_l", comm_t_l); + transcript.append(b"comm_t_r", comm_t_r); + let c_inv = 
transcript.challenge_scalar::(b"challenge_i"); + + let c = c_inv.inverse().unwrap(); + xs.push(c); + xs_inv.push(c_inv); + } + let len = point.len(); + + let final_y: E::Fr = (0..len) + .into_par_iter() + .map(|i| E::Fr::one() + xs_inv[i].mul(point[i]) - point[i]) + .product(); + + u_prime += (0..len) + .into_iter() + .map(|i| { + let (comm_u_l, comm_u_r) = comms_u[i]; + comm_u_l.into_projective().mul(xs_inv[i].into_repr()) + + comm_u_r.into_projective().mul(xs[i].into_repr()) + }) + .sum::(); + + t_prime *= (0..len) + .into_par_iter() + .map(|i| { + let (comm_t_l, comm_t_r) = comms_t[i]; + comm_t_l.pow(xs_inv[i].into_repr()) * comm_t_r.pow(xs[i].into_repr()) + }) + .product::(); + + println!("VERIFIER: last challenge {}", xs.last().unwrap()); + println!("VERIFIER: last y {}", final_y); + println!("VERIFIER: last final c from prover {:?}", proof.final_a); + + // compute structured polynomial h at a random point + let mut point: Vec = Vec::new(); + let m = xs_inv.len(); + for i in 0..m { + let r = transcript.challenge_scalar::(b"random_point"); + point.push(r); + } + let v = (0..m) + .into_par_iter() + .map(|i| E::Fr::one() + point[i].mul(xs_inv[m - i - 1]) - point[i]) + .product(); + + // println!("VERIFIER: v is {}", v); + + let comm_h = Commitment_G2 { + nv: m, + h_product: proof.final_h, + }; + let check_h = MultilinearPC::::check_2(vk, &comm_h, &point, v, &proof.pst_proof_h); + assert!(check_h == true); + + let final_u = proof.final_a.mul(final_y); + let final_t: ::Fqk = E::pairing(proof.final_a, proof.final_h); + + let check_t = t_prime == final_t; + assert!(check_t == true); + let check_u = u_prime == final_u; + assert!(check_u == true); + check_h & check_u & check_t + } +} + +/// compress is similar to commit::{V,W}KEY::compress: it modifies the `vec` +/// vector by setting the value at index $i:0 -> split$ $vec[i] = vec[i] + +/// vec[i+split]^scaler$. The `vec` vector is half of its size after this call. 
+pub fn compress(vec: &mut Vec, split: usize, scaler: &C::ScalarField) { + let (left, right) = vec.split_at_mut(split); + left + .par_iter_mut() + .zip(right.par_iter()) + .for_each(|(a_l, a_r)| { + // TODO remove that with master version + let mut x = a_r.mul(scaler.into_repr()); + x.add_assign_mixed(&a_l); + *a_l = x.into_affine(); + }); + let len = left.len(); + vec.resize(len, C::zero()); +} + +// TODO make that generic with points as well +pub fn compress_field(vec: &mut Vec, split: usize, scaler: &F) { + let (left, right) = vec.split_at_mut(split); + assert!(left.len() == right.len()); + left + .par_iter_mut() + .zip(right.par_iter_mut()) + .for_each(|(a_l, a_r)| { + // TODO remove copy + a_r.mul_assign(scaler); + a_l.add_assign(a_r.clone()); + }); + let len = left.len(); + vec.resize(len, F::zero()); +} + +pub fn multiexponentiation( + left: &[G], + right: &[G::ScalarField], +) -> Result { + if left.len() != right.len() { + return Err(Error::InvalidIPVectorLength); + } + + Ok(VariableBaseMSM::multi_scalar_mul( + left, + &cfg_iter!(right).map(|s| s.into_repr()).collect::>(), + )) +} + +pub fn pairings_product(gs: &[E::G1Affine], hs: &[E::G2Affine]) -> E::Fqk { + let pairings: Vec<_> = gs + .into_par_iter() + .map(|g| ::G1Prepared::from(*g)) + .zip( + hs.into_par_iter() + .map(|h| ::G2Prepared::from(*h)), + ) + .collect(); + + E::product_of_pairings(pairings.iter()) +} + +#[derive(Debug, Error)] +pub enum Error { + #[error("Serialization error: {0}")] + Serialization(#[from] SerializationError), + + #[error("Commitment key length invalid")] + InvalidKeyLength, + + #[error("Vectors length do not match for inner product (IP)")] + InvalidIPVectorLength, + + #[error("Invalid pairing result")] + InvalidPairing, + + #[error("Invalid SRS: {0}")] + InvalidSRS(String), + + #[error("Invalid proof: {0}")] + InvalidProof(String), + + #[error("Malformed Groth16 verifying key")] + MalformedVerifyingKey, +} + +/// Transcript is the application level transcript to derive the 
challenges +/// needed for Fiat Shamir during aggregation. It is given to the +/// prover/verifier so that the transcript can be fed with any other data first. +/// TODO: Make this trait the only Transcript trait +pub trait Transcript { + fn domain_sep(&mut self); + fn append(&mut self, label: &'static [u8], point: &S); + fn challenge_scalar(&mut self, label: &'static [u8]) -> F; +} +#[cfg(test)] +mod tests { + use ark_bls12_381::{Bls12_381, Fr}; + use ark_ec::PairingEngine; + use ark_poly::DenseMultilinearExtension; + use ark_poly_commit::multilinear_pc::MultilinearPC; + use ark_std::{test_rng, UniformRand}; + type E = Bls12_381; + #[test] + fn test_setup() { + let mut rng = test_rng(); + let params = MultilinearPC::::setup(2, &mut rng); + // list of evaluation for polynomial + // 1 + 2*x_1 + x_2 + x_1x_2 + let evals_1 = vec![ + Fr::from(1u64), + Fr::from(4u64), + Fr::from(2u64), + Fr::from(5u64), + ]; + let poly_1 = DenseMultilinearExtension::::from_evaluations_vec(2, evals_1); + + // list of evaluation for polynomial + // 1 + x_1 + x_2 + 2*x_1x_2 + let evals_2 = vec![ + Fr::from(1u64), + Fr::from(2u64), + Fr::from(2u64), + Fr::from(5u64), + ]; + let poly_2 = DenseMultilinearExtension::::from_evaluations_vec(2, evals_2); + } +} diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 247b8c35..37728a93 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,6 +1,7 @@ use crate::group::{CompressedGroup, Fr}; use super::scalar::Scalar; +use crate::mipp::Transcript; use ark_bls12_377::{Bls12_377 as I, G1Affine}; use ark_ec::PairingEngine; use ark_ff::{Field, PrimeField}; @@ -10,7 +11,6 @@ use ark_sponge::{ poseidon::{PoseidonParameters, PoseidonSponge}, CryptographicSponge, }; -use snarkpack::Transcript; #[derive(Clone)] /// TODO diff --git a/src/r1csproof.rs b/src/r1csproof.rs index abcc75fc..9f08fc77 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -12,7 +12,7 @@ use ark_ec::PairingEngine; use 
ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; -use snarkpack::mipp::MippProof; +use crate::mipp::MippProof; use super::commitments::MultiCommitGens; use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index a138a851..61abdff8 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,3 +1,4 @@ +use crate::mipp::MippProof; use ark_bls12_377::{Bls12_377 as I, G1Affine, G2Affine}; use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; use ark_ff::{BigInteger256, One, PrimeField}; @@ -9,7 +10,6 @@ use ark_serialize::CanonicalSerialize; use rayon::prelude::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, }; -use snarkpack::mipp::MippProof; use super::scalar::Scalar; use crate::{ diff --git a/src/timer.rs b/src/timer.rs index 8356a35d..fbc2d316 100644 --- a/src/timer.rs +++ b/src/timer.rs @@ -1,3 +1,4 @@ +/// Timer is a simple utility to profile the execution time of a block of code. 
#[cfg(feature = "profile")] use colored::Colorize; #[cfg(feature = "profile")] diff --git a/src/transcript.rs b/src/transcript.rs index a5c22ead..e1e5087e 100644 --- a/src/transcript.rs +++ b/src/transcript.rs @@ -12,6 +12,8 @@ pub trait ProofTranscript { fn challenge_vector(&mut self, label: &'static [u8], len: usize) -> Vec; } + + impl ProofTranscript for Transcript { fn append_protocol_name(&mut self, protocol_name: &'static [u8]) { self.append_message(b"protocol-name", protocol_name); From 59b2371ab04e39bd0fe722a88b502429801841cf Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Thu, 19 Jan 2023 14:42:00 +0000 Subject: [PATCH 05/64] snarkpack integration --- Cargo.toml | 5 +- src/constraints.rs | 2 - src/lib.rs | 8 +- src/poseidon_transcript.rs | 36 ++++++- src/r1csproof.rs | 49 +++++---- src/sqrt_pst.rs | 204 ++++++++++++++++++++++++------------- 6 files changed, 200 insertions(+), 104 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index da7c8353..6d896568 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,6 +42,8 @@ ark-groth16 = { version = "^0.3.0", features = ["r1cs"] } ark-bw6-761 = { version = "^0.3.0" } ark-poly-commit = { version = "^0.3.0" } ark-poly = {version = "^0.3.0"} +snarkpack = { path="../snarkpack"} + lazy_static = "1.4.0" rand = { version = "0.8", features = [ "std", "std_rng" ] } @@ -91,5 +93,6 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri [patch.crates-io] ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"} -ark-poly-commit = {git = "https://github.com/maramihali/poly-commit"} +ark-poly-commit = {git = "https://github.com/maramihali/poly-commit", branch="pst_g2"} + diff --git a/src/constraints.rs b/src/constraints.rs index 8ffb6fb3..71436c7e 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -393,7 +393,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let expected_claim_post_phase2_var = eval_Z_at_ry_var * scalar_var; 
claim_post_phase2_var.enforce_equal(&expected_claim_post_phase2_var)?; - let expected_transcript_state_var = transcript_var.challenge()?; let claimed_transcript_state_var = FpVar::::new_input(cs, || Ok(self.claimed_transcript_sat_state))?; @@ -401,7 +400,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { // Ensure that the prover and verifier transcipt views are consistent at // the end of the satisfiability proof. expected_transcript_state_var.enforce_equal(&claimed_transcript_state_var)?; - Ok(()) } } diff --git a/src/lib.rs b/src/lib.rs index 2dd548a8..001db7cc 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -407,7 +407,7 @@ impl SNARK { // side all the previous updates are done on the transcript // circuit variable and the transcript outside the circuit will be // inconsistent wrt to the prover's. - transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check @@ -480,7 +480,7 @@ impl SNARK { // TODO: find a way to retrieve this state from the circuit. Currently // the API for generating constraints doesn't support returning values // computed inside the circuit. 
- transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); let (Ar, Br, Cr) = &self.inst_evals; transcript.append_scalar(&Ar); @@ -598,10 +598,10 @@ impl NIZK { // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check - let timer_eval = Timer::new("eval_sparse_polys"); + // let timer_eval = Timer::new("eval_sparse_polys"); let (claimed_rx, claimed_ry) = &self.r; let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); - timer_eval.stop(); + // timer_eval.stop(); let timer_sat_proof = Timer::new("verify_sat_proof"); assert_eq!(input.assignment.len(), inst.inst.get_num_inputs()); diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index f4440226..247b8c35 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,14 +1,16 @@ use crate::group::{CompressedGroup, Fr}; use super::scalar::Scalar; -use ark_bls12_377::Bls12_377 as I; +use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_ec::PairingEngine; +use ark_ff::{Field, PrimeField}; use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; -// use ark_r1cs_std::prelude::*; use ark_sponge::{ poseidon::{PoseidonParameters, PoseidonSponge}, CryptographicSponge, }; +use snarkpack::Transcript; #[derive(Clone)] /// TODO @@ -17,6 +19,22 @@ pub struct PoseidonTranscript { params: PoseidonParameters, } +impl Transcript for PoseidonTranscript { + fn domain_sep(&mut self) { + self.sponge.absorb(&b"testudo".to_vec()); + } + + fn append(&mut self, label: &'static [u8], point: &S) { + let mut buf = Vec::new(); + point.serialize(&mut buf).expect("serialization failed"); + self.sponge.absorb(&buf); + } + + fn challenge_scalar(&mut self, label: &'static [u8]) -> F { + self.sponge.squeeze_field_elements(1).remove(0) + } +} + impl PoseidonTranscript { /// create a new transcript pub fn new(params: 
&PoseidonParameters) -> Self { @@ -56,6 +74,12 @@ impl PoseidonTranscript { } } + pub fn append_gt(&mut self, g_t: &::Fqk) { + let mut bytes = Vec::new(); + g_t.serialize(&mut bytes).unwrap(); + self.append_bytes(&bytes); + } + pub fn challenge_scalar(&mut self) -> Scalar { self.sponge.squeeze_field_elements(1).remove(0) } @@ -82,3 +106,11 @@ impl AppendToPoseidon for Commitment { transcript.append_bytes(&bytes); } } + +impl AppendToPoseidon for G1Affine { + fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { + let mut bytes = Vec::new(); + self.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); + } +} diff --git a/src/r1csproof.rs b/src/r1csproof.rs index a2b3ede8..20d053d3 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -4,7 +4,7 @@ use crate::group::{Fq, Fr}; use crate::math::Math; use crate::parameters::poseidon_params; use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; -use crate::sqrt_pst::PolyList; +use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; use ark_bls12_377::Bls12_377 as I; use ark_bw6_761::BW6_761 as P; @@ -12,6 +12,7 @@ use ark_ec::PairingEngine; use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; +use snarkpack::mipp::MippProof; use super::commitments::MultiCommitGens; use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; @@ -45,6 +46,7 @@ pub struct R1CSProof { // The transcript state after the satisfiability proof was computed. 
pub transcript_sat_state: Scalar, pub t: ::Fqk, + pub mipp_proof: MippProof, } #[derive(Clone)] pub struct R1CSSumcheckGens { @@ -146,12 +148,12 @@ impl R1CSProof { // create the multilinear witness polynomial from the satisfying assiment // expressed as the list of sqrt-sized polynomials - let pl = PolyList::new(&vars.clone()); + let mut pl = Polynomial::from_evaluations(&vars.clone()); let timer_commit = Timer::new("polycommit"); // commitment list to the satisfying witness polynomial list - let (comm_list, t) = PolyList::commit(&pl, &gens.gens_pc.ck); + let (comm_list, t) = pl.commit(&gens.gens_pc.ck); let mut bytes = Vec::new(); t.serialize(&mut bytes).unwrap(); @@ -237,31 +239,28 @@ impl R1CSProof { transcript, ); timer_sc_proof_phase2.stop(); + let c = transcript.challenge_scalar(); + transcript.new_from_state(&c); // TODO: modify the polynomial evaluation in Spartan to be consistent // with the evaluation in ark-poly-commit so that reversing is not needed // anymore let timmer_opening = Timer::new("polyopening"); - let mut dummy = ry[1..].to_vec().clone(); - dummy.reverse(); - let q = pl.get_q(&dummy); + timer_prove.stop(); - let (comm, proof_eval_vars_at_ry) = PolyList::open_q(comm_list, &gens.gens_pc.ck, &q, &dummy); + let (comm, proof_eval_vars_at_ry, mipp_proof) = + pl.open(transcript, comm_list, &gens.gens_pc.ck, &ry[1..], &t); println!( "proof size (no of quotients): {:?}", proof_eval_vars_at_ry.proofs.len() ); - // comm.append_to_poseidon(transcript); + timmer_opening.stop(); let timer_polyeval = Timer::new("polyeval"); - let eval_vars_at_ry = PolyList::eval_q(q.clone(), &dummy); + let eval_vars_at_ry = pl.eval(&ry[1..]); timer_polyeval.stop(); - timer_prove.stop(); - - let c = transcript.challenge_scalar(); - ( R1CSProof { comm, @@ -273,7 +272,8 @@ impl R1CSProof { rx: rx.clone(), ry: ry.clone(), transcript_sat_state: c, - t: t, + t, + mipp_proof, }, rx, ry, @@ -333,6 +333,7 @@ impl R1CSProof { let dp1 = start.elapsed().as_millis(); 
prove_inner.stop(); + // this is universal, we don't measure it let start = Instant::now(); let (pk, vk) = Groth16::

::setup(circuit.clone(), &mut rng).unwrap(); let ds = start.elapsed().as_millis(); @@ -344,24 +345,25 @@ impl R1CSProof { prove_outer.stop(); let start = Instant::now(); + let verifier_time = Timer::new("groth16_verification"); let is_verified = Groth16::

::verify(&vk, &[], &proof).unwrap(); assert!(is_verified); + verifier_time.stop(); let timer_verification = Timer::new("commitverification"); - let mut dummy = self.ry[1..].to_vec(); - // TODO: ensure ark-poly-commit and Spartan produce consistent results - // when evaluating a polynomial at a given point so this reverse is not - // needed. - dummy.reverse(); + transcript.new_from_state(&self.transcript_sat_state); // Verifies the proof of opening against the result of evaluating the // witness polynomial at point ry. - let res = PolyList::verify_q( + let res = Polynomial::verify( + transcript, &gens.gens_pc.vk, &self.comm, - &dummy, + &self.ry[1..], self.eval_vars_at_ry, &self.proof_eval_vars_at_ry, + &self.mipp_proof, + &self.t, ); timer_verification.stop(); @@ -382,7 +384,10 @@ impl R1CSProof { transcript: &mut PoseidonTranscript, gens: &R1CSGens, ) -> Result { - // self.comm.append_to_poseidon(transcript); + // serialise and add the IPP commitment to the transcript + let mut bytes = Vec::new(); + self.t.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); let c = transcript.challenge_scalar(); diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 17be7997..4e7183a8 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,29 +1,43 @@ -use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_bls12_377::{Bls12_377 as I, G1Affine, G2Affine}; use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; -use ark_ff::{One, PrimeField}; +use ark_ff::{BigInteger256, One, PrimeField}; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, }; +use ark_serialize::CanonicalSerialize; use rayon::prelude::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, }; +use snarkpack::mipp::MippProof; use super::scalar::Scalar; -use crate::{dense_mlpoly::DensePolynomial, math::Math, timer::Timer}; +use crate::{ + dense_mlpoly::DensePolynomial, + math::Math, + 
poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}, + timer::Timer, + transcript, +}; -pub struct PolyList { +pub struct Polynomial { m: usize, polys: Vec, + q: Option, + chis_b: Option>, } -impl PolyList { +impl Polynomial { // Given the evaluations over the boolean hypercube of a polynomial p of size // 2*m compute the sqrt-sized polynomials p_i as // p_i(Y) = \sum_{j \in \{0,1\}^m} p(j, i) * chi_j(Y) // where p(X,Y) = \sum_{i \in \{0,\1}^m} chi_i(X) * p_i(Y) - pub fn new(Z: &[Scalar]) -> Self { + // + // TODO: add case when the length of the list is not an even power of 2 + pub fn from_evaluations(Z: &[Scalar]) -> Self { let pl_timer = Timer::new("poly_list_build"); + // check the evaluation list is a power of 2 + debug_assert!(Z.len() & (Z.len() - 1) == 0); let m = Z.len().log_2() / 2; let pow_m = 2_usize.pow(m as u32); let polys: Vec = (0..pow_m) @@ -36,25 +50,30 @@ impl PolyList { DensePolynomial::new(z) }) .collect(); - assert!(polys.len() == pow_m); + debug_assert!(polys.len() == pow_m); pl_timer.stop(); - Self { m, polys } + Self { + m, + polys, + q: None, + chis_b: None, + } } // Given point = (\vec{a}, \vec{b}), compute the polynomial q as // q(Y) = // \sum_{j \in \{0,1\}^m}(\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(a)) * chi_j(Y) // and p(a,b) = q(b) where p is the initial polynomial - - pub fn get_q(&self, point: &[Scalar]) -> DensePolynomial { + fn get_q(&mut self, point: &[Scalar]) { let q_timer = Timer::new("build_q"); - assert!(point.len() == 2 * self.m); + debug_assert!(point.len() == 2 * self.m); let a = &point[0..self.m]; + let b = &point[self.m..2 * self.m]; let pow_m = 2_usize.pow(self.m as u32); let chis: Vec = (0..pow_m) .into_par_iter() - .map(|i| Self::get_chi_i(a, i)) + .map(|i| Self::get_chi_i(b, i)) .collect(); let z_q: Vec = (0..pow_m) @@ -63,33 +82,36 @@ impl PolyList { .collect(); q_timer.stop(); - DensePolynomial::new(z_q) + self.q = Some(DensePolynomial::new(z_q)); + self.chis_b = Some(chis); } // Given point = (\vec{a}, 
\vec{b}) used to construct q // compute q(b) = p(a,b). - pub fn eval_q(q: DensePolynomial, point: &[Scalar]) -> Scalar { + pub fn eval(&mut self, point: &[Scalar]) -> Scalar { + let a = &point[0..point.len() / 2]; let b = &point[point.len() / 2..point.len()]; + if self.q.is_none() { + self.get_q(point); + } + let q = self.q.clone().unwrap(); let prods = (0..q.Z.len()) .into_par_iter() - .map(|j| q.Z[j] * PolyList::get_chi_i(b, j)); + .map(|j| q.Z[j] * Polynomial::get_chi_i(&a, j)); prods.sum() } - pub fn commit( - poly_list: &PolyList, - ck: &CommitterKey, - ) -> (Vec>, ::Fqk) { + pub fn commit(&self, ck: &CommitterKey) -> (Vec>, ::Fqk) { let timer_commit = Timer::new("sqrt_commit"); let timer_list = Timer::new("comm_list"); // commit to each of the sqrt sized p_i - let comm_list: Vec> = poly_list + let comm_list: Vec> = self .polys .par_iter() - .map(|p| MultilinearPC::::commit(&ck.clone(), p)) + .map(|p| MultilinearPC::::commit(&ck, p)) .collect(); timer_list.stop(); @@ -108,7 +130,7 @@ impl PolyList { ) .collect(); - // computer the IPP commitment + // compute the IPP commitment let t = I::product_of_pairings(pairings.iter()); ipp_timer.stop(); @@ -122,7 +144,7 @@ impl PolyList { let mut prod = Scalar::one(); for j in 0..m { let b_j = b[j]; - if i >> j & 1 == 1 { + if i >> (m - j - 1) & 1 == 1 { prod = prod * b_j; } else { prod = prod * (Scalar::one() - b_j) @@ -131,79 +153,103 @@ impl PolyList { prod } - pub fn open_q( + pub fn open( + &mut self, + transcript: &mut PoseidonTranscript, comm_list: Vec>, ck: &CommitterKey, - q: &DensePolynomial, point: &[Scalar], - ) -> (Commitment, Proof) { + t: &::Fqk, + ) -> (Commitment, Proof, MippProof) { let m = point.len() / 2; let a = &point[0..m]; let b = &point[m..2 * m]; + if self.q.is_none() { + self.get_q(point); + } + + let q = self.q.clone().unwrap(); + let timer_open = Timer::new("sqrt_open"); // Compute the PST commitment to q obtained as the inner products of the // commitments to the polynomials p_i and chi_i(a) 
for i ranging over the // boolean hypercube of size m. let m = a.len(); - let pow_m = 2_usize.pow(m as u32); let timer_msm = Timer::new("msm"); - let chis: Vec<_> = (0..pow_m) - .into_par_iter() - .map(|i| Self::get_chi_i(a, i).into_repr()) - .collect(); - assert!(chis.len() == comm_list.len()); - - let c_u = VariableBaseMSM::multi_scalar_mul( - comm_list - .par_iter() - .map(|c| c.g_product) - .collect::>() - .as_slice(), - chis.as_slice(), - ) - .into_affine(); + if self.chis_b.is_none() { + panic!("chis(b) should have been computed for q"); + } + let chis = self.chis_b.clone().unwrap(); + let chis_repr: Vec = chis.par_iter().map(|y| y.into_repr()).collect(); + assert!(chis_repr.len() == comm_list.len()); + + let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); + + let c_u = + VariableBaseMSM::multi_scalar_mul(a_vec.as_slice(), chis_repr.as_slice()).into_affine(); + timer_msm.stop(); let U: Commitment = Commitment { nv: q.num_vars, g_product: c_u, }; - timer_msm.stop(); - - let comm = MultilinearPC::::commit(ck, q); - assert!(c_u == comm.g_product); + let comm = MultilinearPC::::commit(ck, &q); + debug_assert!(c_u == comm.g_product); + let h_vec = ck.powers_of_h[0].clone(); - // TODO: MIPP proof that U is the inner product of the opening - // vector A to T and the vector y + // MIPP proof that U is the inner product of the vector A + // and the vector y, where A is the opening vector to T + let timer_mipp_proof = Timer::new("mipp_prove"); + let mipp_proof = + MippProof::::prove::(transcript, ck, a_vec, chis, h_vec, &c_u, t) + .unwrap(); + timer_mipp_proof.stop(); // PST proof for opening q at b - let timer_proof = Timer::new("open"); - let pst_proof = MultilinearPC::::open(ck, q, &b); + let timer_proof = Timer::new("pst_open"); + let mut a_rev = a.to_vec().clone(); + a_rev.reverse(); + let pst_proof = MultilinearPC::::open(ck, &q, &a_rev); timer_proof.stop(); timer_open.stop(); // TODO: add MIPP proof as return value - (U, pst_proof) + (U, 
pst_proof, mipp_proof) } - pub fn verify_q( + pub fn verify( + transcript: &mut PoseidonTranscript, vk: &VerifierKey, U: &Commitment, point: &[Scalar], v: Scalar, pst_proof: &Proof, - // TODO: add MIPP proof as argument + mipp_proof: &MippProof, + T: &::Fqk, ) -> bool { - // TODO: MIPP verification - let len = point.len(); + let a = &point[0..len / 2]; let b = &point[len / 2..len]; - - let timer_verify = Timer::new("sqrt_verify"); - let res = MultilinearPC::::check(vk, U, b, v, pst_proof); - timer_verify.stop(); + let timer_mipp_verify = Timer::new("mipp_verify"); + let res_mipp = MippProof::::verify::( + vk, + transcript, + mipp_proof, + b.to_vec(), + &U.g_product, + T, + ); + assert!(res_mipp == true); + timer_mipp_verify.stop(); + + let mut a_rev = a.to_vec().clone(); + a_rev.reverse(); + let timer_pst_verify = Timer::new("pst_verify"); + let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); + timer_pst_verify.stop(); res } } @@ -212,6 +258,8 @@ impl PolyList { mod tests { use std::clone; + use crate::parameters::poseidon_params; + use super::*; use ark_ff::Zero; use ark_std::UniformRand; @@ -232,11 +280,8 @@ mod tests { let p = DensePolynomial::new(Z.clone()); let res1 = p.evaluate(&r); - let mut r_new = r.to_vec(); - r_new.reverse(); - let pl = PolyList::new(&Z.clone()); - let q = pl.get_q(&r_new); - let res2 = PolyList::eval_q(q.clone(), &r_new); + let mut pl = Polynomial::from_evaluations(&Z.clone()); + let res2 = pl.eval(&r); assert!(res1 == res2); } @@ -244,7 +289,7 @@ mod tests { #[test] fn check_new_poly_commit() { let mut rng = ark_std::test_rng(); - let num_vars = 26; + let num_vars = 4; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len) .into_iter() @@ -255,19 +300,32 @@ mod tests { .map(|_| Scalar::rand(&mut rng)) .collect(); - let gens = MultilinearPC::::setup(13, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&gens, 13); + let gens = MultilinearPC::::setup(2, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&gens, 2); + + let mut 
pl = Polynomial::from_evaluations(&Z.clone()); + + let v = pl.eval(&r); - let pl = PolyList::new(&Z.clone()); - let q = pl.get_q(&r); + let (comm_list, t) = pl.commit(&ck); - let v = PolyList::eval_q(q.clone(), &r); + let params = poseidon_params(); + let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (comm_list, t) = PolyList::commit(&pl, &ck); + let (u, pst_proof, mipp_proof) = pl.open(&mut prover_transcript, comm_list, &ck, &r, &t); - let (u, pst_proof) = PolyList::open_q(comm_list, &ck, &q, &r); + let mut verifier_transcript = PoseidonTranscript::new(¶ms); - let res = PolyList::verify_q(&vk, &u, &r, v, &pst_proof); + let res = Polynomial::verify( + &mut verifier_transcript, + &vk, + &u, + &r, + v, + &pst_proof, + &mipp_proof, + &t, + ); assert!(res == true); } } From 50ba972f051cac21ee196c305934d2a3575b46b8 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Mon, 6 Feb 2023 14:24:23 +0100 Subject: [PATCH 06/64] finalizing --- src/mipp.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mipp.rs b/src/mipp.rs index 61fb0a97..a31676a2 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -360,14 +360,14 @@ pub trait Transcript { fn append(&mut self, label: &'static [u8], point: &S); fn challenge_scalar(&mut self, label: &'static [u8]) -> F; } + #[cfg(test)] mod tests { - use ark_bls12_381::{Bls12_381, Fr}; + use ark_bls12_377::{Bls12_377 as E, Fr}; use ark_ec::PairingEngine; use ark_poly::DenseMultilinearExtension; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_std::{test_rng, UniformRand}; - type E = Bls12_381; #[test] fn test_setup() { let mut rng = test_rng(); From 4d6883101b75dd91e7c63afe98680f28d6ab27ef Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Thu, 19 Jan 2023 14:42:00 +0000 Subject: [PATCH 07/64] snarkpack integration --- Cargo.toml | 5 +- src/constraints.rs | 2 - src/lib.rs | 8 +- src/poseidon_transcript.rs | 36 ++++++- src/r1csproof.rs | 49 +++++---- src/sqrt_pst.rs | 213 +++++++++++++++++++++++-------------- 6 
files changed, 202 insertions(+), 111 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index da7c8353..6d896568 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,6 +42,8 @@ ark-groth16 = { version = "^0.3.0", features = ["r1cs"] } ark-bw6-761 = { version = "^0.3.0" } ark-poly-commit = { version = "^0.3.0" } ark-poly = {version = "^0.3.0"} +snarkpack = { path="../snarkpack"} + lazy_static = "1.4.0" rand = { version = "0.8", features = [ "std", "std_rng" ] } @@ -91,5 +93,6 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri [patch.crates-io] ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"} -ark-poly-commit = {git = "https://github.com/maramihali/poly-commit"} +ark-poly-commit = {git = "https://github.com/maramihali/poly-commit", branch="pst_g2"} + diff --git a/src/constraints.rs b/src/constraints.rs index 8ffb6fb3..71436c7e 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -393,7 +393,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let expected_claim_post_phase2_var = eval_Z_at_ry_var * scalar_var; claim_post_phase2_var.enforce_equal(&expected_claim_post_phase2_var)?; - let expected_transcript_state_var = transcript_var.challenge()?; let claimed_transcript_state_var = FpVar::::new_input(cs, || Ok(self.claimed_transcript_sat_state))?; @@ -401,7 +400,6 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { // Ensure that the prover and verifier transcipt views are consistent at // the end of the satisfiability proof. expected_transcript_state_var.enforce_equal(&claimed_transcript_state_var)?; - Ok(()) } } diff --git a/src/lib.rs b/src/lib.rs index 2dd548a8..001db7cc 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -407,7 +407,7 @@ impl SNARK { // side all the previous updates are done on the transcript // circuit variable and the transcript outside the circuit will be // inconsistent wrt to the prover's. 
- transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check @@ -480,7 +480,7 @@ impl SNARK { // TODO: find a way to retrieve this state from the circuit. Currently // the API for generating constraints doesn't support returning values // computed inside the circuit. - transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); + // transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); let (Ar, Br, Cr) = &self.inst_evals; transcript.append_scalar(&Ar); @@ -598,10 +598,10 @@ impl NIZK { // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check - let timer_eval = Timer::new("eval_sparse_polys"); + // let timer_eval = Timer::new("eval_sparse_polys"); let (claimed_rx, claimed_ry) = &self.r; let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); - timer_eval.stop(); + // timer_eval.stop(); let timer_sat_proof = Timer::new("verify_sat_proof"); assert_eq!(input.assignment.len(), inst.inst.get_num_inputs()); diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index f4440226..247b8c35 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,14 +1,16 @@ use crate::group::{CompressedGroup, Fr}; use super::scalar::Scalar; -use ark_bls12_377::Bls12_377 as I; +use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_ec::PairingEngine; +use ark_ff::{Field, PrimeField}; use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; -// use ark_r1cs_std::prelude::*; use ark_sponge::{ poseidon::{PoseidonParameters, PoseidonSponge}, CryptographicSponge, }; +use snarkpack::Transcript; #[derive(Clone)] /// TODO @@ -17,6 +19,22 @@ pub struct PoseidonTranscript { params: PoseidonParameters, } +impl Transcript for PoseidonTranscript { 
+ fn domain_sep(&mut self) { + self.sponge.absorb(&b"testudo".to_vec()); + } + + fn append(&mut self, label: &'static [u8], point: &S) { + let mut buf = Vec::new(); + point.serialize(&mut buf).expect("serialization failed"); + self.sponge.absorb(&buf); + } + + fn challenge_scalar(&mut self, label: &'static [u8]) -> F { + self.sponge.squeeze_field_elements(1).remove(0) + } +} + impl PoseidonTranscript { /// create a new transcript pub fn new(params: &PoseidonParameters) -> Self { @@ -56,6 +74,12 @@ impl PoseidonTranscript { } } + pub fn append_gt(&mut self, g_t: &::Fqk) { + let mut bytes = Vec::new(); + g_t.serialize(&mut bytes).unwrap(); + self.append_bytes(&bytes); + } + pub fn challenge_scalar(&mut self) -> Scalar { self.sponge.squeeze_field_elements(1).remove(0) } @@ -82,3 +106,11 @@ impl AppendToPoseidon for Commitment { transcript.append_bytes(&bytes); } } + +impl AppendToPoseidon for G1Affine { + fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { + let mut bytes = Vec::new(); + self.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); + } +} diff --git a/src/r1csproof.rs b/src/r1csproof.rs index a2b3ede8..20d053d3 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -4,7 +4,7 @@ use crate::group::{Fq, Fr}; use crate::math::Math; use crate::parameters::poseidon_params; use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; -use crate::sqrt_pst::PolyList; +use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; use ark_bls12_377::Bls12_377 as I; use ark_bw6_761::BW6_761 as P; @@ -12,6 +12,7 @@ use ark_ec::PairingEngine; use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; +use snarkpack::mipp::MippProof; use super::commitments::MultiCommitGens; use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; @@ -45,6 +46,7 @@ pub struct R1CSProof { // The transcript 
state after the satisfiability proof was computed. pub transcript_sat_state: Scalar, pub t: ::Fqk, + pub mipp_proof: MippProof, } #[derive(Clone)] pub struct R1CSSumcheckGens { @@ -146,12 +148,12 @@ impl R1CSProof { // create the multilinear witness polynomial from the satisfying assiment // expressed as the list of sqrt-sized polynomials - let pl = PolyList::new(&vars.clone()); + let mut pl = Polynomial::from_evaluations(&vars.clone()); let timer_commit = Timer::new("polycommit"); // commitment list to the satisfying witness polynomial list - let (comm_list, t) = PolyList::commit(&pl, &gens.gens_pc.ck); + let (comm_list, t) = pl.commit(&gens.gens_pc.ck); let mut bytes = Vec::new(); t.serialize(&mut bytes).unwrap(); @@ -237,31 +239,28 @@ impl R1CSProof { transcript, ); timer_sc_proof_phase2.stop(); + let c = transcript.challenge_scalar(); + transcript.new_from_state(&c); // TODO: modify the polynomial evaluation in Spartan to be consistent // with the evaluation in ark-poly-commit so that reversing is not needed // anymore let timmer_opening = Timer::new("polyopening"); - let mut dummy = ry[1..].to_vec().clone(); - dummy.reverse(); - let q = pl.get_q(&dummy); + timer_prove.stop(); - let (comm, proof_eval_vars_at_ry) = PolyList::open_q(comm_list, &gens.gens_pc.ck, &q, &dummy); + let (comm, proof_eval_vars_at_ry, mipp_proof) = + pl.open(transcript, comm_list, &gens.gens_pc.ck, &ry[1..], &t); println!( "proof size (no of quotients): {:?}", proof_eval_vars_at_ry.proofs.len() ); - // comm.append_to_poseidon(transcript); + timmer_opening.stop(); let timer_polyeval = Timer::new("polyeval"); - let eval_vars_at_ry = PolyList::eval_q(q.clone(), &dummy); + let eval_vars_at_ry = pl.eval(&ry[1..]); timer_polyeval.stop(); - timer_prove.stop(); - - let c = transcript.challenge_scalar(); - ( R1CSProof { comm, @@ -273,7 +272,8 @@ impl R1CSProof { rx: rx.clone(), ry: ry.clone(), transcript_sat_state: c, - t: t, + t, + mipp_proof, }, rx, ry, @@ -333,6 +333,7 @@ impl R1CSProof { let 
dp1 = start.elapsed().as_millis(); prove_inner.stop(); + // this is universal, we don't measure it let start = Instant::now(); let (pk, vk) = Groth16::

::setup(circuit.clone(), &mut rng).unwrap(); let ds = start.elapsed().as_millis(); @@ -344,24 +345,25 @@ impl R1CSProof { prove_outer.stop(); let start = Instant::now(); + let verifier_time = Timer::new("groth16_verification"); let is_verified = Groth16::

::verify(&vk, &[], &proof).unwrap(); assert!(is_verified); + verifier_time.stop(); let timer_verification = Timer::new("commitverification"); - let mut dummy = self.ry[1..].to_vec(); - // TODO: ensure ark-poly-commit and Spartan produce consistent results - // when evaluating a polynomial at a given point so this reverse is not - // needed. - dummy.reverse(); + transcript.new_from_state(&self.transcript_sat_state); // Verifies the proof of opening against the result of evaluating the // witness polynomial at point ry. - let res = PolyList::verify_q( + let res = Polynomial::verify( + transcript, &gens.gens_pc.vk, &self.comm, - &dummy, + &self.ry[1..], self.eval_vars_at_ry, &self.proof_eval_vars_at_ry, + &self.mipp_proof, + &self.t, ); timer_verification.stop(); @@ -382,7 +384,10 @@ impl R1CSProof { transcript: &mut PoseidonTranscript, gens: &R1CSGens, ) -> Result { - // self.comm.append_to_poseidon(transcript); + // serialise and add the IPP commitment to the transcript + let mut bytes = Vec::new(); + self.t.serialize(&mut bytes).unwrap(); + transcript.append_bytes(&bytes); let c = transcript.challenge_scalar(); diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 17be7997..74056d33 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,29 +1,43 @@ -use ark_bls12_377::{Bls12_377 as I, G1Affine}; +use ark_bls12_377::{Bls12_377 as I, G1Affine, G2Affine}; use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; -use ark_ff::{One, PrimeField}; +use ark_ff::{BigInteger256, One, PrimeField}; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, }; +use ark_serialize::CanonicalSerialize; use rayon::prelude::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, }; +use snarkpack::mipp::MippProof; use super::scalar::Scalar; -use crate::{dense_mlpoly::DensePolynomial, math::Math, timer::Timer}; +use crate::{ + dense_mlpoly::DensePolynomial, + math::Math, + 
poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}, + timer::Timer, + transcript, +}; -pub struct PolyList { +pub struct Polynomial { m: usize, polys: Vec, + q: Option, + chis_b: Option>, } -impl PolyList { +impl Polynomial { // Given the evaluations over the boolean hypercube of a polynomial p of size // 2*m compute the sqrt-sized polynomials p_i as // p_i(Y) = \sum_{j \in \{0,1\}^m} p(j, i) * chi_j(Y) // where p(X,Y) = \sum_{i \in \{0,\1}^m} chi_i(X) * p_i(Y) - pub fn new(Z: &[Scalar]) -> Self { + // + // TODO: add case when the length of the list is not an even power of 2 + pub fn from_evaluations(Z: &[Scalar]) -> Self { let pl_timer = Timer::new("poly_list_build"); + // check the evaluation list is a power of 2 + assert!(Z.len() & (Z.len() - 1) == 0); let m = Z.len().log_2() / 2; let pow_m = 2_usize.pow(m as u32); let polys: Vec = (0..pow_m) @@ -38,23 +52,27 @@ impl PolyList { .collect(); assert!(polys.len() == pow_m); pl_timer.stop(); - Self { m, polys } + Self { + m, + polys, + q: None, + chis_b: None, + } } // Given point = (\vec{a}, \vec{b}), compute the polynomial q as // q(Y) = - // \sum_{j \in \{0,1\}^m}(\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(a)) * chi_j(Y) + // \sum_{j \in \{0,1\}^m}(\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(b)) * chi_j(Y) // and p(a,b) = q(b) where p is the initial polynomial - - pub fn get_q(&self, point: &[Scalar]) -> DensePolynomial { + fn get_q(&mut self, point: &[Scalar]) { let q_timer = Timer::new("build_q"); assert!(point.len() == 2 * self.m); - let a = &point[0..self.m]; + let b = &point[self.m..2 * self.m]; let pow_m = 2_usize.pow(self.m as u32); let chis: Vec = (0..pow_m) .into_par_iter() - .map(|i| Self::get_chi_i(a, i)) + .map(|i| Self::get_chi_i(b, i)) .collect(); let z_q: Vec = (0..pow_m) @@ -63,33 +81,36 @@ impl PolyList { .collect(); q_timer.stop(); - DensePolynomial::new(z_q) + self.q = Some(DensePolynomial::new(z_q)); + self.chis_b = Some(chis); } - // Given point = (\vec{a}, \vec{b}) used to construct q - // 
compute q(b) = p(a,b). - pub fn eval_q(q: DensePolynomial, point: &[Scalar]) -> Scalar { + // Given point = (\vec{a}, \vec{b}) evaluate the polynomial as + // + pub fn eval(&mut self, point: &[Scalar]) -> Scalar { + let a = &point[0..point.len() / 2]; let b = &point[point.len() / 2..point.len()]; + if self.q.is_none() { + self.get_q(point); + } + let q = self.q.clone().unwrap(); let prods = (0..q.Z.len()) .into_par_iter() - .map(|j| q.Z[j] * PolyList::get_chi_i(b, j)); + .map(|j| q.Z[j] * Polynomial::get_chi_i(&a, j)); prods.sum() } - pub fn commit( - poly_list: &PolyList, - ck: &CommitterKey, - ) -> (Vec>, ::Fqk) { + pub fn commit(&self, ck: &CommitterKey) -> (Vec>, ::Fqk) { let timer_commit = Timer::new("sqrt_commit"); let timer_list = Timer::new("comm_list"); // commit to each of the sqrt sized p_i - let comm_list: Vec> = poly_list + let comm_list: Vec> = self .polys .par_iter() - .map(|p| MultilinearPC::::commit(&ck.clone(), p)) + .map(|p| MultilinearPC::::commit(&ck, p)) .collect(); timer_list.stop(); @@ -108,7 +129,7 @@ impl PolyList { ) .collect(); - // computer the IPP commitment + // compute the IPP commitment let t = I::product_of_pairings(pairings.iter()); ipp_timer.stop(); @@ -122,7 +143,7 @@ impl PolyList { let mut prod = Scalar::one(); for j in 0..m { let b_j = b[j]; - if i >> j & 1 == 1 { + if i >> (m - j - 1) & 1 == 1 { prod = prod * b_j; } else { prod = prod * (Scalar::one() - b_j) @@ -131,79 +152,99 @@ impl PolyList { prod } - pub fn open_q( + pub fn open( + &mut self, + transcript: &mut PoseidonTranscript, comm_list: Vec>, ck: &CommitterKey, - q: &DensePolynomial, point: &[Scalar], - ) -> (Commitment, Proof) { + t: &::Fqk, + ) -> (Commitment, Proof, MippProof) { let m = point.len() / 2; let a = &point[0..m]; - let b = &point[m..2 * m]; + + if self.q.is_none() { + self.get_q(point); + } + + let q = self.q.clone().unwrap(); let timer_open = Timer::new("sqrt_open"); // Compute the PST commitment to q obtained as the inner products of the // 
commitments to the polynomials p_i and chi_i(a) for i ranging over the // boolean hypercube of size m. - let m = a.len(); - let pow_m = 2_usize.pow(m as u32); let timer_msm = Timer::new("msm"); - let chis: Vec<_> = (0..pow_m) - .into_par_iter() - .map(|i| Self::get_chi_i(a, i).into_repr()) - .collect(); - assert!(chis.len() == comm_list.len()); - - let c_u = VariableBaseMSM::multi_scalar_mul( - comm_list - .par_iter() - .map(|c| c.g_product) - .collect::>() - .as_slice(), - chis.as_slice(), - ) - .into_affine(); + if self.chis_b.is_none() { + panic!("chis(b) should have been computed for q"); + } + let chis = self.chis_b.clone().unwrap(); + let chis_repr: Vec = chis.par_iter().map(|y| y.into_repr()).collect(); + assert!(chis_repr.len() == comm_list.len()); + + let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); + + let c_u = + VariableBaseMSM::multi_scalar_mul(a_vec.as_slice(), chis_repr.as_slice()).into_affine(); + timer_msm.stop(); let U: Commitment = Commitment { nv: q.num_vars, g_product: c_u, }; - timer_msm.stop(); - - let comm = MultilinearPC::::commit(ck, q); + let comm = MultilinearPC::::commit(ck, &q); assert!(c_u == comm.g_product); + let h_vec = ck.powers_of_h[0].clone(); - // TODO: MIPP proof that U is the inner product of the opening - // vector A to T and the vector y - - // PST proof for opening q at b - let timer_proof = Timer::new("open"); - let pst_proof = MultilinearPC::::open(ck, q, &b); + // MIPP proof that U is the inner product of the vector A + // and the vector y, where A is the opening vector to T + let timer_mipp_proof = Timer::new("mipp_prove"); + let mipp_proof = + MippProof::::prove::(transcript, ck, a_vec, chis, h_vec, &c_u, t) + .unwrap(); + timer_mipp_proof.stop(); + + // PST proof for opening q at a + let timer_proof = Timer::new("pst_open"); + let mut a_rev = a.to_vec().clone(); + a_rev.reverse(); + let pst_proof = MultilinearPC::::open(ck, &q, &a_rev); timer_proof.stop(); timer_open.stop(); - - // TODO: add 
MIPP proof as return value - (U, pst_proof) + (U, pst_proof, mipp_proof) } - pub fn verify_q( + pub fn verify( + transcript: &mut PoseidonTranscript, vk: &VerifierKey, U: &Commitment, point: &[Scalar], v: Scalar, pst_proof: &Proof, - // TODO: add MIPP proof as argument + mipp_proof: &MippProof, + T: &::Fqk, ) -> bool { - // TODO: MIPP verification - let len = point.len(); + let a = &point[0..len / 2]; let b = &point[len / 2..len]; - - let timer_verify = Timer::new("sqrt_verify"); - let res = MultilinearPC::::check(vk, U, b, v, pst_proof); - timer_verify.stop(); + let timer_mipp_verify = Timer::new("mipp_verify"); + let res_mipp = MippProof::::verify::( + vk, + transcript, + mipp_proof, + b.to_vec(), + &U.g_product, + T, + ); + assert!(res_mipp == true); + timer_mipp_verify.stop(); + + let mut a_rev = a.to_vec().clone(); + a_rev.reverse(); + let timer_pst_verify = Timer::new("pst_verify"); + let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); + timer_pst_verify.stop(); res } } @@ -212,6 +253,8 @@ impl PolyList { mod tests { use std::clone; + use crate::parameters::poseidon_params; + use super::*; use ark_ff::Zero; use ark_std::UniformRand; @@ -232,11 +275,8 @@ mod tests { let p = DensePolynomial::new(Z.clone()); let res1 = p.evaluate(&r); - let mut r_new = r.to_vec(); - r_new.reverse(); - let pl = PolyList::new(&Z.clone()); - let q = pl.get_q(&r_new); - let res2 = PolyList::eval_q(q.clone(), &r_new); + let mut pl = Polynomial::from_evaluations(&Z.clone()); + let res2 = pl.eval(&r); assert!(res1 == res2); } @@ -244,7 +284,7 @@ mod tests { #[test] fn check_new_poly_commit() { let mut rng = ark_std::test_rng(); - let num_vars = 26; + let num_vars = 4; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len) .into_iter() @@ -255,19 +295,32 @@ mod tests { .map(|_| Scalar::rand(&mut rng)) .collect(); - let gens = MultilinearPC::::setup(13, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&gens, 13); + let gens = MultilinearPC::::setup(2, &mut rng); + let (ck, 
vk) = MultilinearPC::::trim(&gens, 2); + + let mut pl = Polynomial::from_evaluations(&Z.clone()); + + let v = pl.eval(&r); - let pl = PolyList::new(&Z.clone()); - let q = pl.get_q(&r); + let (comm_list, t) = pl.commit(&ck); - let v = PolyList::eval_q(q.clone(), &r); + let params = poseidon_params(); + let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (comm_list, t) = PolyList::commit(&pl, &ck); + let (u, pst_proof, mipp_proof) = pl.open(&mut prover_transcript, comm_list, &ck, &r, &t); - let (u, pst_proof) = PolyList::open_q(comm_list, &ck, &q, &r); + let mut verifier_transcript = PoseidonTranscript::new(¶ms); - let res = PolyList::verify_q(&vk, &u, &r, v, &pst_proof); + let res = Polynomial::verify( + &mut verifier_transcript, + &vk, + &u, + &r, + v, + &pst_proof, + &mipp_proof, + &t, + ); assert!(res == true); } } From dc5bcc8aa3ffbacb7ba3532690bca8d549123918 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Mon, 6 Feb 2023 16:52:32 +0000 Subject: [PATCH 08/64] update mipp with latestest optimisations and add preliminary documentation --- src/mipp.rs | 223 ++++++++++++++++++++++++++++------------------------ 1 file changed, 121 insertions(+), 102 deletions(-) diff --git a/src/mipp.rs b/src/mipp.rs index a31676a2..e6108f8e 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -15,7 +15,7 @@ use ark_std::Zero; use rayon::iter::ParallelIterator; use rayon::prelude::IntoParallelIterator; use rayon::prelude::*; -use std::ops::{Add, Mul, MulAssign, SubAssign}; +use std::ops::{Add, AddAssign, Mul, MulAssign, SubAssign}; use thiserror::Error; #[derive(Debug, Clone, CanonicalDeserialize, CanonicalSerialize)] @@ -37,35 +37,35 @@ impl MippProof { U: &E::G1Affine, T: &::Fqk, ) -> Result, Error> { - // the values of vectors C and bits rescaled at each step of the loop - // these are A and y + // the values of vectors A and y rescaled at each step of the loop let (mut m_a, mut m_y) = (a.clone(), y.clone()); - // the values of the commitment keys rescaled at each step of 
the loop - // these are the h for me + // the values of the commitment keys h for the vector A rescaled at + // each step of the loop let mut m_h = h.clone(); - // storing the values for including in the proof - // these are T_l and T_r + // storing the cross commitments for including in the proofs let mut comms_t = Vec::new(); - // these are U_l and U_r let mut comms_u = Vec::new(); - // these are the x-es + + // the transcript challenges let mut xs: Vec = Vec::new(); let mut xs_inv: Vec = Vec::new(); - // we already appended t + // we append only the MIPP because the aggregated commitment T has been + // appended already transcript.append(b"U", U); + while m_a.len() > 1 { // recursive step // Recurse with problem of half size let split = m_a.len() / 2; - // MIPP /// - // c[:n'] c[n':] + // MIPP where n' = split/// + // a[:n'] a[n':] let (a_l, a_r) = m_a.split_at_mut(split); - // r[:n'] r[:n'] + // y[:n'] y[n':] let (y_l, y_r) = m_y.split_at_mut(split); - + // h[:n'] y[n':] let (h_l, h_r) = m_h.split_at_mut(split); // since we do this in parallel we take reference first so it can be @@ -76,19 +76,19 @@ impl MippProof { // See section 3.3 for paper version with equivalent names try_par! 
{ // MIPP part - // Compute cross commitment C^r - // z_l = c[n':] ^ r[:n'] + // Compute cross commitments + // u_l = a[n':] ^ y[:n'] // TODO to replace by bitsf_multiexp let comm_u_l = multiexponentiation(ra_l, &ry_r), - // Z_r = c[:n'] ^ r[n':] + // u_r = a[:n'] ^ y[n':] let comm_u_r = multiexponentiation(ra_r, &ry_l) }; - // Compute C commitment over the distinct halfs of C - // u_l = c[n':] * v[:n'] - let comm_t_l = pairings_product::(&ra_l, rh_r); - // u_r = c[:n'] * v[n':] - let comm_t_r = pairings_product::(&ra_r, rh_l); + // Compute the cross pairing products over the distinct halfs of A + // t_l = a[n':] * h[:n'] + let comm_t_l = pairings_product::(&a_l, h_r); + // t_r = a[:n'] * h[n':] + let comm_t_r = pairings_product::(&a_r, h_l); // Fiat-Shamir challenge transcript.append(b"comm_u_l", &comm_u_l); @@ -98,16 +98,16 @@ impl MippProof { let c_inv = transcript.challenge_scalar::(b"challenge_i"); // Optimization for multiexponentiation to rescale G2 elements with - // 128-bit challenge Swap 'c' and 'c_inv' since can't control bit size - // of c_inv + // 128-bit challenge Swap 'c' and 'c_inv' since we + // can't control bit size of c_inv let c = c_inv.inverse().unwrap(); // Set up values for next step of recursion - // c[:n'] + c[n':]^x + // a[n':] + a[:n']^x compress(&mut m_a, split, &c); + // y[n':] + y[:n']^x_inv compress_field(&mut m_y, split, &c_inv); - - // v_left + v_right^x^-1 + // h[n':] + h[:n']^x_inv compress(&mut m_h, split, &c_inv); comms_t.push((comm_t_l, comm_t_r)); @@ -121,16 +121,15 @@ impl MippProof { let final_a = m_a[0]; let final_h = m_h[0]; - // println!("before evaluations"); - // get polynomial + // get polynomial f_h let poly = DenseMultilinearExtension::::from_evaluations_vec( xs_inv.len(), Self::polynomial_evaluations_from_transcript::(&xs_inv), ); let c = MultilinearPC::::commit_g2(ck, &poly); - assert!(c.h_product == final_h); + debug_assert!(c.h_product == final_h); - // create proof that h is indeed correct + // create proof 
that final_h is well-formed let mut point: Vec = (0..poly.num_vars) .into_iter() .map(|_| transcript.challenge_scalar::(b"random_point")) @@ -138,16 +137,16 @@ impl MippProof { let pst_proof_h = MultilinearPC::::open_g1(ck, &poly, &point); - println!("PROVER: last challenge {}", xs.last().unwrap()); - println!("PROVER: last y {}", m_y.last().unwrap()); - println!("PROVER: last final c {:?}", m_a.last().unwrap()); + // println!("PROVER: last challenge {}", xs.last().unwrap()); + // println!("PROVER: last y {}", m_y.last().unwrap()); + // println!("PROVER: last final c {:?}", m_a.last().unwrap()); Ok( (MippProof { comms_t, comms_u, final_a, - final_h: final_h, + final_h, pst_proof_h, }), ) @@ -186,11 +185,16 @@ impl MippProof { let mut xs = Vec::new(); let mut xs_inv = Vec::new(); let mut final_y = E::Fr::one(); - let mut u_prime = U.clone().into_projective(); - let mut t_prime = T.clone(); + + let mut final_res = MippTU { + tc: T.clone(), + uc: U.into_projective(), + }; transcript.append(b"U", U); - assert!(comms_u.len() == point.len()); + + // Challenges need to be generated first in sequential order so the + // prover and the verifier have a consistent view of the transcript for (i, (comm_u, comm_t)) in comms_u.iter().zip(comms_t.iter()).enumerate() { let (comm_u_l, comm_u_r) = comm_u; let (comm_t_l, comm_t_r) = comm_t; @@ -205,36 +209,58 @@ impl MippProof { let c = c_inv.inverse().unwrap(); xs.push(c); xs_inv.push(c_inv); - } - let len = point.len(); - let final_y: E::Fr = (0..len) - .into_par_iter() - .map(|i| E::Fr::one() + xs_inv[i].mul(point[i]) - point[i]) - .product(); + // the verifier computes the final_y by themselves given + // it's field operations it is quite fast and parallelisation + // doesn't bring much improvement + final_y *= E::Fr::one() + c_inv.mul(point[i]) - point[i]; + } + enum Op<'a, E: PairingEngine> { + TC(&'a E::Fqk, ::BigInt), + UC(&'a E::G1Affine, ::BigInt), + } - u_prime += (0..len) - .into_iter() - .map(|i| { - let (comm_u_l, 
comm_u_r) = comms_u[i]; - comm_u_l.into_projective().mul(xs_inv[i].into_repr()) - + comm_u_r.into_projective().mul(xs[i].into_repr()) + let res = comms_t + .par_iter() + .zip(comms_u.par_iter()) + .zip(xs.par_iter().zip(xs_inv.par_iter())) + .flat_map(|((comm_t, comm_u), (c, c_inv))| { + let (comm_t_l, comm_t_r) = comm_t; + let (comm_u_l, comm_u_r) = comm_u; + + let c_repr = c.into_repr(); + let c_inv_repr = c_inv.into_repr(); + + // we multiple left side by x^-1 and right side by x + vec![ + Op::TC::(comm_t_l, c_inv_repr), + Op::TC(comm_t_r, c_repr), + Op::UC(comm_u_l, c_inv_repr), + Op::UC(comm_u_r, c_repr), + ] }) - .sum::(); - - t_prime *= (0..len) - .into_par_iter() - .map(|i| { - let (comm_t_l, comm_t_r) = comms_t[i]; - comm_t_l.pow(xs_inv[i].into_repr()) * comm_t_r.pow(xs[i].into_repr()) + .fold(MippTU::::default, |mut res, op: Op| { + match op { + Op::TC(tx, c) => { + let tx: E::Fqk = tx.pow(c); + res.tc.mul_assign(&tx); + } + Op::UC(zx, c) => { + let uxp: E::G1Projective = zx.mul(c); + res.uc.add_assign(&uxp); + } + } + res }) - .product::(); + .reduce(MippTU::default, |mut acc_res, res| { + acc_res.merge(&res); + acc_res + }); - println!("VERIFIER: last challenge {}", xs.last().unwrap()); - println!("VERIFIER: last y {}", final_y); - println!("VERIFIER: last final c from prover {:?}", proof.final_a); + // the initial values of T and U are merged to get the final result + let ref_final_res = &mut final_res; + ref_final_res.merge(&res); - // compute structured polynomial h at a random point let mut point: Vec = Vec::new(); let m = xs_inv.len(); for i in 0..m { @@ -246,26 +272,52 @@ impl MippProof { .map(|i| E::Fr::one() + point[i].mul(xs_inv[m - i - 1]) - point[i]) .product(); - // println!("VERIFIER: v is {}", v); - let comm_h = Commitment_G2 { nv: m, h_product: proof.final_h, }; let check_h = MultilinearPC::::check_2(vk, &comm_h, &point, v, &proof.pst_proof_h); - assert!(check_h == true); let final_u = proof.final_a.mul(final_y); let final_t: ::Fqk = 
E::pairing(proof.final_a, proof.final_h); - let check_t = t_prime == final_t; - assert!(check_t == true); - let check_u = u_prime == final_u; - assert!(check_u == true); + let check_t = ref_final_res.tc == final_t; + + let check_u = ref_final_res.uc == final_u; + check_h & check_u & check_t } } +/// MippTU keeps track of the variables that have been sent by the prover and +/// must be multiplied together by the verifier. +struct MippTU { + pub tc: E::Fqk, + pub uc: E::G1Projective, +} + +impl Default for MippTU +where + E: PairingEngine, +{ + fn default() -> Self { + Self { + tc: E::Fqk::one(), + uc: E::G1Projective::zero(), + } + } +} + +impl MippTU +where + E: PairingEngine, +{ + fn merge(&mut self, other: &Self) { + self.tc.mul_assign(&other.tc); + self.uc.add_assign(&other.uc); + } +} + /// compress is similar to commit::{V,W}KEY::compress: it modifies the `vec` /// vector by setting the value at index $i:0 -> split$ $vec[i] = vec[i] + /// vec[i+split]^scaler$. The `vec` vector is half of its size after this call. 
@@ -360,36 +412,3 @@ pub trait Transcript { fn append(&mut self, label: &'static [u8], point: &S); fn challenge_scalar(&mut self, label: &'static [u8]) -> F; } - -#[cfg(test)] -mod tests { - use ark_bls12_377::{Bls12_377 as E, Fr}; - use ark_ec::PairingEngine; - use ark_poly::DenseMultilinearExtension; - use ark_poly_commit::multilinear_pc::MultilinearPC; - use ark_std::{test_rng, UniformRand}; - #[test] - fn test_setup() { - let mut rng = test_rng(); - let params = MultilinearPC::::setup(2, &mut rng); - // list of evaluation for polynomial - // 1 + 2*x_1 + x_2 + x_1x_2 - let evals_1 = vec![ - Fr::from(1u64), - Fr::from(4u64), - Fr::from(2u64), - Fr::from(5u64), - ]; - let poly_1 = DenseMultilinearExtension::::from_evaluations_vec(2, evals_1); - - // list of evaluation for polynomial - // 1 + x_1 + x_2 + 2*x_1x_2 - let evals_2 = vec![ - Fr::from(1u64), - Fr::from(2u64), - Fr::from(2u64), - Fr::from(5u64), - ]; - let poly_2 = DenseMultilinearExtension::::from_evaluations_vec(2, evals_2); - } -} From 8fc381ec57c16de50e87ed891a30c216d29079c7 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Tue, 7 Feb 2023 10:58:18 +0000 Subject: [PATCH 09/64] improve codebase documentation --- benches/r1cs.rs | 10 +------ src/mipp.rs | 69 ++++++++++++++++++++++++++++-------------------- src/r1csproof.rs | 11 ++++---- src/sqrt_pst.rs | 22 ++++++++------- 4 files changed, 59 insertions(+), 53 deletions(-) diff --git a/benches/r1cs.rs b/benches/r1cs.rs index 5852c3f8..ccbc2493 100644 --- a/benches/r1cs.rs +++ b/benches/r1cs.rs @@ -11,7 +11,6 @@ struct BenchmarkResults { power: usize, input_constraints: usize, spartan_verifier_circuit_constraints: usize, - r1cs_instance_generation_time: u128, spartan_proving_time: u128, groth16_setup_time: u128, groth16_proving_time: u128, @@ -21,13 +20,7 @@ struct BenchmarkResults { fn main() { let mut writer = csv::Writer::from_path("testudo.csv").expect("unable to open csv writer"); - // for &s in [ - // 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 
22, 23, 24, 25, 26, - // ] - // .iter() - // For testing purposes we currently bench on very small instance to ensure - // correctness and then on biggest one for timings. - for &s in [4, 26].iter() { + for &s in [10, 12, 14, 16, 18, 20, 22, 24, 26].iter() { println!("Running for {} inputs", s); let mut br = BenchmarkResults::default(); let num_vars = (2_usize).pow(s as u32); @@ -39,7 +32,6 @@ fn main() { let start = Instant::now(); let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); let duration = start.elapsed().as_millis(); - br.r1cs_instance_generation_time = duration; let mut prover_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); let gens = NIZKGens::new(num_cons, num_vars, num_inputs); diff --git a/src/mipp.rs b/src/mipp.rs index e6108f8e..ca092b2e 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -121,7 +121,8 @@ impl MippProof { let final_a = m_a[0]; let final_h = m_h[0]; - // get polynomial f_h + // get the structured polynomial f_h for which final_h = h^f_h(vec{t}) + // is the PST commitment given generator h and toxic waste t let poly = DenseMultilinearExtension::::from_evaluations_vec( xs_inv.len(), Self::polynomial_evaluations_from_transcript::(&xs_inv), @@ -129,38 +130,39 @@ impl MippProof { let c = MultilinearPC::::commit_g2(ck, &poly); debug_assert!(c.h_product == final_h); - // create proof that final_h is well-formed - let mut point: Vec = (0..poly.num_vars) + // generate a proof of opening final_h at a random point + let rs: Vec = (0..poly.num_vars) .into_iter() .map(|_| transcript.challenge_scalar::(b"random_point")) .collect(); - let pst_proof_h = MultilinearPC::::open_g1(ck, &poly, &point); + let pst_proof_h = MultilinearPC::::open_g1(ck, &poly, &rs); - // println!("PROVER: last challenge {}", xs.last().unwrap()); - // println!("PROVER: last y {}", m_y.last().unwrap()); - // println!("PROVER: last final c {:?}", m_a.last().unwrap()); - - Ok( - (MippProof { - comms_t, - comms_u, - final_a, 
- final_h, - pst_proof_h, - }), - ) + Ok(MippProof { + comms_t, + comms_u, + final_a, + final_h, + pst_proof_h, + }) } + // builds the polynomial f_h in Lagrange basis which uses the + // inverses of transcript challenges this is the following + // structured polynomial $\prod_i(1 - z_i + cs_inv[m - i - 1] * z_i)$ + // where m is the length of cs_inv and z_i is the unknown fn polynomial_evaluations_from_transcript(cs_inv: &[F]) -> Vec { let m = cs_inv.len(); let pow_m = 2_usize.pow(m as u32); + // Constructs the list of evaluations over the boolean hypercube let evals = (0..pow_m) .into_par_iter() .map(|i| { let mut res = F::one(); for j in 0..m { + // We iterate (m - 1)th bit to 0th bit and, in case the bit is 1 + // we multiply by the corresponding challenge. if (i >> j) & 1 == 1 { res *= cs_inv[m - j - 1]; } @@ -204,17 +206,24 @@ impl MippProof { transcript.append(b"comm_u_r", comm_u_r); transcript.append(b"comm_t_l", comm_t_l); transcript.append(b"comm_t_r", comm_t_r); - let c_inv = transcript.challenge_scalar::(b"challenge_i"); + let c_inv = transcript.challenge_scalar::(b"challenge_i"); let c = c_inv.inverse().unwrap(); + xs.push(c); xs_inv.push(c_inv); // the verifier computes the final_y by themselves given - // it's field operations it is quite fast and parallelisation + // it's field operations so quite fast and parallelisation // doesn't bring much improvement + // TODO: look into alternatives final_y *= E::Fr::one() + c_inv.mul(point[i]) - point[i]; } + + // First, each entry of T and U are multiplied independently by their + // respective challenges which is done in parralel and, at the end, + // the results are merged together for each vector following their + // corresponding merge operation. 
enum Op<'a, E: PairingEngine> { TC(&'a E::Fqk, ::BigInt), UC(&'a E::G1Affine, ::BigInt), @@ -257,26 +266,30 @@ impl MippProof { acc_res }); - // the initial values of T and U are merged to get the final result + // the initial values of T and U are also merged to get the final result let ref_final_res = &mut final_res; ref_final_res.merge(&res); - let mut point: Vec = Vec::new(); + let mut rs: Vec = Vec::new(); let m = xs_inv.len(); for i in 0..m { - let r = transcript.challenge_scalar::(b"random_point"); - point.push(r); + let r = transcript.challenge_scalar::(b"random_rs"); + rs.push(r); } + + // Given f_h is structured, the verifier can compute it's evaluation at + // the random point point in O(m) time by themselves and use a PST + // verification to ensure final_h is well formed. let v = (0..m) .into_par_iter() - .map(|i| E::Fr::one() + point[i].mul(xs_inv[m - i - 1]) - point[i]) + .map(|i| E::Fr::one() + rs[i].mul(xs_inv[m - i - 1]) - rs[i]) .product(); let comm_h = Commitment_G2 { nv: m, h_product: proof.final_h, }; - let check_h = MultilinearPC::::check_2(vk, &comm_h, &point, v, &proof.pst_proof_h); + let check_h = MultilinearPC::::check_2(vk, &comm_h, &rs, v, &proof.pst_proof_h); let final_u = proof.final_a.mul(final_y); let final_t: ::Fqk = E::pairing(proof.final_a, proof.final_h); @@ -318,9 +331,9 @@ where } } -/// compress is similar to commit::{V,W}KEY::compress: it modifies the `vec` -/// vector by setting the value at index $i:0 -> split$ $vec[i] = vec[i] + -/// vec[i+split]^scaler$. The `vec` vector is half of its size after this call. +/// compress modifies the `vec` vector by setting the value at +/// index $i:0 -> split$ $vec[i] = vec[i] + vec[i+split]^scaler$. +/// The `vec` vector is half of its size after this call. 
pub fn compress(vec: &mut Vec, split: usize, scaler: &C::ScalarField) { let (left, right) = vec.split_at_mut(split); left diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 9b046314..a36d1df4 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -1,7 +1,11 @@ #![allow(clippy::too_many_arguments)] +use super::commitments::MultiCommitGens; +use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; +use super::errors::ProofVerifyError; use crate::constraints::{VerifierCircuit, VerifierConfig}; use crate::group::{Fq, Fr}; use crate::math::Math; +use crate::mipp::MippProof; use crate::parameters::poseidon_params; use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; use crate::sqrt_pst::Polynomial; @@ -12,10 +16,6 @@ use ark_ec::PairingEngine; use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; -use crate::mipp::MippProof; -use super::commitments::MultiCommitGens; -use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; -use super::errors::ProofVerifyError; use super::r1csinstance::R1CSInstance; @@ -245,7 +245,6 @@ impl R1CSProof { // with the evaluation in ark-poly-commit so that reversing is not needed // anymore let timmer_opening = Timer::new("polyopening"); - timer_prove.stop(); let (comm, proof_eval_vars_at_ry, mipp_proof) = pl.open(transcript, comm_list, &gens.gens_pc.ck, &ry[1..], &t); @@ -259,7 +258,7 @@ impl R1CSProof { let timer_polyeval = Timer::new("polyeval"); let eval_vars_at_ry = pl.eval(&ry[1..]); timer_polyeval.stop(); - + timer_prove.stop(); ( R1CSProof { comm, diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 76fb9110..4d67121a 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -45,6 +45,8 @@ impl Polynomial { .map(|i| { let z: Vec = (0..pow_m) .into_par_iter() + // viewing the list of evaluation as a square matrix + // we select by row i and column j .map(|j| Z[(j << m) 
| i]) .collect(); DensePolynomial::new(z) @@ -139,11 +141,13 @@ impl Polynomial { (comm_list, t) } + // computes \chi_i(\vec{b}) = \prod_{i_j = 0}(1 - b_j)\prod_{i_j = 1}(b_j) pub fn get_chi_i(b: &[Scalar], i: usize) -> Scalar { let m = b.len(); let mut prod = Scalar::one(); for j in 0..m { let b_j = b[j]; + // iterate from msb to lsb of i to build chi_i as defined above if i >> (m - j - 1) & 1 == 1 { prod = prod * b_j; } else { @@ -163,13 +167,6 @@ impl Polynomial { ) -> (Commitment, Proof, MippProof) { let m = point.len() / 2; let a = &point[0..m]; - - if self.q.is_none() { - self.get_q(point); - } - - let q = self.q.clone().unwrap(); - if self.q.is_none() { self.get_q(point); } @@ -204,18 +201,19 @@ impl Polynomial { debug_assert!(c_u == comm.g_product); let h_vec = ck.powers_of_h[0].clone(); - // MIPP proof that U is the inner product of the vector A - // and the vector y, where A is the opening vector to T + // construct MIPP proof that U is the inner product of the vector A + // and the vector y, where A is the opening vector to T let timer_mipp_proof = Timer::new("mipp_prove"); let mipp_proof = MippProof::::prove::(transcript, ck, a_vec, chis, h_vec, &c_u, t) .unwrap(); timer_mipp_proof.stop(); - // PST proof for opening q at a let timer_proof = Timer::new("pst_open"); let mut a_rev = a.to_vec().clone(); a_rev.reverse(); + + // construct PST proof for opening q at a let pst_proof = MultilinearPC::::open(ck, &q, &a_rev); timer_proof.stop(); @@ -236,7 +234,9 @@ impl Polynomial { let len = point.len(); let a = &point[0..len / 2]; let b = &point[len / 2..len]; + let timer_mipp_verify = Timer::new("mipp_verify"); + // verify that U = A^y where A is the opening vector of T let res_mipp = MippProof::::verify::( vk, transcript, @@ -251,6 +251,8 @@ impl Polynomial { let mut a_rev = a.to_vec().clone(); a_rev.reverse(); let timer_pst_verify = Timer::new("pst_verify"); + + // verify that q(a) is indeed v let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); 
timer_pst_verify.stop(); res From 688cfff5417ad59d75b15d8b7264f089467c2988 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Tue, 7 Feb 2023 12:24:01 +0000 Subject: [PATCH 10/64] remove unused imports and apply cargo fix changes --- benches/r1cs.rs | 2 +- src/dense_mlpoly.rs | 4 ++-- src/mipp.rs | 13 ++++++------- src/poseidon_transcript.rs | 6 +++--- src/r1csproof.rs | 5 ++--- src/sqrt_pst.rs | 17 ++++++++--------- 6 files changed, 22 insertions(+), 25 deletions(-) diff --git a/benches/r1cs.rs b/benches/r1cs.rs index ccbc2493..bab38bf4 100644 --- a/benches/r1cs.rs +++ b/benches/r1cs.rs @@ -31,7 +31,7 @@ fn main() { let start = Instant::now(); let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let duration = start.elapsed().as_millis(); + let _duration = start.elapsed().as_millis(); let mut prover_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); let gens = NIZKGens::new(num_cons, num_vars, num_inputs); diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 1b302469..6828c491 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -1,7 +1,7 @@ #![allow(clippy::too_many_arguments)] use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; -use crate::timer::Timer; + use super::commitments::{Commitments, MultiCommitGens}; use super::errors::ProofVerifyError; @@ -589,7 +589,7 @@ impl PolyEvalProof { #[cfg(test)] mod tests { - use std::num; + use crate::parameters::poseidon_params; diff --git a/src/mipp.rs b/src/mipp.rs index ca092b2e..26e4929a 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -1,11 +1,10 @@ -use super::macros::*; use ark_ec::msm::VariableBaseMSM; use ark_ec::ProjectiveCurve; use ark_ec::{AffineCurve, PairingEngine}; use ark_ff::{Field, PrimeField}; -use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; +use ark_poly::DenseMultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{ - Commitment_G2, CommitterKey, Proof, Proof_G1, VerifierKey, + 
Commitment_G2, CommitterKey, Proof_G1, VerifierKey, }; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; @@ -15,7 +14,7 @@ use ark_std::Zero; use rayon::iter::ParallelIterator; use rayon::prelude::IntoParallelIterator; use rayon::prelude::*; -use std::ops::{Add, AddAssign, Mul, MulAssign, SubAssign}; +use std::ops::{AddAssign, Mul, MulAssign}; use thiserror::Error; #[derive(Debug, Clone, CanonicalDeserialize, CanonicalSerialize)] @@ -35,7 +34,7 @@ impl MippProof { y: Vec, h: Vec, U: &E::G1Affine, - T: &::Fqk, + _T: &::Fqk, ) -> Result, Error> { // the values of vectors A and y rescaled at each step of the loop let (mut m_a, mut m_y) = (a.clone(), y.clone()); @@ -70,7 +69,7 @@ impl MippProof { // since we do this in parallel we take reference first so it can be // moved within the macro's rayon scope. - let (rh_l, rh_r) = (&h_l, &h_r); + let (_rh_l, _rh_r) = (&h_l, &h_r); let (ra_l, ra_r) = (&a_l, &a_r); let (ry_l, ry_r) = (&y_l, &y_r); // See section 3.3 for paper version with equivalent names @@ -272,7 +271,7 @@ impl MippProof { let mut rs: Vec = Vec::new(); let m = xs_inv.len(); - for i in 0..m { + for _i in 0..m { let r = transcript.challenge_scalar::(b"random_rs"); rs.push(r); } diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 7be88d78..8e8180ee 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -4,7 +4,7 @@ use super::scalar::Scalar; use crate::mipp::Transcript; use ark_bls12_377::{Bls12_377 as I, G1Affine}; use ark_ec::PairingEngine; -use ark_ff::{Field, PrimeField}; +use ark_ff::{PrimeField}; use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; use ark_sponge::{ @@ -23,13 +23,13 @@ impl Transcript for PoseidonTranscript { self.sponge.absorb(&b"testudo".to_vec()); } - fn append(&mut self, label: &'static [u8], point: &S) { + fn append(&mut self, _label: &'static 
[u8], point: &S) { let mut buf = Vec::new(); point.serialize(&mut buf).expect("serialization failed"); self.sponge.absorb(&buf); } - fn challenge_scalar(&mut self, label: &'static [u8]) -> F { + fn challenge_scalar(&mut self, _label: &'static [u8]) -> F { self.sponge.squeeze_field_elements(1).remove(0) } } diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 66a92684..f9f4569b 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -7,15 +7,14 @@ use crate::group::{Fq, Fr}; use crate::math::Math; use crate::mipp::MippProof; use crate::parameters::poseidon_params; -use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; +use crate::poseidon_transcript::PoseidonTranscript; use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; use ark_bls12_377::Bls12_377 as I; use ark_bw6_761::BW6_761 as P; use ark_ec::PairingEngine; -use ark_poly::MultilinearExtension; + use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; -use ark_poly_commit::multilinear_pc::MultilinearPC; use super::r1csinstance::R1CSInstance; diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 4d67121a..bca4cc5b 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,12 +1,12 @@ use crate::mipp::MippProof; -use ark_bls12_377::{Bls12_377 as I, G1Affine, G2Affine}; +use ark_bls12_377::{Bls12_377 as I}; use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; use ark_ff::{BigInteger256, One, PrimeField}; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, }; -use ark_serialize::CanonicalSerialize; + use rayon::prelude::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, }; @@ -15,9 +15,8 @@ use super::scalar::Scalar; use crate::{ dense_mlpoly::DensePolynomial, math::Math, - poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}, + poseidon_transcript::{PoseidonTranscript}, timer::Timer, - transcript, }; pub struct Polynomial { @@ 
-69,7 +68,7 @@ impl Polynomial { fn get_q(&mut self, point: &[Scalar]) { let q_timer = Timer::new("build_q"); debug_assert!(point.len() == 2 * self.m); - let a = &point[0..self.m]; + let _a = &point[0..self.m]; let b = &point[self.m..2 * self.m]; let pow_m = 2_usize.pow(self.m as u32); @@ -92,7 +91,7 @@ impl Polynomial { // compute q(b) = p(a,b). pub fn eval(&mut self, point: &[Scalar]) -> Scalar { let a = &point[0..point.len() / 2]; - let b = &point[point.len() / 2..point.len()]; + let _b = &point[point.len() / 2..point.len()]; if self.q.is_none() { self.get_q(point); } @@ -178,7 +177,7 @@ impl Polynomial { // Compute the PST commitment to q obtained as the inner products of the // commitments to the polynomials p_i and chi_i(a) for i ranging over the // boolean hypercube of size m. - let m = a.len(); + let _m = a.len(); let timer_msm = Timer::new("msm"); if self.chis_b.is_none() { panic!("chis(b) should have been computed for q"); @@ -261,12 +260,12 @@ impl Polynomial { #[cfg(test)] mod tests { - use std::clone; + use crate::parameters::poseidon_params; use super::*; - use ark_ff::Zero; + use ark_std::UniformRand; #[test] fn check_sqrt_poly_eval() { From 93e0c24b50bf710e918a5fd6ad99c6c945cedb3a Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 7 Feb 2023 13:53:18 +0100 Subject: [PATCH 11/64] passing v0.4 --- Cargo.toml | 31 ++++----- examples/cubic.rs | 18 ++--- profiler/nizk.rs | 4 +- profiler/snark.rs | 4 +- src/commitments.rs | 13 ++-- src/constraints.rs | 27 ++++---- src/dense_mlpoly.rs | 4 +- src/group.rs | 21 +++--- src/lib.rs | 32 +++++---- src/mipp.rs | 130 ++++++++++++++++++------------------- src/nizk/mod.rs | 31 +++++---- src/parameters.rs | 16 +++-- src/poseidon_transcript.rs | 30 +++++---- src/r1csinstance.rs | 2 +- src/r1csproof.rs | 36 +++++----- src/sparse_mlpoly.rs | 2 +- src/sqrt_pst.rs | 44 +++++++------ src/transcript.rs | 10 +-- 18 files changed, 233 insertions(+), 222 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 
b5428e07..1082cc96 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,21 +27,19 @@ colored = "2.0.0" flate2 = "1.0.14" thiserror = "1.0" json = "0.12.4" -ark-ff = { version = "^0.3.0", default-features = false } -ark-ec = { version = "^0.3.0", default-features = false } -ark-std = { version = "^0.3.0"} -ark-bls12-377 = { version = "^0.3.0", features = ["r1cs","curve"] } -ark-serialize = { version = "^0.3.0", features = ["derive"] } -ark-sponge = { version = "^0.3.0" , features = ["r1cs"] } -ark-crypto-primitives = { version = "^0.3.0", default-features = true } -ark-r1cs-std = { version = "^0.3.0", default-features = false } -ark-nonnative-field = { version = "0.3.0", default-features = false } -ark-relations = { version = "^0.3.0", default-features = false, optional = true } -ark-snark = { version = "^0.3.0", default-features = false } +ark-ff = { version = "^0.4.0", default-features = false } +ark-ec = { version = "^0.4.0", default-features = false } +ark-std = { version = "^0.4.0"} +ark-bls12-377 = { version = "^0.4.0", features = ["r1cs","curve"] } +ark-serialize = { version = "^0.4.0", features = ["derive"] } +ark-crypto-primitives = {version = "^0.4.0", features = ["sponge","r1cs","snark"] } +ark-r1cs-std = { version = "^0.4.0", default-features = false } +ark-relations = { version = "^0.4.0", default-features = false, optional = true } +ark-snark = { version = "^0.4.0", default-features = false } ark-groth16 = { version = "^0.3.0", features = ["r1cs"] } -ark-bw6-761 = { version = "^0.3.0" } -ark-poly-commit = { version = "^0.3.0" } -ark-poly = {version = "^0.3.0"} +ark-bw6-761 = { version = "^0.4.0" } +ark-poly-commit = { version = "^0.4.0" } +ark-poly = {version = "^0.4.0"} lazy_static = "1.4.0" @@ -91,7 +89,6 @@ parallel = [ "std", "ark-ff/parallel", "ark-std/parallel", "ark-ec/parallel", "a std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-serialize/std"] [patch.crates-io] -ark-r1cs-std = { git = 
"https://github.com/arkworks-rs/r1cs-std/", rev = "a2a5ac491ae005ba2afd03fd21b7d3160d794a83"} -ark-poly-commit = {git = "https://github.com/maramihali/poly-commit", branch="pst_g2"} - +ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/pst_on_g2"} +ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", features = ["r1cs"] } diff --git a/examples/cubic.rs b/examples/cubic.rs index 6ccdbfc8..6d068d8a 100644 --- a/examples/cubic.rs +++ b/examples/cubic.rs @@ -38,7 +38,7 @@ fn produce_r1cs() -> ( let mut B: Vec<(usize, usize, Vec)> = Vec::new(); let mut C: Vec<(usize, usize, Vec)> = Vec::new(); - let one = Scalar::one().into_repr().to_bytes_le(); + let one = Scalar::one().into_bigint().to_bytes_le(); // R1CS is a set of three sparse matrices A B C, where is a row for every // constraint and a column for every entry in z = (vars, 1, inputs) @@ -67,7 +67,7 @@ fn produce_r1cs() -> ( // constraint 3 entries in (A,B,C) // constraint 3 is (Z3 + 5) * 1 - I0 = 0. 
A.push((3, 3, one.clone())); - A.push((3, num_vars, Scalar::from(5u32).into_repr().to_bytes_le())); + A.push((3, num_vars, Scalar::from(5u32).into_bigint().to_bytes_le())); B.push((3, num_vars, one.clone())); C.push((3, num_vars + 1, one)); @@ -82,16 +82,16 @@ fn produce_r1cs() -> ( let i0 = z3 + Scalar::from(5u32); // constraint 3 // create a VarsAssignment - let mut vars = vec![Scalar::zero().into_repr().to_bytes_le(); num_vars]; - vars[0] = z0.into_repr().to_bytes_le(); - vars[1] = z1.into_repr().to_bytes_le(); - vars[2] = z2.into_repr().to_bytes_le(); - vars[3] = z3.into_repr().to_bytes_le(); + let mut vars = vec![Scalar::zero().into_bigint().to_bytes_le(); num_vars]; + vars[0] = z0.into_bigint().to_bytes_le(); + vars[1] = z1.into_bigint().to_bytes_le(); + vars[2] = z2.into_bigint().to_bytes_le(); + vars[3] = z3.into_bigint().to_bytes_le(); let assignment_vars = VarsAssignment::new(&vars).unwrap(); // create an InputsAssignment - let mut inputs = vec![Scalar::zero().into_repr().to_bytes_le(); num_inputs]; - inputs[0] = i0.into_repr().to_bytes_le(); + let mut inputs = vec![Scalar::zero().into_bigint().to_bytes_le(); num_inputs]; + inputs[0] = i0.into_bigint().to_bytes_le(); let assignment_inputs = InputsAssignment::new(&inputs).unwrap(); // check if the instance we created is satisfiable diff --git a/profiler/nizk.rs b/profiler/nizk.rs index 941124bd..a5e50583 100644 --- a/profiler/nizk.rs +++ b/profiler/nizk.rs @@ -38,7 +38,9 @@ pub fn main() { let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript); let mut proof_encoded = Vec::new(); - proof.serialize(&mut proof_encoded).unwrap(); + proof + .serialize_with_mode(&mut proof_encoded, Compress::Yes) + .unwrap(); let msg_proof_len = format!("NIZK::proof_compressed_len {:?}", proof_encoded.len()); print(&msg_proof_len); diff --git a/profiler/snark.rs b/profiler/snark.rs index 8327e3d7..c2b2b666 100644 --- a/profiler/snark.rs +++ b/profiler/snark.rs @@ -49,7 +49,9 @@ pub fn main() { ); let mut 
proof_encoded = Vec::new(); - proof.serialize(&mut proof_encoded).unwrap(); + proof + .serialize_with_mode(&mut proof_encoded, Compress::Yes) + .unwrap(); let msg_proof_len = format!("SNARK::proof_compressed_len {:?}", proof_encoded.len()); print(&msg_proof_len); diff --git a/src/commitments.rs b/src/commitments.rs index 2818ad34..bac7ea73 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -2,11 +2,12 @@ use super::group::{GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROU use super::scalar::Scalar; use crate::group::CompressGroupElement; use crate::parameters::*; -use ark_ec::{AffineCurve, ProjectiveCurve}; +use ark_ec::{AffineRepr, CurveGroup}; use ark_ff::PrimeField; +use std::ops::Mul; -use ark_sponge::poseidon::PoseidonSponge; -use ark_sponge::CryptographicSponge; +use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; +use ark_crypto_primitives::sponge::CryptographicSponge; #[derive(Debug, Clone)] pub struct MultiCommitGens { @@ -29,7 +30,7 @@ impl MultiCommitGens { let uniform_bytes = sponge.squeeze_bytes(64); el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes); } - let el = el_aff.unwrap().mul_by_cofactor_to_projective(); + let el = el_aff.unwrap().clear_cofactor().into_group(); gens.push(el); } @@ -80,13 +81,13 @@ impl Commitments for Scalar { impl Commitments for Vec { fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement { assert_eq!(gens_n.n, self.len()); - GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr()) + GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind) } } impl Commitments for [Scalar] { fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement { assert_eq!(gens_n.n, self.len()); - GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind.into_repr()) + GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind) } } diff --git a/src/constraints.rs b/src/constraints.rs index 
71436c7e..8e2deca4 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -10,9 +10,8 @@ use crate::{ use ark_bls12_377::{constraints::PairingVar as IV, Bls12_377 as I, Fr}; -use ark_crypto_primitives::{ - snark::BooleanInputVar, CircuitSpecificSetupSNARK, SNARKGadget, SNARK, -}; +use ark_crypto_primitives::snark::{BooleanInputVar, SNARKGadget}; +use ark_snark::{CircuitSpecificSetupSNARK, SNARK}; use ark_ff::{BitIteratorLE, PrimeField, Zero}; use ark_groth16::{ @@ -20,6 +19,10 @@ use ark_groth16::{ Groth16, PreparedVerifyingKey, Proof as GrothProof, }; +use ark_crypto_primitives::sponge::{ + constraints::CryptographicSpongeVar, + poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig}, +}; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof, VerifierKey}; use ark_r1cs_std::{ alloc::{AllocVar, AllocationMode}, @@ -28,24 +31,16 @@ use ark_r1cs_std::{ R1CSVar, }; use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, Namespace, SynthesisError}; -use ark_sponge::{ - constraints::CryptographicSpongeVar, - poseidon::{constraints::PoseidonSpongeVar, PoseidonParameters}, -}; use rand::{CryptoRng, Rng}; pub struct PoseidonTranscripVar { pub cs: ConstraintSystemRef, pub sponge: PoseidonSpongeVar, - pub params: PoseidonParameters, + pub params: PoseidonConfig, } impl PoseidonTranscripVar { - fn new( - cs: ConstraintSystemRef, - params: &PoseidonParameters, - challenge: Option, - ) -> Self { + fn new(cs: ConstraintSystemRef, params: &PoseidonConfig, challenge: Option) -> Self { let mut sponge = PoseidonSpongeVar::new(cs.clone(), params); if let Some(c) = challenge { @@ -246,7 +241,7 @@ pub struct R1CSVerificationCircuit { pub input: Vec, pub input_as_sparse_poly: SparsePolynomial, pub evals: (Fr, Fr, Fr), - pub params: PoseidonParameters, + pub params: PoseidonConfig, pub prev_challenge: Fr, pub claims_phase2: (Scalar, Scalar, Scalar, Scalar), pub eval_vars_at_ry: Fr, @@ -412,7 +407,7 @@ pub struct VerifierConfig { pub input: Vec, 
pub input_as_sparse_poly: SparsePolynomial, pub evals: (Fr, Fr, Fr), - pub params: PoseidonParameters, + pub params: PoseidonConfig, pub prev_challenge: Fr, pub claims_phase2: (Fr, Fr, Fr, Fr), pub eval_vars_at_ry: Fr, @@ -465,7 +460,7 @@ impl ConstraintSynthesizer for VerifierCircuit { let bits = pubs .iter() .map(|c| { - let bits: Vec = BitIteratorLE::new(c.into_repr().as_ref().to_vec()).collect(); + let bits: Vec = BitIteratorLE::new(c.into_bigint().as_ref().to_vec()).collect(); Vec::new_witness(cs.clone(), || Ok(bits)) }) .collect::, _>>()?; diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 47517ebf..612d5c31 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -15,8 +15,8 @@ use super::random::RandomTape; use super::scalar::Scalar; use super::transcript::{AppendToTranscript, ProofTranscript}; use ark_bls12_377::{Bls12_377 as I, G1Affine}; -use ark_ec::msm::VariableBaseMSM; -use ark_ec::{PairingEngine, ProjectiveCurve}; +use ark_ec::scalar_mul::variable_base::VariableBaseMSM; +use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; use ark_poly_commit::multilinear_pc::data_structures::{ diff --git a/src/group.rs b/src/group.rs index 8e0baa92..f8e13bf7 100644 --- a/src/group.rs +++ b/src/group.rs @@ -1,12 +1,12 @@ use crate::errors::ProofVerifyError; -use ark_ec::msm::VariableBaseMSM; +use ark_ec::scalar_mul::variable_base::VariableBaseMSM; +use ark_ec::Group; use ark_ff::PrimeField; - use lazy_static::lazy_static; use super::scalar::Scalar; -use ark_ec::ProjectiveCurve; +use ark_ec::CurveGroup; use ark_serialize::*; use core::borrow::Borrow; @@ -19,7 +19,7 @@ pub type Fr = ark_bls12_377::Fr; pub struct CompressedGroup(pub Vec); lazy_static! 
{ - pub static ref GROUP_BASEPOINT: GroupElement = GroupElement::prime_subgroup_generator(); + pub static ref GROUP_BASEPOINT: GroupElement = GroupElement::generator(); } pub trait CompressGroupElement { @@ -37,14 +37,16 @@ pub trait UnpackGroupElement { impl CompressGroupElement for GroupElement { fn compress(&self) -> CompressedGroup { let mut point_encoding = Vec::new(); - self.serialize(&mut point_encoding).unwrap(); + self + .serialize_with_mode(&mut point_encoding, Compress::Yes) + .unwrap(); CompressedGroup(point_encoding) } } impl DecompressGroupElement for GroupElement { fn decompress(encoded: &CompressedGroup) -> Option { - let res = GroupElement::deserialize(&*encoded.0); + let res = GroupElement::deserialize_compressed(&*encoded.0); if let Ok(r) = res { Some(r) } else { @@ -67,14 +69,11 @@ pub trait VartimeMultiscalarMul { impl VartimeMultiscalarMul for GroupElement { fn vartime_multiscalar_mul(scalars: &[Scalar], points: &[GroupElement]) -> GroupElement { - let repr_scalars = scalars - .iter() - .map(|S| S.borrow().into_repr()) - .collect::::BigInt>>(); + assert!(scalars.len() == points.len()); let aff_points = points .iter() .map(|P| P.borrow().into_affine()) .collect::>(); - VariableBaseMSM::multi_scalar_mul(aff_points.as_slice(), repr_scalars.as_slice()) + ::msm_unchecked(aff_points.as_slice(), scalars) } } diff --git a/src/lib.rs b/src/lib.rs index ebb4e88a..a5ebc88c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -399,7 +399,9 @@ impl SNARK { }; let mut proof_encoded: Vec = Vec::new(); - proof.serialize(&mut proof_encoded).unwrap(); + proof + .serialize_with_mode(&mut proof_encoded, Compress::Yes) + .unwrap(); Timer::print(&format!("len_r1cs_sat_proof {:?}", proof_encoded.len())); (proof, rx, ry) @@ -436,7 +438,9 @@ impl SNARK { ); let mut proof_encoded: Vec = Vec::new(); - proof.serialize(&mut proof_encoded).unwrap(); + proof + .serialize_with_mode(&mut proof_encoded, Compress::Yes) + .unwrap(); Timer::print(&format!("len_r1cs_eval_proof {:?}", 
proof_encoded.len())); proof }; @@ -575,7 +579,9 @@ impl NIZK { // &mut random_tape, ); let mut proof_encoded = Vec::new(); - proof.serialize(&mut proof_encoded).unwrap(); + proof + .serialize_with_mode(&mut proof_encoded, Compress::Yes) + .unwrap(); Timer::print(&format!("len_r1cs_sat_proof {:?}", proof_encoded.len())); (proof, rx, ry) }; @@ -772,24 +778,24 @@ mod tests { let mut C: Vec<(usize, usize, Vec)> = Vec::new(); // Create a^2 + b + 13 - A.push((0, num_vars + 2, (Scalar::one().into_repr().to_bytes_le()))); // 1*a - B.push((0, num_vars + 2, Scalar::one().into_repr().to_bytes_le())); // 1*a - C.push((0, num_vars + 1, Scalar::one().into_repr().to_bytes_le())); // 1*z + A.push((0, num_vars + 2, (Scalar::one().into_bigint().to_bytes_le()))); // 1*a + B.push((0, num_vars + 2, Scalar::one().into_bigint().to_bytes_le()));// 1*a + C.push((0, num_vars + 1, Scalar::one().into_bigint().to_bytes_le())); // 1*z C.push(( 0, num_vars, - (-Scalar::from(13u64)).into_repr().to_bytes_le(), + (-Scalar::from(13u64)).into_bigint().to_bytes_le(), )); // -13*1 - C.push((0, num_vars + 3, (-Scalar::one()).into_repr().to_bytes_le())); // -1*b + C.push((0, num_vars + 3, (-Scalar::one()).into_bigint().to_bytes_le())); // -1*b // Var Assignments (Z_0 = 16 is the only output) - let vars = vec![Scalar::zero().into_repr().to_bytes_le(); num_vars]; + let vars = vec![Scalar::zero().into_bigint().to_bytes_le(); num_vars]; // create an InputsAssignment (a = 1, b = 2) - let mut inputs = vec![Scalar::zero().into_repr().to_bytes_le(); num_inputs]; - inputs[0] = Scalar::from(16u64).into_repr().to_bytes_le(); - inputs[1] = Scalar::from(1u64).into_repr().to_bytes_le(); - inputs[2] = Scalar::from(2u64).into_repr().to_bytes_le(); + let mut inputs = vec![Scalar::zero().into_bigint().to_bytes_le(); num_inputs]; + inputs[0] = Scalar::from(16u64).into_bigint().to_bytes_le(); + inputs[1] = Scalar::from(1u64).into_bigint().to_bytes_le(); + inputs[2] = Scalar::from(2u64).into_bigint().to_bytes_le(); let 
assignment_inputs = InputsAssignment::new(&inputs).unwrap(); let assignment_vars = VarsAssignment::new(&vars).unwrap(); diff --git a/src/mipp.rs b/src/mipp.rs index e6108f8e..00afabf7 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -1,11 +1,11 @@ use super::macros::*; -use ark_ec::msm::VariableBaseMSM; -use ark_ec::ProjectiveCurve; -use ark_ec::{AffineCurve, PairingEngine}; -use ark_ff::{Field, PrimeField}; +use ark_ec::scalar_mul::variable_base::VariableBaseMSM; +use ark_ec::CurveGroup; +use ark_ec::{pairing::Pairing, AffineRepr}; +use ark_ff::{BigInt, Field, PrimeField}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; use ark_poly_commit::multilinear_pc::data_structures::{ - Commitment_G2, CommitterKey, Proof, Proof_G1, VerifierKey, + CommitmentG2, CommitterKey, Proof, ProofG1, VerifierKey, }; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; @@ -19,23 +19,23 @@ use std::ops::{Add, AddAssign, Mul, MulAssign, SubAssign}; use thiserror::Error; #[derive(Debug, Clone, CanonicalDeserialize, CanonicalSerialize)] -pub struct MippProof { - pub comms_t: Vec<(::Fqk, ::Fqk)>, +pub struct MippProof { + pub comms_t: Vec<(::TargetField, ::TargetField)>, pub comms_u: Vec<(E::G1Affine, E::G1Affine)>, pub final_a: E::G1Affine, pub final_h: E::G2Affine, - pub pst_proof_h: Proof_G1, + pub pst_proof_h: ProofG1, } -impl MippProof { +impl MippProof { pub fn prove( transcript: &mut impl Transcript, ck: &CommitterKey, a: Vec, - y: Vec, + y: Vec, h: Vec, U: &E::G1Affine, - T: &::Fqk, + T: &::TargetField, ) -> Result, Error> { // the values of vectors A and y rescaled at each step of the loop let (mut m_a, mut m_y) = (a.clone(), y.clone()); @@ -48,8 +48,8 @@ impl MippProof { let mut comms_u = Vec::new(); // the transcript challenges - let mut xs: Vec = Vec::new(); - let mut xs_inv: Vec = Vec::new(); + let mut xs: Vec = Vec::new(); + let mut xs_inv: Vec = Vec::new(); // we 
append only the MIPP because the aggregated commitment T has been // appended already @@ -95,7 +95,7 @@ impl MippProof { transcript.append(b"comm_u_r", &comm_u_r); transcript.append(b"comm_t_l", &comm_t_l); transcript.append(b"comm_t_r", &comm_t_r); - let c_inv = transcript.challenge_scalar::(b"challenge_i"); + let c_inv = transcript.challenge_scalar::(b"challenge_i"); // Optimization for multiexponentiation to rescale G2 elements with // 128-bit challenge Swap 'c' and 'c_inv' since we @@ -122,17 +122,17 @@ impl MippProof { let final_h = m_h[0]; // get polynomial f_h - let poly = DenseMultilinearExtension::::from_evaluations_vec( + let poly = DenseMultilinearExtension::::from_evaluations_vec( xs_inv.len(), - Self::polynomial_evaluations_from_transcript::(&xs_inv), + Self::polynomial_evaluations_from_transcript::(&xs_inv), ); let c = MultilinearPC::::commit_g2(ck, &poly); debug_assert!(c.h_product == final_h); // create proof that final_h is well-formed - let mut point: Vec = (0..poly.num_vars) + let mut point: Vec = (0..poly.num_vars) .into_iter() - .map(|_| transcript.challenge_scalar::(b"random_point")) + .map(|_| transcript.challenge_scalar::(b"random_point")) .collect(); let pst_proof_h = MultilinearPC::::open_g1(ck, &poly, &point); @@ -175,20 +175,20 @@ impl MippProof { vk: &VerifierKey, transcript: &mut impl Transcript, proof: &MippProof, - point: Vec, + point: Vec, U: &E::G1Affine, - T: &::Fqk, + T: &::TargetField, ) -> bool { let comms_u = proof.comms_u.clone(); let comms_t = proof.comms_t.clone(); let mut xs = Vec::new(); let mut xs_inv = Vec::new(); - let mut final_y = E::Fr::one(); + let mut final_y = E::ScalarField::one(); let mut final_res = MippTU { tc: T.clone(), - uc: U.into_projective(), + uc: U.into_group(), }; transcript.append(b"U", U); @@ -204,7 +204,7 @@ impl MippProof { transcript.append(b"comm_u_r", comm_u_r); transcript.append(b"comm_t_l", comm_t_l); transcript.append(b"comm_t_r", comm_t_r); - let c_inv = 
transcript.challenge_scalar::(b"challenge_i"); + let c_inv = transcript.challenge_scalar::(b"challenge_i"); let c = c_inv.inverse().unwrap(); xs.push(c); @@ -213,11 +213,11 @@ impl MippProof { // the verifier computes the final_y by themselves given // it's field operations it is quite fast and parallelisation // doesn't bring much improvement - final_y *= E::Fr::one() + c_inv.mul(point[i]) - point[i]; + final_y *= E::ScalarField::one() + c_inv.mul(point[i]) - point[i]; } - enum Op<'a, E: PairingEngine> { - TC(&'a E::Fqk, ::BigInt), - UC(&'a E::G1Affine, ::BigInt), + enum Op<'a, E: Pairing> { + TC(&'a E::TargetField, ::BigInt), + UC(&'a E::G1Affine, &'a E::ScalarField), } let res = comms_t @@ -228,25 +228,22 @@ impl MippProof { let (comm_t_l, comm_t_r) = comm_t; let (comm_u_l, comm_u_r) = comm_u; - let c_repr = c.into_repr(); - let c_inv_repr = c_inv.into_repr(); - // we multiple left side by x^-1 and right side by x vec![ - Op::TC::(comm_t_l, c_inv_repr), - Op::TC(comm_t_r, c_repr), - Op::UC(comm_u_l, c_inv_repr), - Op::UC(comm_u_r, c_repr), + Op::TC::(comm_t_l, c_inv.into_bigint()), + Op::TC(comm_t_r, c.into_bigint()), + Op::UC(comm_u_l, c_inv), + Op::UC(comm_u_r, c), ] }) .fold(MippTU::::default, |mut res, op: Op| { match op { Op::TC(tx, c) => { - let tx: E::Fqk = tx.pow(c); + let tx: E::TargetField = tx.pow(c); res.tc.mul_assign(&tx); } Op::UC(zx, c) => { - let uxp: E::G1Projective = zx.mul(c); + let uxp: E::G1 = zx.mul(c); res.uc.add_assign(&uxp); } } @@ -261,25 +258,25 @@ impl MippProof { let ref_final_res = &mut final_res; ref_final_res.merge(&res); - let mut point: Vec = Vec::new(); + let mut point: Vec = Vec::new(); let m = xs_inv.len(); for i in 0..m { - let r = transcript.challenge_scalar::(b"random_point"); + let r = transcript.challenge_scalar::(b"random_point"); point.push(r); } let v = (0..m) .into_par_iter() - .map(|i| E::Fr::one() + point[i].mul(xs_inv[m - i - 1]) - point[i]) + .map(|i| E::ScalarField::one() + point[i].mul(xs_inv[m - i - 1]) - 
point[i]) .product(); - let comm_h = Commitment_G2 { + let comm_h = CommitmentG2 { nv: m, h_product: proof.final_h, }; let check_h = MultilinearPC::::check_2(vk, &comm_h, &point, v, &proof.pst_proof_h); let final_u = proof.final_a.mul(final_y); - let final_t: ::Fqk = E::pairing(proof.final_a, proof.final_h); + let final_t: ::TargetField = E::pairing(proof.final_a, proof.final_h).0; let check_t = ref_final_res.tc == final_t; @@ -291,26 +288,26 @@ impl MippProof { /// MippTU keeps track of the variables that have been sent by the prover and /// must be multiplied together by the verifier. -struct MippTU { - pub tc: E::Fqk, - pub uc: E::G1Projective, +struct MippTU { + pub tc: E::TargetField, + pub uc: E::G1, } impl Default for MippTU where - E: PairingEngine, + E: Pairing, { fn default() -> Self { Self { - tc: E::Fqk::one(), - uc: E::G1Projective::zero(), + tc: E::TargetField::one(), + uc: E::G1::zero(), } } } impl MippTU where - E: PairingEngine, + E: Pairing, { fn merge(&mut self, other: &Self) { self.tc.mul_assign(&other.tc); @@ -321,15 +318,15 @@ where /// compress is similar to commit::{V,W}KEY::compress: it modifies the `vec` /// vector by setting the value at index $i:0 -> split$ $vec[i] = vec[i] + /// vec[i+split]^scaler$. The `vec` vector is half of its size after this call. 
-pub fn compress(vec: &mut Vec, split: usize, scaler: &C::ScalarField) { +pub fn compress(vec: &mut Vec, split: usize, scaler: &C::ScalarField) { let (left, right) = vec.split_at_mut(split); left .par_iter_mut() .zip(right.par_iter()) .for_each(|(a_l, a_r)| { // TODO remove that with master version - let mut x = a_r.mul(scaler.into_repr()); - x.add_assign_mixed(&a_l); + let mut x = a_r.mul(scaler); + x.add_assign(a_l.into_group()); *a_l = x.into_affine(); }); let len = left.len(); @@ -352,31 +349,28 @@ pub fn compress_field(vec: &mut Vec, split: usize, scaler: &F) vec.resize(len, F::zero()); } -pub fn multiexponentiation( +pub fn multiexponentiation( left: &[G], right: &[G::ScalarField], -) -> Result { +) -> Result { if left.len() != right.len() { return Err(Error::InvalidIPVectorLength); } - Ok(VariableBaseMSM::multi_scalar_mul( - left, - &cfg_iter!(right).map(|s| s.into_repr()).collect::>(), - )) + Ok(::msm_unchecked(left, right)) } -pub fn pairings_product(gs: &[E::G1Affine], hs: &[E::G2Affine]) -> E::Fqk { - let pairings: Vec<_> = gs - .into_par_iter() - .map(|g| ::G1Prepared::from(*g)) - .zip( - hs.into_par_iter() - .map(|h| ::G2Prepared::from(*h)), - ) - .collect(); - - E::product_of_pairings(pairings.iter()) +pub fn pairings_product(gs: &[E::G1Affine], hs: &[E::G2Affine]) -> E::TargetField { + //let pairings: Vec<_> = gs + // .into_par_iter() + // .map(|g| ::G1Prepared::from(*g)) + // .zip( + // hs.into_par_iter() + // .map(|h| ::G2Prepared::from(*h)), + // ) + // .collect(); + + E::multi_pairing(gs, hs).0 } #[derive(Debug, Error)] diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index e9044062..26667b7a 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -9,9 +9,10 @@ use super::group::{ }; use super::random::RandomTape; use super::scalar::Scalar; -use ark_ec::ProjectiveCurve; +use ark_ec::CurveGroup; use ark_ff::PrimeField; use ark_serialize::*; +use std::ops::Mul; mod bullet; use bullet::BulletReductionProof; @@ -68,7 +69,7 @@ impl KnowledgeProof { let c 
= transcript.challenge_scalar(); let lhs = self.z1.commit(&self.z2, gens_n).compress(); - let rhs = (C.unpack()?.mul(c.into_repr()) + self.alpha.unpack()?).compress(); + let rhs = (C.unpack()?.mul(c) + self.alpha.unpack()?).compress(); if lhs == rhs { Ok(()) @@ -109,7 +110,7 @@ impl EqualityProof { let C2 = v2.commit(s2, gens_n).compress(); transcript.append_point(&C2); - let alpha = gens_n.h.mul(r.into_repr()).compress(); + let alpha = gens_n.h.mul(r).compress(); transcript.append_point(&alpha); let c = transcript.challenge_scalar(); @@ -135,11 +136,11 @@ impl EqualityProof { let c = transcript.challenge_scalar(); let rhs = { let C = C1.unpack()? - C2.unpack()?; - (C.mul(c.into_repr()) + self.alpha.unpack()?).compress() + (C.mul(c) + self.alpha.unpack()?).compress() }; println!("rhs {:?}", rhs); - let lhs = gens_n.h.mul(self.z.into_repr()).compress(); + let lhs = gens_n.h.mul(self.z).compress(); println!("lhs {:?}", lhs); if lhs == rhs { Ok(()) @@ -248,9 +249,8 @@ impl ProductProof { z2: &Scalar, ) -> bool { println!("{:?}", X); - let lhs = (GroupElement::decompress(P).unwrap() - + GroupElement::decompress(X).unwrap().mul(c.into_repr())) - .compress(); + let lhs = (GroupElement::decompress(P).unwrap() + GroupElement::decompress(X).unwrap().mul(c)) + .compress(); let rhs = z1.commit(z2, gens_n).compress(); lhs == rhs @@ -404,12 +404,12 @@ impl DotProductProof { let c = transcript.challenge_scalar(); - let mut result = Cx.unpack()?.mul(c.into_repr()) + self.delta.unpack()? - == self.z.commit(&self.z_delta, gens_n); + let mut result = + Cx.unpack()?.mul(c) + self.delta.unpack()? == self.z.commit(&self.z_delta, gens_n); let dotproduct_z_a = DotProductProof::compute_dotproduct(&self.z, a); - result &= Cy.unpack()?.mul(c.into_repr()) + self.beta.unpack()? - == dotproduct_z_a.commit(&self.z_beta, gens_1); + result &= + Cy.unpack()?.mul(c) + self.beta.unpack()? 
== dotproduct_z_a.commit(&self.z_beta, gens_1); if result { Ok(()) } else { @@ -573,10 +573,9 @@ impl DotProductProofLog { let z1_s = &self.z1; let z2_s = &self.z2; - let lhs = - ((Gamma_hat.mul(c_s.into_repr()) + beta_s).mul(a_hat_s.into_repr()) + delta_s).compress(); - let rhs = ((g_hat + gens.gens_1.G[0].mul(a_hat_s.into_repr())).mul(z1_s.into_repr()) - + gens.gens_1.h.mul(z2_s.into_repr())) + let lhs = ((Gamma_hat.mul(c_s) + beta_s).mul(a_hat_s) + delta_s).compress(); + let rhs = ((g_hat + gens.gens_1.G[0].mul(a_hat_s)).mul(z1_s) + + gens.gens_1.h.mul(z2_s)) .compress(); assert_eq!(lhs, rhs); diff --git a/src/parameters.rs b/src/parameters.rs index 142d3276..cee6e8a2 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -1,6 +1,6 @@ use std::str::FromStr; -use ark_sponge::poseidon::PoseidonParameters; +use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; // Copyright: https://github.com/nikkolasg/ark-dkg/blob/main/src/parameters.rs use json::JsonValue; use lazy_static::lazy_static; @@ -145,7 +145,7 @@ array!["228517621981785468369663538305998424621845824654552006112396193307208970 } /// TODO -pub fn poseidon_params() -> PoseidonParameters { +pub fn poseidon_params() -> PoseidonConfig { let arks = FR["ark"] .members() .map(|ark| { @@ -163,15 +163,19 @@ pub fn poseidon_params() -> PoseidonParameters { .collect::>() }) .collect::>(); - PoseidonParameters::new( - FR["full_rounds"].as_u32().unwrap(), - FR["partial_rounds"].as_u32().unwrap(), + PoseidonConfig::new( + FR["full_rounds"].as_usize().unwrap(), + FR["partial_rounds"].as_usize().unwrap(), FR["alpha"].as_u64().unwrap(), mds, arks, + FR["rate"].as_usize().unwrap(), + // TODO (nikkolasg): check out the concrete parameters for the capacity + // so far taken from https://github.com/AleoHQ/snarkVM/blob/d6ce2d3540b9355b59ef580db998188c786f8599/fields/src/traits/poseidon_default.rs#L43 + 1, ) } lazy_static! 
{ - pub static ref POSEIDON_PARAMETERS_FR_377: PoseidonParameters = poseidon_params(); + pub static ref POSEIDON_PARAMETERS_FR_377: PoseidonConfig = poseidon_params(); } diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 7be88d78..329ce5c5 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,21 +1,21 @@ -use crate::group::{CompressedGroup, Fr}; - use super::scalar::Scalar; +use crate::group::{CompressedGroup, Fr}; use crate::mipp::Transcript; use ark_bls12_377::{Bls12_377 as I, G1Affine}; -use ark_ec::PairingEngine; +use ark_crypto_primitives::sponge::{ + poseidon::{PoseidonConfig, PoseidonSponge}, + CryptographicSponge, +}; +use ark_ec::pairing::Pairing; use ark_ff::{Field, PrimeField}; use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; -use ark_sponge::{ - poseidon::{PoseidonParameters, PoseidonSponge}, - CryptographicSponge, -}; +use ark_serialize::Compress; #[derive(Clone)] /// TODO pub struct PoseidonTranscript { sponge: PoseidonSponge, - params: PoseidonParameters, + params: PoseidonConfig, } impl Transcript for PoseidonTranscript { @@ -25,7 +25,9 @@ impl Transcript for PoseidonTranscript { fn append(&mut self, label: &'static [u8], point: &S) { let mut buf = Vec::new(); - point.serialize(&mut buf).expect("serialization failed"); + point + .serialize_with_mode(&mut buf, Compress::Yes) + .expect("serialization failed"); self.sponge.absorb(&buf); } @@ -36,7 +38,7 @@ impl Transcript for PoseidonTranscript { impl PoseidonTranscript { /// create a new transcript - pub fn new(params: &PoseidonParameters) -> Self { + pub fn new(params: &PoseidonConfig) -> Self { let sponge = PoseidonSponge::new(params); PoseidonTranscript { sponge, @@ -73,9 +75,9 @@ impl PoseidonTranscript { } } - pub fn append_gt(&mut self, g_t: &::Fqk) { + pub fn append_gt(&mut self, g_t: &::TargetField) { let mut bytes = Vec::new(); - g_t.serialize(&mut bytes).unwrap(); + 
g_t.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); self.append_bytes(&bytes); } @@ -101,7 +103,7 @@ impl AppendToPoseidon for CompressedGroup { impl AppendToPoseidon for Commitment { fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { let mut bytes = Vec::new(); - self.serialize(&mut bytes).unwrap(); + self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); transcript.append_bytes(&bytes); } } @@ -109,7 +111,7 @@ impl AppendToPoseidon for Commitment { impl AppendToPoseidon for G1Affine { fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { let mut bytes = Vec::new(); - self.serialize(&mut bytes).unwrap(); + self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); transcript.append_bytes(&bytes); } } diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index 323d0402..e3eb94e1 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -161,7 +161,7 @@ impl R1CSInstance { pub fn get_digest(&self) -> Vec { let mut bytes = Vec::new(); - self.serialize(&mut bytes).unwrap(); + self.serialize_with_mode(&mut bytes,Compress::Yes).unwrap(); let mut shake = Shake256::default(); shake.input(bytes); let mut reader = shake.xof_result(); diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 9b046314..3c67d12e 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -1,28 +1,28 @@ #![allow(clippy::too_many_arguments)] +use super::commitments::MultiCommitGens; +use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; +use super::errors::ProofVerifyError; use crate::constraints::{VerifierCircuit, VerifierConfig}; use crate::group::{Fq, Fr}; use crate::math::Math; +use crate::mipp::MippProof; use crate::parameters::poseidon_params; use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; use ark_bls12_377::Bls12_377 as I; use ark_bw6_761::BW6_761 as P; -use ark_ec::PairingEngine; +use ark_ec::pairing::Pairing; use 
ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use ark_poly_commit::multilinear_pc::MultilinearPC; -use crate::mipp::MippProof; -use super::commitments::MultiCommitGens; -use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; -use super::errors::ProofVerifyError; use super::r1csinstance::R1CSInstance; use super::scalar::Scalar; use super::sparse_mlpoly::{SparsePolyEntry, SparsePolynomial}; use super::timer::Timer; -use ark_crypto_primitives::{CircuitSpecificSetupSNARK, SNARK}; +use ark_snark::{CircuitSpecificSetupSNARK, SNARK}; use ark_groth16::Groth16; use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystem}; @@ -44,7 +44,7 @@ pub struct R1CSProof { ry: Vec, // The transcript state after the satisfiability proof was computed. pub transcript_sat_state: Scalar, - pub t: ::Fqk, + pub t: ::TargetField, pub mipp_proof: MippProof, } #[derive(Clone)] @@ -155,7 +155,7 @@ impl R1CSProof { let (comm_list, t) = pl.commit(&gens.gens_pc.ck); let mut bytes = Vec::new(); - t.serialize(&mut bytes).unwrap(); + t.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); transcript.append_bytes(&bytes); // comm.append_to_poseidon(transcript); @@ -290,7 +290,10 @@ impl R1CSProof { ) -> Result<(u128, u128, u128), ProofVerifyError> { // serialise and add the IPP commitment to the transcript let mut bytes = Vec::new(); - self.t.serialize(&mut bytes).unwrap(); + self + .t + .serialize_with_mode(&mut bytes, Compress::Yes) + .unwrap(); transcript.append_bytes(&bytes); let c = transcript.challenge_scalar(); @@ -324,22 +327,20 @@ impl R1CSProof { transcript_sat_state: self.transcript_sat_state, }; - let mut rng = ark_std::test_rng(); - let prove_inner = Timer::new("proveinnercircuit"); let start = Instant::now(); - let circuit = VerifierCircuit::new(&config, &mut rng).unwrap(); + let circuit = VerifierCircuit::new(&config, &mut rand::thread_rng()).unwrap(); let dp1 = start.elapsed().as_millis(); 
prove_inner.stop(); // this is universal, we don't measure it let start = Instant::now(); - let (pk, vk) = Groth16::

::setup(circuit.clone(), &mut rng).unwrap(); + let (pk, vk) = Groth16::

::setup(circuit.clone(), &mut rand::thread_rng()).unwrap(); let ds = start.elapsed().as_millis(); let prove_outer = Timer::new("proveoutercircuit"); let start = Instant::now(); - let proof = Groth16::

::prove(&pk, circuit, &mut rng).unwrap(); + let proof = Groth16::

::prove(&pk, circuit, &mut rand::thread_rng()).unwrap(); let dp2 = start.elapsed().as_millis(); prove_outer.stop(); @@ -385,7 +386,10 @@ impl R1CSProof { ) -> Result { // serialise and add the IPP commitment to the transcript let mut bytes = Vec::new(); - self.t.serialize(&mut bytes).unwrap(); + self + .t + .serialize_with_mode(&mut bytes, Compress::Yes) + .unwrap(); transcript.append_bytes(&bytes); let c = transcript.challenge_scalar(); @@ -420,7 +424,7 @@ impl R1CSProof { }; let mut rng = ark_std::test_rng(); - let circuit = VerifierCircuit::new(&config, &mut rng).unwrap(); + let circuit = VerifierCircuit::new(&config, &mut rand::thread_rng()).unwrap(); let nc_inner = verify_constraints_inner(circuit.clone(), &num_cons); diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index 3d8b0d54..d0663a93 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -1231,7 +1231,7 @@ impl ProductLayerProof { let mut product_layer_proof_encoded: Vec = Vec::new(); product_layer_proof - .serialize(&mut product_layer_proof_encoded) + .serialize_with_mode(&mut product_layer_proof_encoded, Compress::Yes) .unwrap(); let msg = format!( "len_product_layer_proof {:?}", diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 76fb9110..92c8092c 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,6 +1,6 @@ use crate::mipp::MippProof; -use ark_bls12_377::{Bls12_377 as I, G1Affine, G2Affine}; -use ark_ec::{msm::VariableBaseMSM, PairingEngine, ProjectiveCurve}; +use ark_bls12_377::{Bls12_377 as I, G1Affine, G1Projective as G1, G2Affine, G2Projective as G2}; +use ark_ec::{pairing::Pairing, scalar_mul::variable_base::VariableBaseMSM, CurveGroup}; use ark_ff::{BigInteger256, One, PrimeField}; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, @@ -102,7 +102,7 @@ impl Polynomial { prods.sum() } - pub fn commit(&self, ck: &CommitterKey) -> (Vec>, ::Fqk) { + pub fn commit(&self, ck: &CommitterKey) -> (Vec>, ::TargetField) { let 
timer_commit = Timer::new("sqrt_commit"); let timer_list = Timer::new("comm_list"); @@ -119,19 +119,18 @@ impl Polynomial { assert!(comm_list.len() == h_vec.len()); let ipp_timer = Timer::new("ipp"); - let pairings: Vec<_> = comm_list + let left_pairs: Vec<_> = comm_list .clone() .into_par_iter() - .map(|c| ::G1Prepared::from(c.g_product)) - .zip( - h_vec - .into_par_iter() - .map(|h| ::G2Prepared::from(h)), - ) + .map(|c| ::G1Prepared::from(c.g_product)) + .collect(); + let right_pairs: Vec<_> = h_vec + .into_par_iter() + .map(|h| ::G2Prepared::from(h)) .collect(); // compute the IPP commitment - let t = I::product_of_pairings(pairings.iter()); + let t = I::multi_pairing(left_pairs, right_pairs).0; ipp_timer.stop(); timer_commit.stop(); @@ -159,7 +158,7 @@ impl Polynomial { comm_list: Vec>, ck: &CommitterKey, point: &[Scalar], - t: &::Fqk, + t: &::TargetField, ) -> (Commitment, Proof, MippProof) { let m = point.len() / 2; let a = &point[0..m]; @@ -186,14 +185,14 @@ impl Polynomial { if self.chis_b.is_none() { panic!("chis(b) should have been computed for q"); } + // TODO remove that cloning - the whole option thing let chis = self.chis_b.clone().unwrap(); - let chis_repr: Vec = chis.par_iter().map(|y| y.into_repr()).collect(); - assert!(chis_repr.len() == comm_list.len()); + assert!(chis.len() == comm_list.len()); let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); let c_u = - VariableBaseMSM::multi_scalar_mul(a_vec.as_slice(), chis_repr.as_slice()).into_affine(); + ::msm_unchecked(a_vec.as_slice(), chis.as_slice()).into_affine(); timer_msm.stop(); let U: Commitment = Commitment { @@ -207,9 +206,16 @@ impl Polynomial { // MIPP proof that U is the inner product of the vector A // and the vector y, where A is the opening vector to T let timer_mipp_proof = Timer::new("mipp_prove"); - let mipp_proof = - MippProof::::prove::(transcript, ck, a_vec, chis, h_vec, &c_u, t) - .unwrap(); + let mipp_proof = MippProof::::prove::( + transcript, + ck, + 
a_vec, + chis.to_vec(), + h_vec, + &c_u, + t, + ) + .unwrap(); timer_mipp_proof.stop(); // PST proof for opening q at a @@ -231,7 +237,7 @@ impl Polynomial { v: Scalar, pst_proof: &Proof, mipp_proof: &MippProof, - T: &::Fqk, + T: &::TargetField, ) -> bool { let len = point.len(); let a = &point[0..len / 2]; diff --git a/src/transcript.rs b/src/transcript.rs index e1e5087e..1932590f 100644 --- a/src/transcript.rs +++ b/src/transcript.rs @@ -1,7 +1,7 @@ use super::scalar::Scalar; use crate::group::CompressedGroup; use ark_ff::{BigInteger, PrimeField}; -use ark_serialize::CanonicalSerialize; +use ark_serialize::{CanonicalSerialize, Compress}; use merlin::Transcript; pub trait ProofTranscript { @@ -12,20 +12,20 @@ pub trait ProofTranscript { fn challenge_vector(&mut self, label: &'static [u8], len: usize) -> Vec; } - - impl ProofTranscript for Transcript { fn append_protocol_name(&mut self, protocol_name: &'static [u8]) { self.append_message(b"protocol-name", protocol_name); } fn append_scalar(&mut self, label: &'static [u8], scalar: &Scalar) { - self.append_message(label, scalar.into_repr().to_bytes_le().as_slice()); + self.append_message(label, scalar.into_bigint().to_bytes_le().as_slice()); } fn append_point(&mut self, label: &'static [u8], point: &CompressedGroup) { let mut point_encoded = Vec::new(); - point.serialize(&mut point_encoded).unwrap(); + point + .serialize_with_mode(&mut point_encoded, Compress::Yes) + .unwrap(); self.append_message(label, point_encoded.as_slice()); } From 598b92a5bc0b2821121aecf39c930225f25afda1 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 7 Feb 2023 14:57:20 +0100 Subject: [PATCH 12/64] adding gh action --- .github/workflows/testudo.yml | 53 +++++++++++++++++------------------ 1 file changed, 25 insertions(+), 28 deletions(-) diff --git a/.github/workflows/testudo.yml b/.github/workflows/testudo.yml index bce620a4..4f64ad89 100644 --- a/.github/workflows/testudo.yml +++ b/.github/workflows/testudo.yml @@ -2,36 +2,33 @@ name: 
Build and Test Testudo on: push: - branches: [master] + branches: + - main pull_request: - branches: [master] -# The crate ark-ff uses the macro llvm_asm! when emitting asm which returns an -# error because it was deprecated in favour of asm!. We temporarily overcome -# this problem by setting the environment variable below (until the crate -# is updated). -env: - RUSTFLAGS: "--emit asm -C llvm-args=-x86-asm-syntax=intel" + +name: Test jobs: - build_nightly: + cargo-test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Install - run: rustup default nightly - - name: Install rustfmt Components - run: rustup component add rustfmt - # - name: Install clippy - # run: rustup component add clippy - - name: Build - run: cargo build --verbose - - name: Run tests - run: cargo test --release --all-features --verbose - - name: Build examples - run: cargo build --examples --verbose - - name: Check Rustfmt Code Style - run: cargo fmt --all -- --check - # cargo clippy uses cargo check which returns an error when asm is emitted - # we want to emit asm for ark-ff operations so we avoid using clippy for # now - # - name: Check clippy warnings - # run: cargo clippy --all-targets --all-features + - name: Checkout sources + uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Install toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: nightly + profile: minimal + override: true + + - uses: Swatinem/rust-cache@v2 + with: + shared-key: cache-${{ hashFiles('**/Cargo.lock') }} + cache-on-failure: true + + - name: cargo test + working-directory: node + run: RUST_LOG=info cargo test --all --all-features -- --nocapture \ No newline at end of file From edf077bec69e6d8e8864a61d2df5a939fda0e2f0 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 7 Feb 2023 14:58:46 +0100 Subject: [PATCH 13/64] correct workflow item --- .github/workflows/testudo.yml | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git 
a/.github/workflows/testudo.yml b/.github/workflows/testudo.yml index 4f64ad89..0b388390 100644 --- a/.github/workflows/testudo.yml +++ b/.github/workflows/testudo.yml @@ -1,12 +1,6 @@ name: Build and Test Testudo -on: - push: - branches: - - main - pull_request: - -name: Test +on: [push, pull_request] jobs: cargo-test: From 3835371568b1e24bd1d0fd2c8de00b3d1d87bd2e Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 7 Feb 2023 15:03:05 +0100 Subject: [PATCH 14/64] correct working dir and msrv --- .github/workflows/testudo.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/testudo.yml b/.github/workflows/testudo.yml index 0b388390..892a0514 100644 --- a/.github/workflows/testudo.yml +++ b/.github/workflows/testudo.yml @@ -14,7 +14,7 @@ jobs: - name: Install toolchain uses: actions-rs/toolchain@v1 with: - toolchain: nightly + toolchain: stable profile: minimal override: true @@ -24,5 +24,4 @@ jobs: cache-on-failure: true - name: cargo test - working-directory: node run: RUST_LOG=info cargo test --all --all-features -- --nocapture \ No newline at end of file From cdf8bd884e989381db85c9b3d1f083e2a34562f3 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Tue, 7 Feb 2023 15:06:52 +0000 Subject: [PATCH 15/64] remove unnecessary stuff --- src/dense_mlpoly.rs | 14 -- src/lib.rs | 1 - src/nizk/mod.rs | 537 +------------------------------------------ src/r1csinstance.rs | 12 - src/r1csproof.rs | 26 +-- src/random.rs | 19 +- src/sparse_mlpoly.rs | 24 -- src/sumcheck.rs | 476 -------------------------------------- src/transcript.rs | 69 ------ src/unipoly.rs | 25 +- 10 files changed, 14 insertions(+), 1189 deletions(-) delete mode 100644 src/transcript.rs diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 6828c491..d8e2e045 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -2,7 +2,6 @@ use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; - use super::commitments::{Commitments, MultiCommitGens}; use 
super::errors::ProofVerifyError; use super::group::{ @@ -13,7 +12,6 @@ use super::math::Math; use super::nizk::{DotProductProofGens, DotProductProofLog}; use super::random::RandomTape; use super::scalar::Scalar; -use super::transcript::{AppendToTranscript, ProofTranscript}; use ark_bls12_377::Bls12_377 as I; use ark_ff::{One, UniformRand, Zero}; use ark_poly::MultilinearExtension; @@ -21,7 +19,6 @@ use ark_poly_commit::multilinear_pc::data_structures::{CommitterKey, VerifierKey use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::*; use core::ops::Index; -use merlin::Transcript; use std::ops::{Add, AddAssign, Neg, Sub, SubAssign}; #[cfg(feature = "multicore")] @@ -461,16 +458,6 @@ impl Index for DensePolynomial { } } -impl AppendToTranscript for PolyCommitment { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) { - transcript.append_message(label, b"poly_commitment_begin"); - for i in 0..self.C.len() { - transcript.append_point(b"poly_commitment_share", &self.C[i]); - } - transcript.append_message(label, b"poly_commitment_end"); - } -} - impl AppendToPoseidon for PolyCommitment { fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { for i in 0..self.C.len() { @@ -589,7 +576,6 @@ impl PolyEvalProof { #[cfg(test)] mod tests { - use crate::parameters::poseidon_params; diff --git a/src/lib.rs b/src/lib.rs index f172b83c..2f38c69a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -36,7 +36,6 @@ mod sparse_mlpoly; mod sqrt_pst; mod sumcheck; mod timer; -mod transcript; mod unipoly; pub mod parameters; diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index e9044062..14fb4749 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -1,12 +1,10 @@ #![allow(clippy::too_many_arguments)] use crate::math::Math; -use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; +use crate::poseidon_transcript::PoseidonTranscript; use super::commitments::{Commitments, MultiCommitGens}; use 
super::errors::ProofVerifyError; -use super::group::{ - CompressGroupElement, CompressedGroup, DecompressGroupElement, GroupElement, UnpackGroupElement, -}; +use super::group::{CompressGroupElement, CompressedGroup, UnpackGroupElement}; use super::random::RandomTape; use super::scalar::Scalar; use ark_ec::ProjectiveCurve; @@ -16,408 +14,6 @@ use ark_serialize::*; mod bullet; use bullet::BulletReductionProof; -#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct KnowledgeProof { - alpha: CompressedGroup, - z1: Scalar, - z2: Scalar, -} - -impl KnowledgeProof { - fn protocol_name() -> &'static [u8] { - b"knowledge proof" - } - - pub fn prove( - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, - x: &Scalar, - r: &Scalar, - ) -> (KnowledgeProof, CompressedGroup) { - // transcript.append_protocol_name(KnowledgeProof::protocol_name()); - - // produce two random Scalars - let t1 = random_tape.random_scalar(b"t1"); - let t2 = random_tape.random_scalar(b"t2"); - - let C = x.commit(r, gens_n).compress(); - C.append_to_poseidon(transcript); - - let alpha = t1.commit(&t2, gens_n).compress(); - alpha.append_to_poseidon(transcript); - - let c = transcript.challenge_scalar(); - - let z1 = c * x + t1; - let z2 = c * r + t2; - - (KnowledgeProof { alpha, z1, z2 }, C) - } - - pub fn verify( - &self, - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - C: &CompressedGroup, - ) -> Result<(), ProofVerifyError> { - // transcript.append_protocol_name(KnowledgeProof::protocol_name()); - C.append_to_poseidon(transcript); - self.alpha.append_to_poseidon(transcript); - - let c = transcript.challenge_scalar(); - - let lhs = self.z1.commit(&self.z2, gens_n).compress(); - let rhs = (C.unpack()?.mul(c.into_repr()) + self.alpha.unpack()?).compress(); - - if lhs == rhs { - Ok(()) - } else { - Err(ProofVerifyError::InternalError) - } - } -} - -#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct 
EqualityProof { - alpha: CompressedGroup, - z: Scalar, -} - -impl EqualityProof { - fn protocol_name() -> &'static [u8] { - b"equality proof" - } - - pub fn prove( - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, - v1: &Scalar, - s1: &Scalar, - v2: &Scalar, - s2: &Scalar, - ) -> (EqualityProof, CompressedGroup, CompressedGroup) { - // transcript.append_protocol_name(EqualityProof::protocol_name()); - - // produce a random Scalar - let r = random_tape.random_scalar(b"r"); - - let C1 = v1.commit(s1, gens_n).compress(); - transcript.append_point(&C1); - - let C2 = v2.commit(s2, gens_n).compress(); - transcript.append_point(&C2); - - let alpha = gens_n.h.mul(r.into_repr()).compress(); - transcript.append_point(&alpha); - - let c = transcript.challenge_scalar(); - - let z = c * ((*s1) - s2) + r; - - (EqualityProof { alpha, z }, C1, C2) - } - - pub fn verify( - &self, - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - C1: &CompressedGroup, - C2: &CompressedGroup, - ) -> Result<(), ProofVerifyError> { - // transcript.append_protocol_name(EqualityProof::protocol_name()); - - transcript.append_point(C1); - transcript.append_point(C2); - transcript.append_point(&self.alpha); - - let c = transcript.challenge_scalar(); - let rhs = { - let C = C1.unpack()? 
- C2.unpack()?; - (C.mul(c.into_repr()) + self.alpha.unpack()?).compress() - }; - println!("rhs {:?}", rhs); - - let lhs = gens_n.h.mul(self.z.into_repr()).compress(); - println!("lhs {:?}", lhs); - if lhs == rhs { - Ok(()) - } else { - Err(ProofVerifyError::InternalError) - } - } -} - -#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct ProductProof { - alpha: CompressedGroup, - beta: CompressedGroup, - delta: CompressedGroup, - z: Vec, -} - -impl ProductProof { - fn protocol_name() -> &'static [u8] { - b"product proof" - } - - pub fn prove( - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, - x: &Scalar, - rX: &Scalar, - y: &Scalar, - rY: &Scalar, - z: &Scalar, - rZ: &Scalar, - ) -> ( - ProductProof, - CompressedGroup, - CompressedGroup, - CompressedGroup, - ) { - // transcript.append_protocol_name(ProductProof::protocol_name()); - - // produce five random Scalar - let b1 = random_tape.random_scalar(b"b1"); - let b2 = random_tape.random_scalar(b"b2"); - let b3 = random_tape.random_scalar(b"b3"); - let b4 = random_tape.random_scalar(b"b4"); - let b5 = random_tape.random_scalar(b"b5"); - - let X_unc = x.commit(rX, gens_n); - - let X = X_unc.compress(); - transcript.append_point(&X); - let X_new = GroupElement::decompress(&X); - - assert_eq!(X_unc, X_new.unwrap()); - - let Y = y.commit(rY, gens_n).compress(); - transcript.append_point(&Y); - - let Z = z.commit(rZ, gens_n).compress(); - transcript.append_point(&Z); - - let alpha = b1.commit(&b2, gens_n).compress(); - transcript.append_point(&alpha); - - let beta = b3.commit(&b4, gens_n).compress(); - transcript.append_point(&beta); - - let delta = { - let gens_X = &MultiCommitGens { - n: 1, - G: vec![GroupElement::decompress(&X).unwrap()], - h: gens_n.h, - }; - b3.commit(&b5, gens_X).compress() - }; - transcript.append_point(&delta); - - let c = transcript.challenge_scalar(); - - let z1 = b1 + c * x; - let z2 = b2 + c * rX; - let z3 = b3 + c * y; - 
let z4 = b4 + c * rY; - let z5 = b5 + c * ((*rZ) - (*rX) * y); - let z = [z1, z2, z3, z4, z5].to_vec(); - - ( - ProductProof { - alpha, - beta, - delta, - z, - }, - X, - Y, - Z, - ) - } - - fn check_equality( - P: &CompressedGroup, - X: &CompressedGroup, - c: &Scalar, - gens_n: &MultiCommitGens, - z1: &Scalar, - z2: &Scalar, - ) -> bool { - println!("{:?}", X); - let lhs = (GroupElement::decompress(P).unwrap() - + GroupElement::decompress(X).unwrap().mul(c.into_repr())) - .compress(); - let rhs = z1.commit(z2, gens_n).compress(); - - lhs == rhs - } - - pub fn verify( - &self, - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - X: &CompressedGroup, - Y: &CompressedGroup, - Z: &CompressedGroup, - ) -> Result<(), ProofVerifyError> { - // transcript.append_protocol_name(ProductProof::protocol_name()); - - X.append_to_poseidon(transcript); - Y.append_to_poseidon(transcript); - Z.append_to_poseidon(transcript); - self.alpha.append_to_poseidon(transcript); - self.beta.append_to_poseidon(transcript); - self.delta.append_to_poseidon(transcript); - - let z1 = self.z[0]; - let z2 = self.z[1]; - let z3 = self.z[2]; - let z4 = self.z[3]; - let z5 = self.z[4]; - - let c = transcript.challenge_scalar(); - - if ProductProof::check_equality(&self.alpha, X, &c, gens_n, &z1, &z2) - && ProductProof::check_equality(&self.beta, Y, &c, gens_n, &z3, &z4) - && ProductProof::check_equality( - &self.delta, - Z, - &c, - &MultiCommitGens { - n: 1, - G: vec![X.unpack()?], - h: gens_n.h, - }, - &z3, - &z5, - ) - { - Ok(()) - } else { - Err(ProofVerifyError::InternalError) - } - } -} - -#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct DotProductProof { - delta: CompressedGroup, - beta: CompressedGroup, - z: Vec, - z_delta: Scalar, - z_beta: Scalar, -} - -impl DotProductProof { - fn protocol_name() -> &'static [u8] { - b"dot product proof" - } - - pub fn compute_dotproduct(a: &[Scalar], b: &[Scalar]) -> Scalar { - assert_eq!(a.len(), b.len()); - 
(0..a.len()).map(|i| a[i] * b[i]).sum() - } - - pub fn prove( - gens_1: &MultiCommitGens, - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, - x_vec: &[Scalar], - blind_x: &Scalar, - a_vec: &[Scalar], - y: &Scalar, - blind_y: &Scalar, - ) -> (DotProductProof, CompressedGroup, CompressedGroup) { - // transcript.append_protocol_name(DotProductProof::protocol_name()); - - let n = x_vec.len(); - assert_eq!(x_vec.len(), a_vec.len()); - assert_eq!(gens_n.n, a_vec.len()); - assert_eq!(gens_1.n, 1); - - // produce randomness for the proofs - let d_vec = random_tape.random_vector(b"d_vec", n); - let r_delta = random_tape.random_scalar(b"r_delta"); - let r_beta = random_tape.random_scalar(b"r_beta"); - - let Cx = x_vec.commit(blind_x, gens_n).compress(); - Cx.append_to_poseidon(transcript); - - let Cy = y.commit(blind_y, gens_1).compress(); - Cy.append_to_poseidon(transcript); - - transcript.append_scalar_vector(a_vec); - - let delta = d_vec.commit(&r_delta, gens_n).compress(); - delta.append_to_poseidon(transcript); - - let dotproduct_a_d = DotProductProof::compute_dotproduct(a_vec, &d_vec); - - let beta = dotproduct_a_d.commit(&r_beta, gens_1).compress(); - beta.append_to_poseidon(transcript); - - let c = transcript.challenge_scalar(); - - let z = (0..d_vec.len()) - .map(|i| c * x_vec[i] + d_vec[i]) - .collect::>(); - - let z_delta = c * blind_x + r_delta; - let z_beta = c * blind_y + r_beta; - - ( - DotProductProof { - delta, - beta, - z, - z_delta, - z_beta, - }, - Cx, - Cy, - ) - } - - pub fn verify( - &self, - gens_1: &MultiCommitGens, - gens_n: &MultiCommitGens, - transcript: &mut PoseidonTranscript, - a: &[Scalar], - Cx: &CompressedGroup, - Cy: &CompressedGroup, - ) -> Result<(), ProofVerifyError> { - assert_eq!(gens_n.n, a.len()); - assert_eq!(gens_1.n, 1); - - // transcript.append_protocol_name(DotProductProof::protocol_name()); - Cx.append_to_poseidon(transcript); - Cy.append_to_poseidon(transcript); - 
transcript.append_scalar_vector(a); - self.delta.append_to_poseidon(transcript); - self.beta.append_to_poseidon(transcript); - - let c = transcript.challenge_scalar(); - - let mut result = Cx.unpack()?.mul(c.into_repr()) + self.delta.unpack()? - == self.z.commit(&self.z_delta, gens_n); - - let dotproduct_z_a = DotProductProof::compute_dotproduct(&self.z, a); - result &= Cy.unpack()?.mul(c.into_repr()) + self.beta.unpack()? - == dotproduct_z_a.commit(&self.z_beta, gens_1); - if result { - Ok(()) - } else { - Err(ProofVerifyError::InternalError) - } - } -} - #[derive(Clone)] pub struct DotProductProofGens { n: usize, @@ -596,133 +192,6 @@ mod tests { use super::*; use ark_std::UniformRand; - #[test] - fn check_knowledgeproof() { - let mut rng = ark_std::rand::thread_rng(); - - let gens_1 = MultiCommitGens::new(1, b"test-knowledgeproof"); - - let x = Scalar::rand(&mut rng); - let r = Scalar::rand(&mut rng); - - let params = poseidon_params(); - - let mut random_tape = RandomTape::new(b"proof"); - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (proof, committed_value) = - KnowledgeProof::prove(&gens_1, &mut prover_transcript, &mut random_tape, &x, &r); - - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&gens_1, &mut verifier_transcript, &committed_value) - .is_ok()); - } - - #[test] - fn check_equalityproof() { - let mut rng = ark_std::rand::thread_rng(); - let params = poseidon_params(); - - let gens_1 = MultiCommitGens::new(1, b"test-equalityproof"); - let v1 = Scalar::rand(&mut rng); - let v2 = v1; - let s1 = Scalar::rand(&mut rng); - let s2 = Scalar::rand(&mut rng); - - let mut random_tape = RandomTape::new(b"proof"); - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (proof, C1, C2) = EqualityProof::prove( - &gens_1, - &mut prover_transcript, - &mut random_tape, - &v1, - &s1, - &v2, - &s2, - ); - - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&gens_1, 
&mut verifier_transcript, &C1, &C2) - .is_ok()); - } - - #[test] - fn check_productproof() { - let mut rng = ark_std::rand::thread_rng(); - let pt = GroupElement::rand(&mut rng); - let pt_c = pt.compress(); - let pt2 = GroupElement::decompress(&pt_c).unwrap(); - assert_eq!(pt, pt2); - let params = poseidon_params(); - - let gens_1 = MultiCommitGens::new(1, b"test-productproof"); - let x = Scalar::rand(&mut rng); - let rX = Scalar::rand(&mut rng); - let y = Scalar::rand(&mut rng); - let rY = Scalar::rand(&mut rng); - let z = x * y; - let rZ = Scalar::rand(&mut rng); - - let mut random_tape = RandomTape::new(b"proof"); - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (proof, X, Y, Z) = ProductProof::prove( - &gens_1, - &mut prover_transcript, - &mut random_tape, - &x, - &rX, - &y, - &rY, - &z, - &rZ, - ); - - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&gens_1, &mut verifier_transcript, &X, &Y, &Z) - .is_ok()); - } - - #[test] - fn check_dotproductproof() { - let mut rng = ark_std::rand::thread_rng(); - - let n = 1024; - - let gens_1 = MultiCommitGens::new(1, b"test-two"); - let gens_1024 = MultiCommitGens::new(n, b"test-1024"); - let params = poseidon_params(); - - let mut x: Vec = Vec::new(); - let mut a: Vec = Vec::new(); - for _ in 0..n { - x.push(Scalar::rand(&mut rng)); - a.push(Scalar::rand(&mut rng)); - } - let y = DotProductProofLog::compute_dotproduct(&x, &a); - let r_x = Scalar::rand(&mut rng); - let r_y = Scalar::rand(&mut rng); - - let mut random_tape = RandomTape::new(b"proof"); - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (proof, Cx, Cy) = DotProductProof::prove( - &gens_1, - &gens_1024, - &mut prover_transcript, - &mut random_tape, - &x, - &r_x, - &a, - &y, - &r_y, - ); - - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&gens_1, &gens_1024, &mut verifier_transcript, &a, &Cx, &Cy) - .is_ok()); - } #[test] fn check_dotproductproof_log() 
{ @@ -734,7 +203,7 @@ mod tests { let x: Vec = (0..n).map(|_i| Scalar::rand(&mut rng)).collect(); let a: Vec = (0..n).map(|_i| Scalar::rand(&mut rng)).collect(); - let y = DotProductProof::compute_dotproduct(&x, &a); + let y = DotProductProofLog::compute_dotproduct(&x, &a); let r_x = Scalar::rand(&mut rng); let r_y = Scalar::rand(&mut rng); diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index 323d0402..218d1044 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -1,5 +1,4 @@ use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; -use crate::transcript::AppendToTranscript; use super::dense_mlpoly::DensePolynomial; use super::errors::ProofVerifyError; @@ -15,8 +14,6 @@ use ark_ff::Field; use ark_serialize::*; use ark_std::{One, UniformRand, Zero}; use digest::{ExtendableOutput, Input}; - -use merlin::Transcript; use sha3::Shake256; #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)] @@ -58,15 +55,6 @@ pub struct R1CSCommitment { comm: SparseMatPolyCommitment, } -impl AppendToTranscript for R1CSCommitment { - fn append_to_transcript(&self, _label: &'static [u8], transcript: &mut Transcript) { - transcript.append_u64(b"num_cons", self.num_cons as u64); - transcript.append_u64(b"num_vars", self.num_vars as u64); - transcript.append_u64(b"num_inputs", self.num_inputs as u64); - self.comm.append_to_transcript(b"comm", transcript); - } -} - impl AppendToPoseidon for R1CSCommitment { fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { transcript.append_u64(self.num_cons as u64); diff --git a/src/r1csproof.rs b/src/r1csproof.rs index f9f4569b..879396f0 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -1,5 +1,4 @@ #![allow(clippy::too_many_arguments)] -use super::commitments::MultiCommitGens; use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; use super::errors::ProofVerifyError; use crate::constraints::{VerifierCircuit, VerifierConfig}; @@ -46,31 +45,9 @@ pub struct R1CSProof { 
pub t: ::Fqk, pub mipp_proof: MippProof, } -#[derive(Clone)] -pub struct R1CSSumcheckGens { - gens_1: MultiCommitGens, - gens_3: MultiCommitGens, - gens_4: MultiCommitGens, -} - -// TODO: fix passing gens_1_ref -impl R1CSSumcheckGens { - pub fn new(label: &'static [u8], gens_1_ref: &MultiCommitGens) -> Self { - let gens_1 = gens_1_ref.clone(); - let gens_3 = MultiCommitGens::new(3, label); - let gens_4 = MultiCommitGens::new(4, label); - - R1CSSumcheckGens { - gens_1, - gens_3, - gens_4, - } - } -} #[derive(Clone)] pub struct R1CSGens { - gens_sc: R1CSSumcheckGens, gens_pc: PolyCommitmentGens, } @@ -78,8 +55,7 @@ impl R1CSGens { pub fn new(label: &'static [u8], _num_cons: usize, num_vars: usize) -> Self { let num_poly_vars = num_vars.log_2(); let gens_pc = PolyCommitmentGens::new(num_poly_vars, label); - let gens_sc = R1CSSumcheckGens::new(label, &gens_pc.gens.gens_1); - R1CSGens { gens_sc, gens_pc } + R1CSGens { gens_pc } } } diff --git a/src/random.rs b/src/random.rs index 56793f78..f54c6bbd 100644 --- a/src/random.rs +++ b/src/random.rs @@ -1,28 +1,27 @@ use super::scalar::Scalar; -use super::transcript::ProofTranscript; +use crate::{parameters::poseidon_params, poseidon_transcript::PoseidonTranscript}; use ark_std::UniformRand; -use merlin::Transcript; pub struct RandomTape { - tape: Transcript, + tape: PoseidonTranscript, } impl RandomTape { - pub fn new(name: &'static [u8]) -> Self { + pub fn new(_name: &'static [u8]) -> Self { let tape = { let mut rng = ark_std::rand::thread_rng(); - let mut tape = Transcript::new(name); - tape.append_scalar(b"init_randomness", &Scalar::rand(&mut rng)); + let mut tape = PoseidonTranscript::new(&poseidon_params()); + tape.append_scalar(&Scalar::rand(&mut rng)); tape }; Self { tape } } - pub fn random_scalar(&mut self, label: &'static [u8]) -> Scalar { - self.tape.challenge_scalar(label) + pub fn random_scalar(&mut self, _label: &'static [u8]) -> Scalar { + self.tape.challenge_scalar() } - pub fn random_vector(&mut self, 
label: &'static [u8], len: usize) -> Vec { - self.tape.challenge_vector(label, len) + pub fn random_vector(&mut self, _label: &'static [u8], len: usize) -> Vec { + self.tape.challenge_vector(len) } } diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index 3d8b0d54..4063e840 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -13,11 +13,9 @@ use super::product_tree::{DotProductCircuit, ProductCircuit, ProductCircuitEvalP use super::random::RandomTape; use super::scalar::Scalar; use super::timer::Timer; -use super::transcript::AppendToTranscript; use ark_ff::{Field, One, Zero}; use ark_serialize::*; use core::cmp::Ordering; -use merlin::Transcript; #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)] pub struct SparseMatEntry { @@ -209,14 +207,6 @@ impl DerefsEvalProof { } } -impl AppendToTranscript for DerefsCommitment { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) { - transcript.append_message(b"derefs_commitment", b"begin_derefs_commitment"); - self.comm_ops_val.append_to_transcript(label, transcript); - transcript.append_message(b"derefs_commitment", b"end_derefs_commitment"); - } -} - impl AppendToPoseidon for DerefsCommitment { fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { self.comm_ops_val.append_to_poseidon(transcript); @@ -336,20 +326,6 @@ pub struct SparseMatPolyCommitment { comm_comb_mem: PolyCommitment, } -impl AppendToTranscript for SparseMatPolyCommitment { - fn append_to_transcript(&self, _label: &'static [u8], transcript: &mut Transcript) { - transcript.append_u64(b"batch_size", self.batch_size as u64); - transcript.append_u64(b"num_ops", self.num_ops as u64); - transcript.append_u64(b"num_mem_cells", self.num_mem_cells as u64); - self - .comm_comb_ops - .append_to_transcript(b"comm_comb_ops", transcript); - self - .comm_comb_mem - .append_to_transcript(b"comm_comb_mem", transcript); - } -} - impl AppendToPoseidon for SparseMatPolyCommitment { fn 
append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { transcript.append_u64(self.batch_size as u64); diff --git a/src/sumcheck.rs b/src/sumcheck.rs index b34f0375..617bfaac 100644 --- a/src/sumcheck.rs +++ b/src/sumcheck.rs @@ -60,125 +60,6 @@ impl SumcheckInstanceProof { } } -// #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -// pub struct ZKSumcheckInstanceProof { -// comm_polys: Vec, -// comm_evals: Vec, -// proofs: Vec, -// } - -// impl ZKSumcheckInstanceProof { -// pub fn new( -// comm_polys: Vec, -// comm_evals: Vec, -// proofs: Vec, -// ) -> Self { -// ZKSumcheckInstanceProof { -// comm_polys, -// comm_evals, -// proofs, -// } -// } - -// pub fn verify( -// &self, -// comm_claim: &CompressedGroup, -// num_rounds: usize, -// degree_bound: usize, -// gens_1: &MultiCommitGens, -// gens_n: &MultiCommitGens, -// transcript: &mut Transcript, -// ) -> Result<(CompressedGroup, Vec), ProofVerifyError> { -// // verify degree bound -// assert_eq!(gens_n.n, degree_bound + 1); - -// // verify that there is a univariate polynomial for each round -// assert_eq!(self.comm_polys.len(), num_rounds); -// assert_eq!(self.comm_evals.len(), num_rounds); - -// let mut r: Vec = Vec::new(); -// for i in 0..self.comm_polys.len() { -// let comm_poly = &self.comm_polys[i]; - -// // append the prover's polynomial to the transcript -// comm_poly.append_to_transcript(b"comm_poly", transcript); - -// //derive the verifier's challenge for the next round -// let r_i = transcript.challenge_scalar(b"challenge_nextround"); - -// // verify the proof of sum-check and evals -// let res = { -// let comm_claim_per_round = if i == 0 { -// comm_claim -// } else { -// &self.comm_evals[i - 1] -// }; -// let mut comm_eval = &self.comm_evals[i]; - -// // add two claims to transcript -// comm_claim_per_round.append_to_transcript(transcript); -// comm_eval.append_to_transcript(transcript); - -// // produce two weights -// let w = transcript.challenge_vector(2); - -// // compute a 
weighted sum of the RHS -// let comm_target = GroupElement::vartime_multiscalar_mul( -// w.as_slice(), -// iter::once(&comm_claim_per_round) -// .chain(iter::once(&comm_eval)) -// .map(|pt| GroupElement::decompress(pt).unwrap()) -// .collect::>() -// .as_slice(), -// ) -// .compress(); - -// let a = { -// // the vector to use to decommit for sum-check test -// let a_sc = { -// let mut a = vec![Scalar::one(); degree_bound + 1]; -// a[0] += Scalar::one(); -// a -// }; - -// // the vector to use to decommit for evaluation -// let a_eval = { -// let mut a = vec![Scalar::one(); degree_bound + 1]; -// for j in 1..a.len() { -// a[j] = a[j - 1] * r_i; -// } -// a -// }; - -// // take weighted sum of the two vectors using w -// assert_eq!(a_sc.len(), a_eval.len()); -// (0..a_sc.len()) -// .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i]) -// .collect::>() -// }; - -// self.proofs[i] -// .verify( -// gens_1, -// gens_n, -// transcript, -// &a, -// &self.comm_polys[i], -// &comm_target, -// ) -// .is_ok() -// }; -// if !res { -// return Err(ProofVerifyError::InternalError); -// } - -// r.push(r_i); -// } - -// Ok((self.comm_evals[&self.comm_evals.len() - 1].clone(), r)) -// } -// } - impl SumcheckInstanceProof { pub fn prove_cubic_with_additive_term( claim: &Scalar, @@ -554,360 +435,3 @@ impl SumcheckInstanceProof { ) } } - -// impl ZKSumcheckInstanceProof { -// pub fn prove_quad( -// claim: &Scalar, -// blind_claim: &Scalar, -// num_rounds: usize, -// poly_A: &mut DensePolynomial, -// poly_B: &mut DensePolynomial, -// comb_func: F, -// gens_1: &MultiCommitGens, -// gens_n: &MultiCommitGens, -// transcript: &mut Transcript, -// random_tape: &mut RandomTape, -// ) -> (Self, Vec, Vec, Scalar) -// where -// F: Fn(&Scalar, &Scalar) -> Scalar, -// { -// let (blinds_poly, blinds_evals) = ( -// random_tape.random_vector(b"blinds_poly", num_rounds), -// random_tape.random_vector(b"blinds_evals", num_rounds), -// ); -// let mut claim_per_round = *claim; -// let mut comm_claim_per_round = 
claim_per_round.commit(blind_claim, gens_1).compress(); - -// let mut r: Vec = Vec::new(); -// let mut comm_polys: Vec = Vec::new(); -// let mut comm_evals: Vec = Vec::new(); -// let mut proofs: Vec = Vec::new(); - -// for j in 0..num_rounds { -// let (poly, comm_poly) = { -// let mut eval_point_0 = Scalar::zero(); -// let mut eval_point_2 = Scalar::zero(); - -// let len = poly_A.len() / 2; -// for i in 0..len { -// // eval 0: bound_func is A(low) -// eval_point_0 += comb_func(&poly_A[i], &poly_B[i]); - -// // eval 2: bound_func is -A(low) + 2*A(high) -// let poly_A_bound_point = poly_A[len + i] + poly_A[len + i] - poly_A[i]; -// let poly_B_bound_point = poly_B[len + i] + poly_B[len + i] - poly_B[i]; -// eval_point_2 += comb_func(&poly_A_bound_point, &poly_B_bound_point); -// } - -// let evals = vec![eval_point_0, claim_per_round - eval_point_0, eval_point_2]; -// let poly = UniPoly::from_evals(&evals); -// let comm_poly = poly.commit(gens_n, &blinds_poly[j]).compress(); -// (poly, comm_poly) -// }; - -// // append the prover's message to the transcript -// comm_poly.append_to_transcript(b"comm_poly", transcript); -// comm_polys.push(comm_poly); - -// //derive the verifier's challenge for the next round -// let r_j = transcript.challenge_scalar(b"challenge_nextround"); - -// // bound all tables to the verifier's challenege -// poly_A.bound_poly_var_top(&r_j); -// poly_B.bound_poly_var_top(&r_j); - -// // produce a proof of sum-check and of evaluation -// let (proof, claim_next_round, comm_claim_next_round) = { -// let eval = poly.evaluate(&r_j); -// let comm_eval = eval.commit(&blinds_evals[j], gens_1).compress(); - -// // we need to prove the following under homomorphic commitments: -// // (1) poly(0) + poly(1) = claim_per_round -// // (2) poly(r_j) = eval - -// // Our technique is to leverage dot product proofs: -// // (1) we can prove: = claim_per_round -// // (2) we can prove: >() -// .as_slice(), -// ) -// .compress(); - -// let blind = { -// let blind_sc = if 
j == 0 { -// blind_claim -// } else { -// &blinds_evals[j - 1] -// }; - -// let blind_eval = &blinds_evals[j]; - -// w[0] * blind_sc + w[1] * blind_eval -// }; -// assert_eq!(target.commit(&blind, gens_1).compress(), comm_target); - -// let a = { -// // the vector to use to decommit for sum-check test -// let a_sc = { -// let mut a = vec![Scalar::one(); poly.degree() + 1]; -// a[0] += Scalar::one(); -// a -// }; - -// // the vector to use to decommit for evaluation -// let a_eval = { -// let mut a = vec![Scalar::one(); poly.degree() + 1]; -// for j in 1..a.len() { -// a[j] = a[j - 1] * r_j; -// } -// a -// }; - -// // take weighted sum of the two vectors using w -// assert_eq!(a_sc.len(), a_eval.len()); -// (0..a_sc.len()) -// .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i]) -// .collect::>() -// }; - -// let (proof, _comm_poly, _comm_sc_eval) = DotProductProof::prove( -// gens_1, -// gens_n, -// transcript, -// random_tape, -// &poly.as_vec(), -// &blinds_poly[j], -// &a, -// &target, -// &blind, -// ); - -// (proof, eval, comm_eval) -// }; - -// claim_per_round = claim_next_round; -// comm_claim_per_round = comm_claim_next_round; - -// proofs.push(proof); -// r.push(r_j); -// comm_evals.push(comm_claim_per_round.clone()); -// } - -// ( -// ZKSumcheckInstanceProof::new(comm_polys, comm_evals, proofs), -// r, -// vec![poly_A[0], poly_B[0]], -// blinds_evals[num_rounds - 1], -// ) -// } - -// pub fn prove_cubic_with_additive_term( -// claim: &Scalar, -// blind_claim: &Scalar, -// num_rounds: usize, -// poly_A: &mut DensePolynomial, -// poly_B: &mut DensePolynomial, -// poly_C: &mut DensePolynomial, -// poly_D: &mut DensePolynomial, -// comb_func: F, -// gens_1: &MultiCommitGens, -// gens_n: &MultiCommitGens, -// transcript: &mut Transcript, -// random_tape: &mut RandomTape, -// ) -> (Self, Vec, Vec, Scalar) -// where -// F: Fn(&Scalar, &Scalar, &Scalar, &Scalar) -> Scalar, -// { -// let (blinds_poly, blinds_evals) = ( -// random_tape.random_vector(b"blinds_poly", 
num_rounds), -// random_tape.random_vector(b"blinds_evals", num_rounds), -// ); - -// let mut claim_per_round = *claim; -// let mut comm_claim_per_round = claim_per_round.commit(blind_claim, gens_1).compress(); - -// let mut r: Vec = Vec::new(); -// let mut comm_polys: Vec = Vec::new(); -// let mut comm_evals: Vec = Vec::new(); -// let mut proofs: Vec = Vec::new(); - -// for j in 0..num_rounds { -// let (poly, comm_poly) = { -// let mut eval_point_0 = Scalar::zero(); -// let mut eval_point_2 = Scalar::zero(); -// let mut eval_point_3 = Scalar::zero(); - -// let len = poly_A.len() / 2; -// for i in 0..len { -// // eval 0: bound_func is A(low) -// eval_point_0 += comb_func(&poly_A[i], &poly_B[i], &poly_C[i], &poly_D[i]); - -// // eval 2: bound_func is -A(low) + 2*A(high) -// let poly_A_bound_point = poly_A[len + i] + poly_A[len + i] - poly_A[i]; -// let poly_B_bound_point = poly_B[len + i] + poly_B[len + i] - poly_B[i]; -// let poly_C_bound_point = poly_C[len + i] + poly_C[len + i] - poly_C[i]; -// let poly_D_bound_point = poly_D[len + i] + poly_D[len + i] - poly_D[i]; -// eval_point_2 += comb_func( -// &poly_A_bound_point, -// &poly_B_bound_point, -// &poly_C_bound_point, -// &poly_D_bound_point, -// ); - -// // eval 3: bound_func is -2A(low) + 3A(high); computed incrementally with bound_func applied to eval(2) -// let poly_A_bound_point = poly_A_bound_point + poly_A[len + i] - poly_A[i]; -// let poly_B_bound_point = poly_B_bound_point + poly_B[len + i] - poly_B[i]; -// let poly_C_bound_point = poly_C_bound_point + poly_C[len + i] - poly_C[i]; -// let poly_D_bound_point = poly_D_bound_point + poly_D[len + i] - poly_D[i]; -// eval_point_3 += comb_func( -// &poly_A_bound_point, -// &poly_B_bound_point, -// &poly_C_bound_point, -// &poly_D_bound_point, -// ); -// } - -// let evals = vec![ -// eval_point_0, -// claim_per_round - eval_point_0, -// eval_point_2, -// eval_point_3, -// ]; -// let poly = UniPoly::from_evals(&evals); -// let comm_poly = poly.commit(gens_n, 
&blinds_poly[j]).compress(); -// (poly, comm_poly) -// }; - -// // append the prover's message to the transcript -// comm_poly.append_to_transcript(b"comm_poly", transcript); -// comm_polys.push(comm_poly); - -// //derive the verifier's challenge for the next round -// let r_j = transcript.challenge_scalar(b"challenge_nextround"); - -// // bound all tables to the verifier's challenege -// poly_A.bound_poly_var_top(&r_j); -// poly_B.bound_poly_var_top(&r_j); -// poly_C.bound_poly_var_top(&r_j); -// poly_D.bound_poly_var_top(&r_j); - -// // produce a proof of sum-check and of evaluation -// let (proof, claim_next_round, comm_claim_next_round) = { -// let eval = poly.evaluate(&r_j); -// let comm_eval = eval.commit(&blinds_evals[j], gens_1).compress(); - -// // we need to prove the following under homomorphic commitments: -// // (1) poly(0) + poly(1) = claim_per_round -// // (2) poly(r_j) = eval - -// // Our technique is to leverage dot product proofs: -// // (1) we can prove: = claim_per_round -// // (2) we can prove: >() -// .as_slice(), -// ) -// .compress(); - -// let blind = { -// let blind_sc = if j == 0 { -// blind_claim -// } else { -// &blinds_evals[j - 1] -// }; - -// let blind_eval = &blinds_evals[j]; - -// w[0] * blind_sc + w[1] * blind_eval -// }; - -// let res = target.commit(&blind, gens_1); - -// assert_eq!(res.compress(), comm_target); - -// let a = { -// // the vector to use to decommit for sum-check test -// let a_sc = { -// let mut a = vec![Scalar::one(); poly.degree() + 1]; -// a[0] += Scalar::one(); -// a -// }; - -// // the vector to use to decommit for evaluation -// let a_eval = { -// let mut a = vec![Scalar::one(); poly.degree() + 1]; -// for j in 1..a.len() { -// a[j] = a[j - 1] * r_j; -// } -// a -// }; - -// // take weighted sum of the two vectors using w -// assert_eq!(a_sc.len(), a_eval.len()); -// (0..a_sc.len()) -// .map(|i| w[0] * a_sc[i] + w[1] * a_eval[i]) -// .collect::>() -// }; - -// let (proof, _comm_poly, _comm_sc_eval) = 
DotProductProof::prove( -// gens_1, -// gens_n, -// transcript, -// random_tape, -// &poly.as_vec(), -// &blinds_poly[j], -// &a, -// &target, -// &blind, -// ); - -// (proof, eval, comm_eval) -// }; - -// proofs.push(proof); -// claim_per_round = claim_next_round; -// comm_claim_per_round = comm_claim_next_round; -// r.push(r_j); -// comm_evals.push(comm_claim_per_round.clone()); -// } - -// ( -// ZKSumcheckInstanceProof::new(comm_polys, comm_evals, proofs), -// r, -// vec![poly_A[0], poly_B[0], poly_C[0], poly_D[0]], -// blinds_evals[num_rounds - 1], -// ) -// } -// } diff --git a/src/transcript.rs b/src/transcript.rs deleted file mode 100644 index e1e5087e..00000000 --- a/src/transcript.rs +++ /dev/null @@ -1,69 +0,0 @@ -use super::scalar::Scalar; -use crate::group::CompressedGroup; -use ark_ff::{BigInteger, PrimeField}; -use ark_serialize::CanonicalSerialize; -use merlin::Transcript; - -pub trait ProofTranscript { - fn append_protocol_name(&mut self, protocol_name: &'static [u8]); - fn append_scalar(&mut self, label: &'static [u8], scalar: &Scalar); - fn append_point(&mut self, label: &'static [u8], point: &CompressedGroup); - fn challenge_scalar(&mut self, label: &'static [u8]) -> Scalar; - fn challenge_vector(&mut self, label: &'static [u8], len: usize) -> Vec; -} - - - -impl ProofTranscript for Transcript { - fn append_protocol_name(&mut self, protocol_name: &'static [u8]) { - self.append_message(b"protocol-name", protocol_name); - } - - fn append_scalar(&mut self, label: &'static [u8], scalar: &Scalar) { - self.append_message(label, scalar.into_repr().to_bytes_le().as_slice()); - } - - fn append_point(&mut self, label: &'static [u8], point: &CompressedGroup) { - let mut point_encoded = Vec::new(); - point.serialize(&mut point_encoded).unwrap(); - self.append_message(label, point_encoded.as_slice()); - } - - fn challenge_scalar(&mut self, label: &'static [u8]) -> Scalar { - let mut buf = [0u8; 64]; - self.challenge_bytes(label, &mut buf); - 
Scalar::from_le_bytes_mod_order(&buf) - } - - fn challenge_vector(&mut self, label: &'static [u8], len: usize) -> Vec { - (0..len) - .map(|_i| self.challenge_scalar(label)) - .collect::>() - } -} - -pub trait AppendToTranscript { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript); -} - -impl AppendToTranscript for Scalar { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) { - transcript.append_scalar(label, self); - } -} - -impl AppendToTranscript for [Scalar] { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) { - transcript.append_message(label, b"begin_append_vector"); - for item in self { - transcript.append_scalar(label, item); - } - transcript.append_message(label, b"end_append_vector"); - } -} - -impl AppendToTranscript for CompressedGroup { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) { - transcript.append_point(label, self); - } -} diff --git a/src/unipoly.rs b/src/unipoly.rs index d97b1892..93454a20 100644 --- a/src/unipoly.rs +++ b/src/unipoly.rs @@ -1,12 +1,7 @@ -use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; - -use super::commitments::{Commitments, MultiCommitGens}; -use super::group::GroupElement; use super::scalar::Scalar; -use super::transcript::{AppendToTranscript, ProofTranscript}; +use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; use ark_ff::Field; use ark_serialize::*; -use merlin::Transcript; // ax^2 + bx + c stored as vec![c,b,a] // ax^3 + bx^2 + cx + d stored as vec![d,c,b,a] #[derive(Debug, CanonicalDeserialize, CanonicalSerialize, Clone)] @@ -60,10 +55,6 @@ impl UniPoly { self.coeffs.len() - 1 } - pub fn as_vec(&self) -> Vec { - self.coeffs.clone() - } - pub fn eval_at_zero(&self) -> Scalar { self.coeffs[0] } @@ -89,10 +80,6 @@ impl UniPoly { coeffs_except_linear_term, } } - - pub fn commit(&self, gens: &MultiCommitGens, blind: &Scalar) -> GroupElement { - 
self.coeffs.commit(blind, gens) - } } impl CompressedUniPoly { @@ -122,16 +109,6 @@ impl AppendToPoseidon for UniPoly { } } -impl AppendToTranscript for UniPoly { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut Transcript) { - transcript.append_message(label, b"UniPoly_begin"); - for i in 0..self.coeffs.len() { - transcript.append_scalar(b"coeff", &self.coeffs[i]); - } - transcript.append_message(label, b"UniPoly_end"); - } -} - #[cfg(test)] mod tests { From 71122ec1678201740fd6a10fc40f01dff32b5e16 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 7 Feb 2023 18:44:13 +0100 Subject: [PATCH 16/64] wip --- src/commitments.rs | 58 ++++++++++++++++++----------------- src/dense_mlpoly.rs | 74 ++++++++++++++++++++++++--------------------- src/nizk/mod.rs | 15 +++++---- 3 files changed, 77 insertions(+), 70 deletions(-) diff --git a/src/commitments.rs b/src/commitments.rs index bac7ea73..8d166623 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -2,7 +2,7 @@ use super::group::{GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROU use super::scalar::Scalar; use crate::group::CompressGroupElement; use crate::parameters::*; -use ark_ec::{AffineRepr, CurveGroup}; +use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::PrimeField; use std::ops::Mul; @@ -10,29 +10,30 @@ use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_crypto_primitives::sponge::CryptographicSponge; #[derive(Debug, Clone)] -pub struct MultiCommitGens { +pub struct MultiCommitGens { pub n: usize, - pub G: Vec, - pub h: GroupElement, + pub G: Vec, + pub h: G::Affine, } -impl MultiCommitGens { +impl MultiCommitGens { pub fn new(n: usize, label: &[u8]) -> Self { let params = poseidon_params(); let mut sponge = PoseidonSponge::new(¶ms); sponge.absorb(&label); sponge.absorb(&GROUP_BASEPOINT.compress().0); - let mut gens: Vec = Vec::new(); - for _ in 0..n + 1 { - let mut el_aff: Option = None; - while el_aff.is_none() { - let uniform_bytes = 
sponge.squeeze_bytes(64); - el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes); - } - let el = el_aff.unwrap().clear_cofactor().into_group(); - gens.push(el); - } + let gens = (0..=n) + .map(|i| { + let mut el_aff: Option = None; + while el_aff.is_none() { + let uniform_bytes = sponge.squeeze_bytes(64); + el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes); + } + let el = el_aff.unwrap().clear_cofactor(); + gens.push(el); + }) + .collect::>(); MultiCommitGens { n, @@ -41,7 +42,7 @@ impl MultiCommitGens { } } - pub fn clone(&self) -> MultiCommitGens { + pub fn clone(&self) -> Self { MultiCommitGens { n: self.n, h: self.h, @@ -49,7 +50,7 @@ impl MultiCommitGens { } } - pub fn split_at(&self, mid: usize) -> (MultiCommitGens, MultiCommitGens) { + pub fn split_at(&self, mid: usize) -> (Self, Self) { let (G1, G2) = self.G.split_at(mid); ( @@ -67,27 +68,28 @@ impl MultiCommitGens { } } -pub trait Commitments { - fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement; +// TODO replace that by arkworks CommitmentScheme probably exists +pub trait Commitments { + fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G; } -impl Commitments for Scalar { - fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement { +impl Commitments for G::ScalarField { + fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G { assert_eq!(gens_n.n, 1); - GroupElement::vartime_multiscalar_mul(&[*self, *blind], &[gens_n.G[0], gens_n.h]) + ::msm(&[*self, *blind], &[gens_n.G[0], gens_n.h]) } } -impl Commitments for Vec { - fn commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement { +impl Commitments for Vec { + fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G { assert_eq!(gens_n.n, self.len()); - GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind) + ::msm(self, &gens_n.G) + gens_n.h.mul(blind) } } -impl Commitments for [Scalar] { - fn 
commit(&self, blind: &Scalar, gens_n: &MultiCommitGens) -> GroupElement { +impl Commitments for [G::ScalarField] { + fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G { assert_eq!(gens_n.n, self.len()); - GroupElement::vartime_multiscalar_mul(self, &gens_n.G) + gens_n.h.mul(blind) + ::msm(self, &gens_n.G) + gens_n.h.mul(blind) } } diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 21b364b9..8d5653da 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -2,7 +2,6 @@ use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; - use super::commitments::{Commitments, MultiCommitGens}; use super::errors::ProofVerifyError; use super::group::{ @@ -14,6 +13,7 @@ use super::nizk::{DotProductProofGens, DotProductProofLog}; use super::random::RandomTape; use super::scalar::Scalar; use super::transcript::{AppendToTranscript, ProofTranscript}; +<<<<<<< Updated upstream use ark_bls12_377::{Bls12_377 as I, G1Affine}; use ark_ec::scalar_mul::variable_base::VariableBaseMSM; use ark_ec::{pairing::Pairing, CurveGroup}; @@ -22,6 +22,13 @@ use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; use ark_poly_commit::multilinear_pc::data_structures::{ Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; +======= +use ark_bls12_377::Bls12_377 as I; +use ark_ff::Field; +use ark_ff::{One, UniformRand, Zero}; +use ark_poly::MultilinearExtension; +use ark_poly_commit::multilinear_pc::data_structures::{CommitterKey, VerifierKey}; +>>>>>>> Stashed changes use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::*; use core::ops::Index; @@ -33,18 +40,18 @@ use rayon::prelude::*; // TODO: integrate the DenseMultilinearExtension(and Sparse) https://github.com/arkworks-rs/algebra/tree/master/poly/src/evaluations/multivariate/multilinear from arkworks into Spartan. This requires moving the specific Spartan functionalities in separate traits. 
#[derive(Debug, Clone, Eq, PartialEq, Hash, CanonicalDeserialize, CanonicalSerialize)] -pub struct DensePolynomial { +pub struct DensePolynomial { pub num_vars: usize, // the number of variables in the multilinear polynomial pub len: usize, - pub Z: Vec, // evaluations of the polynomial in all the 2^num_vars Boolean inputs + pub Z: Vec, // evaluations of the polynomial in all the 2^num_vars Boolean inputs } -impl MultilinearExtension for DensePolynomial { +impl MultilinearExtension for DensePolynomial { fn num_vars(&self) -> usize { self.get_num_vars() } - fn evaluate(&self, point: &[Scalar]) -> Option { + fn evaluate(&self, point: &[F]) -> Option { if point.len() == self.num_vars { Some(self.evaluate(&point)) } else { @@ -53,9 +60,9 @@ impl MultilinearExtension for DensePolynomial { } fn rand(num_vars: usize, rng: &mut R) -> Self { - let evals = (0..(1 << num_vars)).map(|_| Scalar::rand(rng)).collect(); + let evals = (0..(1 << num_vars)).map(|_| F::rand(rng)).collect(); Self { - num_vars: num_vars, + num_vars, len: 1 << num_vars, Z: evals, } @@ -74,12 +81,12 @@ impl MultilinearExtension for DensePolynomial { } } -impl Zero for DensePolynomial { +impl Zero for DensePolynomial { fn zero() -> Self { Self { num_vars: 0, len: 1, - Z: vec![Scalar::zero()], + Z: vec![F::zero()], } } @@ -88,8 +95,8 @@ impl Zero for DensePolynomial { } } -impl Add for DensePolynomial { - type Output = DensePolynomial; +impl Add for DensePolynomial { + type Output = DensePolynomial; fn add(self, other: Self) -> Self { &self + &other } @@ -97,10 +104,10 @@ impl Add for DensePolynomial { // function needed because the result might have a different lifetime than the // operands -impl<'a, 'b> Add<&'a DensePolynomial> for &'b DensePolynomial { - type Output = DensePolynomial; +impl<'a, 'b, F: Field> Add<&'a DensePolynomial> for &'b DensePolynomial { + type Output = DensePolynomial; - fn add(self, other: &'a DensePolynomial) -> Self::Output { + fn add(self, other: &'a DensePolynomial) -> 
Self::Output { if other.is_zero() { return self.clone(); } @@ -109,7 +116,7 @@ impl<'a, 'b> Add<&'a DensePolynomial> for &'b DensePolynomial { } assert_eq!(self.num_vars, other.num_vars); - let res: Vec = self + let res = self .Z .iter() .zip(other.Z.iter()) @@ -123,20 +130,20 @@ impl<'a, 'b> Add<&'a DensePolynomial> for &'b DensePolynomial { } } -impl AddAssign for DensePolynomial { +impl AddAssign for DensePolynomial { fn add_assign(&mut self, other: Self) { *self = &*self + &other; } } -impl<'a, 'b> AddAssign<&'a DensePolynomial> for DensePolynomial { - fn add_assign(&mut self, other: &'a DensePolynomial) { +impl<'a, 'b, F: Field> AddAssign<&'a DensePolynomial> for DensePolynomial { + fn add_assign(&mut self, other: &'a DensePolynomial) { *self = &*self + other; } } -impl<'a, 'b> AddAssign<(Scalar, &'a DensePolynomial)> for DensePolynomial { - fn add_assign(&mut self, (scalar, other): (Scalar, &'a DensePolynomial)) { +impl<'a, 'b, F: Field> AddAssign<(Scalar, &'a DensePolynomial)> for DensePolynomial { + fn add_assign(&mut self, (scalar, other): (Scalar, &'a DensePolynomial)) { let other = Self { num_vars: other.num_vars, len: 1 << other.num_vars, @@ -146,8 +153,8 @@ impl<'a, 'b> AddAssign<(Scalar, &'a DensePolynomial)> for DensePolynomial { } } -impl Neg for DensePolynomial { - type Output = DensePolynomial; +impl Neg for DensePolynomial { + type Output = DensePolynomial; fn neg(self) -> Self::Output { Self::Output { @@ -158,39 +165,39 @@ impl Neg for DensePolynomial { } } -impl Sub for DensePolynomial { - type Output = DensePolynomial; +impl Sub for DensePolynomial { + type Output = DensePolynomial; fn sub(self, other: Self) -> Self::Output { &self - &other } } -impl<'a, 'b> Sub<&'a DensePolynomial> for &'b DensePolynomial { - type Output = DensePolynomial; +impl<'a, 'b,F:Field> Sub<&'a DensePolynomial> for &'b DensePolynomial { + type Output = DensePolynomial; - fn sub(self, other: &'a DensePolynomial) -> Self::Output { + fn sub(self, other: &'a 
DensePolynomial) -> Self::Output { self + &other.clone().neg() } } -impl SubAssign for DensePolynomial { +impl SubAssign for DensePolynomial { fn sub_assign(&mut self, other: Self) { *self = &*self - &other; } } -impl<'a, 'b> SubAssign<&'a DensePolynomial> for DensePolynomial { - fn sub_assign(&mut self, other: &'a DensePolynomial) { +impl<'a, 'b,F:Field> SubAssign<&'a DensePolynomial> for DensePolynomial { + fn sub_assign(&mut self, other: &'a DensePolynomial) { *self = &*self - other; } } #[derive(Clone)] -pub struct PolyCommitmentGens { +pub struct PolyCommitmentGens{ pub gens: DotProductProofGens, - pub ck: CommitterKey, - pub vk: VerifierKey, + pub ck: CommitterKey, + pub vk: VerifierKey, } impl PolyCommitmentGens { @@ -593,7 +600,6 @@ impl PolyEvalProof { #[cfg(test)] mod tests { - use crate::parameters::poseidon_params; diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index 26667b7a..3f172a3a 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -419,15 +419,15 @@ impl DotProductProof { } #[derive(Clone)] -pub struct DotProductProofGens { +pub struct DotProductProofGens { n: usize, - pub gens_n: MultiCommitGens, - pub gens_1: MultiCommitGens, + pub gens_n: MultiCommitGens, + pub gens_1: MultiCommitGens, } -impl DotProductProofGens { +impl DotProductProofGens { pub fn new(n: usize, label: &[u8]) -> Self { - let (gens_n, gens_1) = MultiCommitGens::new(n + 1, label).split_at(n); + let (gens_n, gens_1) = MultiCommitGens::::new(n + 1, label).split_at(n); DotProductProofGens { n, gens_n, gens_1 } } } @@ -574,9 +574,8 @@ impl DotProductProofLog { let z2_s = &self.z2; let lhs = ((Gamma_hat.mul(c_s) + beta_s).mul(a_hat_s) + delta_s).compress(); - let rhs = ((g_hat + gens.gens_1.G[0].mul(a_hat_s)).mul(z1_s) - + gens.gens_1.h.mul(z2_s)) - .compress(); + let rhs = + ((g_hat + gens.gens_1.G[0].mul(a_hat_s)).mul(z1_s) + gens.gens_1.h.mul(z2_s)).compress(); assert_eq!(lhs, rhs); From 8fedd99f69a2eb63d9a888c71f8f28f74385d441 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 
7 Feb 2023 22:42:52 +0100 Subject: [PATCH 17/64] wip --- src/group.rs | 79 -------------------- src/lib.rs | 1 - src/mipp.rs | 2 +- src/parameters.rs | 3 +- src/product_tree.rs | 173 ++++++++++++++++++++++---------------------- src/sumcheck.rs | 161 +++++++++++++++++++++-------------------- src/transcript.rs | 18 +++++ 7 files changed, 186 insertions(+), 251 deletions(-) delete mode 100644 src/group.rs create mode 100644 src/transcript.rs diff --git a/src/group.rs b/src/group.rs deleted file mode 100644 index 4569c66e..00000000 --- a/src/group.rs +++ /dev/null @@ -1,79 +0,0 @@ -use crate::errors::ProofVerifyError; -use ark_ec::scalar_mul::variable_base::VariableBaseMSM; -use ark_ec::Group; - -use lazy_static::lazy_static; - -use super::scalar::Scalar; - -use ark_ec::CurveGroup; -use ark_serialize::*; -use core::borrow::Borrow; - -pub type GroupElement = ark_bls12_377::G1Projective; -pub type GroupElementAffine = ark_bls12_377::G1Affine; -pub type Fq = ark_bls12_377::Fq; -pub type Fr = ark_bls12_377::Fr; - -#[derive(Clone, Eq, PartialEq, Hash, Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct CompressedGroup(pub Vec); - -lazy_static! 
{ - pub static ref GROUP_BASEPOINT: GroupElement = GroupElement::generator(); -} - -pub trait CompressGroupElement { - fn compress(&self) -> CompressedGroup; -} - -pub trait DecompressGroupElement { - fn decompress(encoded: &CompressedGroup) -> Option; -} - -pub trait UnpackGroupElement { - fn unpack(&self) -> Result; -} - -impl CompressGroupElement for GroupElement { - fn compress(&self) -> CompressedGroup { - let mut point_encoding = Vec::new(); - self - .serialize_with_mode(&mut point_encoding, Compress::Yes) - .unwrap(); - CompressedGroup(point_encoding) - } -} - -impl DecompressGroupElement for GroupElement { - fn decompress(encoded: &CompressedGroup) -> Option { - let res = GroupElement::deserialize_compressed(&*encoded.0); - if let Ok(r) = res { - Some(r) - } else { - println!("{:?}", res); - None - } - } -} - -impl UnpackGroupElement for CompressedGroup { - fn unpack(&self) -> Result { - let encoded = self.0.clone(); - GroupElement::decompress(self).ok_or(ProofVerifyError::DecompressionError(encoded)) - } -} - -pub trait VartimeMultiscalarMul { - fn vartime_multiscalar_mul(scalars: &[Scalar], points: &[GroupElement]) -> GroupElement; -} - -impl VartimeMultiscalarMul for GroupElement { - fn vartime_multiscalar_mul(scalars: &[Scalar], points: &[GroupElement]) -> GroupElement { - assert!(scalars.len() == points.len()); - let aff_points = points - .iter() - .map(|P| P.borrow().into_affine()) - .collect::>(); - ::msm_unchecked(aff_points.as_slice(), scalars) - } -} diff --git a/src/lib.rs b/src/lib.rs index 6342adab..38d1bf2a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -21,7 +21,6 @@ extern crate rayon; mod commitments; mod dense_mlpoly; mod errors; -mod group; #[macro_use] pub(crate) mod macros; mod math; diff --git a/src/mipp.rs b/src/mipp.rs index 79be3da4..aec369ea 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -8,7 +8,7 @@ use ark_poly_commit::multilinear_pc::data_structures::{ }; use ark_poly_commit::multilinear_pc::MultilinearPC; use 
ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError, Write}; - +use crate::transcript::Transcript; use ark_std::One; use ark_std::Zero; use rayon::iter::ParallelIterator; diff --git a/src/parameters.rs b/src/parameters.rs index cee6e8a2..b5416c41 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -1,11 +1,10 @@ use std::str::FromStr; - +use ark_bls12_377::Fr; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; // Copyright: https://github.com/nikkolasg/ark-dkg/blob/main/src/parameters.rs use json::JsonValue; use lazy_static::lazy_static; -use crate::group::Fr; lazy_static! { // bls12377_rate2_constraints_fr: pub static ref FR: JsonValue = object! { diff --git a/src/product_tree.rs b/src/product_tree.rs index 5d4b071f..bf8ff27e 100644 --- a/src/product_tree.rs +++ b/src/product_tree.rs @@ -6,27 +6,28 @@ use super::dense_mlpoly::EqPolynomial; use super::math::Math; use super::scalar::Scalar; use super::sumcheck::SumcheckInstanceProof; +use ark_ff::PrimeField; use ark_serialize::*; use ark_std::One; #[derive(Debug)] -pub struct ProductCircuit { - left_vec: Vec, - right_vec: Vec, +pub struct ProductCircuit { + left_vec: Vec>, + right_vec: Vec>, } -impl ProductCircuit { +impl ProductCircuit { fn compute_layer( - inp_left: &DensePolynomial, - inp_right: &DensePolynomial, - ) -> (DensePolynomial, DensePolynomial) { + inp_left: &DensePolynomial, + inp_right: &DensePolynomial, + ) -> (DensePolynomial, DensePolynomial) { let len = inp_left.len() + inp_right.len(); let outp_left = (0..len / 4) .map(|i| inp_left[i] * inp_right[i]) - .collect::>(); + .collect::>(); let outp_right = (len / 4..len / 2) .map(|i| inp_left[i] * inp_right[i]) - .collect::>(); + .collect::>(); ( DensePolynomial::new(outp_left), @@ -34,9 +35,9 @@ impl ProductCircuit { ) } - pub fn new(poly: &DensePolynomial) -> Self { - let mut left_vec: Vec = Vec::new(); - let mut right_vec: Vec = Vec::new(); + pub fn new(poly: &DensePolynomial) -> Self { + let mut left_vec: 
Vec> = Vec::new(); + let mut right_vec: Vec> = Vec::new(); let num_layers = poly.len().log_2(); let (outp_left, outp_right) = poly.split(poly.len() / 2); @@ -56,7 +57,7 @@ impl ProductCircuit { } } - pub fn evaluate(&self) -> Scalar { + pub fn evaluate(&self) -> F { let len = self.left_vec.len(); assert_eq!(self.left_vec[len - 1].get_num_vars(), 0); assert_eq!(self.right_vec[len - 1].get_num_vars(), 0); @@ -64,14 +65,18 @@ impl ProductCircuit { } } -pub struct DotProductCircuit { - left: DensePolynomial, - right: DensePolynomial, - weight: DensePolynomial, +pub struct DotProductCircuit { + left: DensePolynomial, + right: DensePolynomial, + weight: DensePolynomial, } -impl DotProductCircuit { - pub fn new(left: DensePolynomial, right: DensePolynomial, weight: DensePolynomial) -> Self { +impl DotProductCircuit { + pub fn new( + left: DensePolynomial, + right: DensePolynomial, + weight: DensePolynomial, + ) -> Self { assert_eq!(left.len(), right.len()); assert_eq!(left.len(), weight.len()); DotProductCircuit { @@ -81,13 +86,13 @@ impl DotProductCircuit { } } - pub fn evaluate(&self) -> Scalar { + pub fn evaluate(&self) -> F { (0..self.left.len()) .map(|i| self.left[i] * self.right[i] * self.weight[i]) .sum() } - pub fn split(&mut self) -> (DotProductCircuit, DotProductCircuit) { + pub fn split(&mut self) -> (Self, Self) { let idx = self.left.len() / 2; assert_eq!(idx * 2, self.left.len()); let (l1, l2) = self.left.split(idx); @@ -110,20 +115,20 @@ impl DotProductCircuit { #[allow(dead_code)] #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct LayerProof { - pub proof: SumcheckInstanceProof, - pub claims: Vec, +pub struct LayerProof { + pub proof: SumcheckInstanceProof, + pub claims: Vec, } #[allow(dead_code)] -impl LayerProof { +impl LayerProof { pub fn verify( &self, - claim: Scalar, + claim: F, num_rounds: usize, degree_bound: usize, - transcript: &mut PoseidonTranscript, - ) -> (Scalar, Vec) { + transcript: &mut PoseidonTranscript, + ) -> (F, 
Vec) { self .proof .verify(claim, num_rounds, degree_bound, transcript) @@ -133,21 +138,21 @@ impl LayerProof { #[allow(dead_code)] #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct LayerProofBatched { - pub proof: SumcheckInstanceProof, - pub claims_prod_left: Vec, - pub claims_prod_right: Vec, +pub struct LayerProofBatched { + pub proof: SumcheckInstanceProof, + pub claims_prod_left: Vec, + pub claims_prod_right: Vec, } #[allow(dead_code)] -impl LayerProofBatched { +impl LayerProofBatched { pub fn verify( &self, - claim: Scalar, + claim: F, num_rounds: usize, degree_bound: usize, - transcript: &mut PoseidonTranscript, - ) -> (Scalar, Vec) { + transcript: &mut PoseidonTranscript, + ) -> (F, Vec) { self .proof .verify(claim, num_rounds, degree_bound, transcript) @@ -156,23 +161,23 @@ impl LayerProofBatched { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct ProductCircuitEvalProof { - proof: Vec, +pub struct ProductCircuitEvalProof { + proof: Vec>, } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct ProductCircuitEvalProofBatched { - proof: Vec, - claims_dotp: (Vec, Vec, Vec), +pub struct ProductCircuitEvalProofBatched { + proof: Vec>, + claims_dotp: (Vec, Vec, Vec), } -impl ProductCircuitEvalProof { +impl ProductCircuitEvalProof { #![allow(dead_code)] pub fn prove( - circuit: &mut ProductCircuit, - transcript: &mut PoseidonTranscript, - ) -> (Self, Scalar, Vec) { - let mut proof: Vec = Vec::new(); + circuit: &mut ProductCircuit, + transcript: &mut PoseidonTranscript, + ) -> (Self, F, Vec) { + let mut proof: Vec> = Vec::new(); let num_layers = circuit.left_vec.len(); let mut claim = circuit.evaluate(); @@ -184,10 +189,9 @@ impl ProductCircuitEvalProof { assert_eq!(poly_C.len(), len / 2); let num_rounds_prod = poly_C.len().log_2(); - let comb_func_prod = |poly_A_comp: &Scalar, - poly_B_comp: &Scalar, - poly_C_comp: &Scalar| - -> Scalar { (*poly_A_comp) * poly_B_comp * poly_C_comp }; + let 
comb_func_prod = |poly_A_comp: &F, poly_B_comp: &F, poly_C_comp: &F| -> F { + (*poly_A_comp) * poly_B_comp * poly_C_comp + }; let (proof_prod, rand_prod, claims_prod) = SumcheckInstanceProof::prove_cubic( &claim, num_rounds_prod, @@ -218,15 +222,10 @@ impl ProductCircuitEvalProof { (ProductCircuitEvalProof { proof }, claim, rand) } - pub fn verify( - &self, - eval: Scalar, - len: usize, - transcript: &mut PoseidonTranscript, - ) -> (Scalar, Vec) { + pub fn verify(&self, eval: F, len: usize, transcript: &mut PoseidonTranscript) -> (F, Vec) { let num_layers = len.log_2(); let mut claim = eval; - let mut rand: Vec = Vec::new(); + let mut rand: Vec = Vec::new(); //let mut num_rounds = 0; assert_eq!(self.proof.len(), num_layers); for (num_rounds, i) in (0..num_layers).enumerate() { @@ -237,16 +236,14 @@ impl ProductCircuitEvalProof { transcript.append_scalar(&claims_prod[1]); assert_eq!(rand.len(), rand_prod.len()); - let eq: Scalar = (0..rand.len()) - .map(|i| { - rand[i] * rand_prod[i] + (Scalar::one() - rand[i]) * (Scalar::one() - rand_prod[i]) - }) + let eq: F = (0..rand.len()) + .map(|i| rand[i] * rand_prod[i] + (F::one() - rand[i]) * (F::one() - rand_prod[i])) .product(); assert_eq!(claims_prod[0] * claims_prod[1] * eq, claim_last); // produce a random challenge let r_layer = transcript.challenge_scalar(); - claim = (Scalar::one() - r_layer) * claims_prod[0] + r_layer * claims_prod[1]; + claim = (F::one() - r_layer) * claims_prod[0] + r_layer * claims_prod[1]; let mut ext = vec![r_layer]; ext.extend(rand_prod); rand = ext; @@ -256,21 +253,21 @@ impl ProductCircuitEvalProof { } } -impl ProductCircuitEvalProofBatched { +impl ProductCircuitEvalProofBatched { pub fn prove( - prod_circuit_vec: &mut Vec<&mut ProductCircuit>, - dotp_circuit_vec: &mut Vec<&mut DotProductCircuit>, - transcript: &mut PoseidonTranscript, - ) -> (Self, Vec) { + prod_circuit_vec: &mut Vec<&mut ProductCircuit>, + dotp_circuit_vec: &mut Vec<&mut DotProductCircuit>, + transcript: &mut 
PoseidonTranscript, + ) -> (Self, Vec) { assert!(!prod_circuit_vec.is_empty()); let mut claims_dotp_final = (Vec::new(), Vec::new(), Vec::new()); - let mut proof_layers: Vec = Vec::new(); + let mut proof_layers: Vec> = Vec::new(); let num_layers = prod_circuit_vec[0].left_vec.len(); let mut claims_to_verify = (0..prod_circuit_vec.len()) .map(|i| prod_circuit_vec[i].evaluate()) - .collect::>(); + .collect::>(); let mut rand = Vec::new(); for layer_id in (0..num_layers).rev() { // prepare paralell instance that share poly_C first @@ -281,13 +278,13 @@ impl ProductCircuitEvalProofBatched { assert_eq!(poly_C_par.len(), len / 2); let num_rounds_prod = poly_C_par.len().log_2(); - let comb_func_prod = |poly_A_comp: &Scalar, - poly_B_comp: &Scalar, - poly_C_comp: &Scalar| - -> Scalar { (*poly_A_comp) * poly_B_comp * poly_C_comp }; + let comb_func_prod = |poly_A_comp: &F, + poly_B_comp: &F, + poly_C_comp: &F| + -> F { (*poly_A_comp) * poly_B_comp * poly_C_comp }; - let mut poly_A_batched_par: Vec<&mut DensePolynomial> = Vec::new(); - let mut poly_B_batched_par: Vec<&mut DensePolynomial> = Vec::new(); + let mut poly_A_batched_par: Vec<&mut DensePolynomial> = Vec::new(); + let mut poly_B_batched_par: Vec<&mut DensePolynomial> = Vec::new(); for prod_circuit in prod_circuit_vec.iter_mut() { poly_A_batched_par.push(&mut prod_circuit.left_vec[layer_id]); poly_B_batched_par.push(&mut prod_circuit.right_vec[layer_id]) @@ -299,9 +296,9 @@ impl ProductCircuitEvalProofBatched { ); // prepare sequential instances that don't share poly_C - let mut poly_A_batched_seq: Vec<&mut DensePolynomial> = Vec::new(); - let mut poly_B_batched_seq: Vec<&mut DensePolynomial> = Vec::new(); - let mut poly_C_batched_seq: Vec<&mut DensePolynomial> = Vec::new(); + let mut poly_A_batched_seq: Vec<&mut DensePolynomial> = Vec::new(); + let mut poly_B_batched_seq: Vec<&mut DensePolynomial> = Vec::new(); + let mut poly_C_batched_seq: Vec<&mut DensePolynomial> = Vec::new(); if layer_id == 0 && 
!dotp_circuit_vec.is_empty() { // add additional claims for item in dotp_circuit_vec.iter() { @@ -360,7 +357,7 @@ impl ProductCircuitEvalProofBatched { claims_to_verify = (0..prod_circuit_vec.len()) .map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i])) - .collect::>(); + .collect::>(); let mut ext = vec![r_layer]; ext.extend(rand_prod); @@ -384,18 +381,18 @@ impl ProductCircuitEvalProofBatched { pub fn verify( &self, - claims_prod_vec: &[Scalar], - claims_dotp_vec: &[Scalar], + claims_prod_vec: &[F], + claims_dotp_vec: &[F], len: usize, - transcript: &mut PoseidonTranscript, - ) -> (Vec, Vec, Vec) { + transcript: &mut PoseidonTranscript, + ) -> (Vec, Vec, Vec) { let num_layers = len.log_2(); - let mut rand: Vec = Vec::new(); + let mut rand: Vec = Vec::new(); //let mut num_rounds = 0; assert_eq!(self.proof.len(), num_layers); let mut claims_to_verify = claims_prod_vec.to_owned(); - let mut claims_to_verify_dotp: Vec = Vec::new(); + let mut claims_to_verify_dotp: Vec = Vec::new(); for (num_rounds, i) in (0..num_layers).enumerate() { if i == num_layers - 1 { claims_to_verify.extend(claims_dotp_vec); @@ -422,12 +419,12 @@ impl ProductCircuitEvalProofBatched { } assert_eq!(rand.len(), rand_prod.len()); - let eq: Scalar = (0..rand.len()) + let eq: F= (0..rand.len()) .map(|i| { - rand[i] * rand_prod[i] + (Scalar::one() - rand[i]) * (Scalar::one() - rand_prod[i]) + rand[i] * rand_prod[i] + (F::one() - rand[i]) * (F::one() - rand_prod[i]) }) .product(); - let mut claim_expected: Scalar = (0..claims_prod_vec.len()) + let mut claim_expected: F = (0..claims_prod_vec.len()) .map(|i| coeff_vec[i] * (claims_prod_left[i] * claims_prod_right[i] * eq)) .sum(); diff --git a/src/sumcheck.rs b/src/sumcheck.rs index 617bfaac..34e31edc 100644 --- a/src/sumcheck.rs +++ b/src/sumcheck.rs @@ -1,37 +1,38 @@ #![allow(clippy::too_many_arguments)] #![allow(clippy::type_complexity)] -use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; - use 
super::dense_mlpoly::DensePolynomial; use super::errors::ProofVerifyError; +use crate::poseidon_transcript::PoseidonTranscript; +use crate::transcript::TranscriptWriter; use super::scalar::Scalar; use super::unipoly::UniPoly; +use ark_ff::PrimeField; use ark_ff::Zero; use ark_serialize::*; use itertools::izip; #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct SumcheckInstanceProof { - pub polys: Vec, +pub struct SumcheckInstanceProof { + pub polys: Vec>, } -impl SumcheckInstanceProof { - pub fn new(polys: Vec) -> SumcheckInstanceProof { +impl SumcheckInstanceProof { + pub fn new(polys: Vec>) -> Self { SumcheckInstanceProof { polys } } pub fn verify( &self, - claim: Scalar, + claim: F, num_rounds: usize, degree_bound: usize, - transcript: &mut PoseidonTranscript, - ) -> Result<(Scalar, Vec), ProofVerifyError> { + transcript: &mut PoseidonTranscript, + ) -> Result<(F, Vec), ProofVerifyError> { let mut e = claim; - let mut r: Vec = Vec::new(); + let mut r: Vec = Vec::new(); // verify that there is a univariate polynomial for each round assert_eq!(self.polys.len(), num_rounds); @@ -60,27 +61,27 @@ impl SumcheckInstanceProof { } } -impl SumcheckInstanceProof { - pub fn prove_cubic_with_additive_term( - claim: &Scalar, +impl SumcheckInstanceProof { + pub fn prove_cubic_with_additive_term( + claim: &F, num_rounds: usize, - poly_tau: &mut DensePolynomial, - poly_A: &mut DensePolynomial, - poly_B: &mut DensePolynomial, - poly_C: &mut DensePolynomial, - comb_func: F, - transcript: &mut PoseidonTranscript, - ) -> (Self, Vec, Vec) + poly_tau: &mut DensePolynomial, + poly_A: &mut DensePolynomial, + poly_B: &mut DensePolynomial, + poly_C: &mut DensePolynomial, + comb_func: C, + transcript: &mut PoseidonTranscript, + ) -> (Self, Vec, Vec) where - F: Fn(&Scalar, &Scalar, &Scalar, &Scalar) -> Scalar, + C: Fn(&F, &F, &F, &F) -> F, { let mut e = *claim; - let mut r: Vec = Vec::new(); - let mut cubic_polys: Vec = Vec::new(); + let mut r: Vec = Vec::new(); + let 
mut cubic_polys: Vec> = Vec::new(); for _j in 0..num_rounds { - let mut eval_point_0 = Scalar::zero(); - let mut eval_point_2 = Scalar::zero(); - let mut eval_point_3 = Scalar::zero(); + let mut eval_point_0 = F::zero(); + let mut eval_point_2 = F::zero(); + let mut eval_point_3 = F::zero(); let len = poly_tau.len() / 2; for i in 0..len { @@ -118,7 +119,7 @@ impl SumcheckInstanceProof { let poly = UniPoly::from_evals(&evals); // append the prover's message to the transcript - poly.append_to_poseidon(transcript); + poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round let r_j = transcript.challenge_scalar(); r.push(r_j); @@ -138,25 +139,25 @@ impl SumcheckInstanceProof { vec![poly_tau[0], poly_A[0], poly_B[0], poly_C[0]], ) } - pub fn prove_cubic( - claim: &Scalar, + pub fn prove_cubic( + claim: &F, num_rounds: usize, - poly_A: &mut DensePolynomial, - poly_B: &mut DensePolynomial, - poly_C: &mut DensePolynomial, - comb_func: F, - transcript: &mut PoseidonTranscript, - ) -> (Self, Vec, Vec) + poly_A: &mut DensePolynomial, + poly_B: &mut DensePolynomial, + poly_C: &mut DensePolynomial, + comb_func: C, + transcript: &mut PoseidonTranscript, + ) -> (Self, Vec, Vec) where - F: Fn(&Scalar, &Scalar, &Scalar) -> Scalar, + C: Fn(&F, &F, &F) -> F, { let mut e = *claim; - let mut r: Vec = Vec::new(); - let mut cubic_polys: Vec = Vec::new(); + let mut r: Vec = Vec::new(); + let mut cubic_polys: Vec> = Vec::new(); for _j in 0..num_rounds { - let mut eval_point_0 = Scalar::zero(); - let mut eval_point_2 = Scalar::zero(); - let mut eval_point_3 = Scalar::zero(); + let mut eval_point_0 = F::zero(); + let mut eval_point_2 = F::zero(); + let mut eval_point_3 = F::zero(); let len = poly_A.len() / 2; for i in 0..len { @@ -189,7 +190,7 @@ impl SumcheckInstanceProof { let poly = UniPoly::from_evals(&evals); // append the prover's message to the transcript - poly.append_to_poseidon(transcript); + poly.write_to_transcript(transcript); //derive the 
verifier's challenge for the next round let r_j = transcript.challenge_scalar(); @@ -209,46 +210,46 @@ impl SumcheckInstanceProof { ) } - pub fn prove_cubic_batched( - claim: &Scalar, + pub fn prove_cubic_batched( + claim: &F, num_rounds: usize, poly_vec_par: ( - &mut Vec<&mut DensePolynomial>, - &mut Vec<&mut DensePolynomial>, - &mut DensePolynomial, + &mut Vec<&mut DensePolynomial>, + &mut Vec<&mut DensePolynomial>, + &mut DensePolynomial, ), poly_vec_seq: ( - &mut Vec<&mut DensePolynomial>, - &mut Vec<&mut DensePolynomial>, - &mut Vec<&mut DensePolynomial>, + &mut Vec<&mut DensePolynomial>, + &mut Vec<&mut DensePolynomial>, + &mut Vec<&mut DensePolynomial>, ), - coeffs: &[Scalar], - comb_func: F, - transcript: &mut PoseidonTranscript, + coeffs: &[F], + comb_func: C, + transcript: &mut PoseidonTranscript, ) -> ( Self, - Vec, - (Vec, Vec, Scalar), - (Vec, Vec, Vec), + Vec, + (Vec, Vec, F), + (Vec, Vec, Vec), ) where - F: Fn(&Scalar, &Scalar, &Scalar) -> Scalar, + C: Fn(&F, &F, &F) -> F, { let (poly_A_vec_par, poly_B_vec_par, poly_C_par) = poly_vec_par; let (poly_A_vec_seq, poly_B_vec_seq, poly_C_vec_seq) = poly_vec_seq; //let (poly_A_vec_seq, poly_B_vec_seq, poly_C_vec_seq) = poly_vec_seq; let mut e = *claim; - let mut r: Vec = Vec::new(); - let mut cubic_polys: Vec = Vec::new(); + let mut r: Vec = Vec::new(); + let mut cubic_polys: Vec> = Vec::new(); for _j in 0..num_rounds { - let mut evals: Vec<(Scalar, Scalar, Scalar)> = Vec::new(); + let mut evals: Vec<(F, F, F)> = Vec::new(); for (poly_A, poly_B) in poly_A_vec_par.iter().zip(poly_B_vec_par.iter()) { - let mut eval_point_0 = Scalar::zero(); - let mut eval_point_2 = Scalar::zero(); - let mut eval_point_3 = Scalar::zero(); + let mut eval_point_0 = F::zero(); + let mut eval_point_2 = F::zero(); + let mut eval_point_3 = F::zero(); let len = poly_A.len() / 2; for i in 0..len { @@ -285,9 +286,9 @@ impl SumcheckInstanceProof { poly_B_vec_seq.iter(), poly_C_vec_seq.iter() ) { - let mut eval_point_0 = Scalar::zero(); 
- let mut eval_point_2 = Scalar::zero(); - let mut eval_point_3 = Scalar::zero(); + let mut eval_point_0 = F::zero(); + let mut eval_point_2 = F::zero(); + let mut eval_point_3 = F::zero(); let len = poly_A.len() / 2; for i in 0..len { // eval 0: bound_func is A(low) @@ -327,7 +328,7 @@ impl SumcheckInstanceProof { let poly = UniPoly::from_evals(&evals); // append the prover's message to the transcript - poly.append_to_poseidon(transcript); + poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round let r_j = transcript.challenge_scalar(); @@ -381,24 +382,24 @@ impl SumcheckInstanceProof { ) } - pub fn prove_quad( - claim: &Scalar, + pub fn prove_quad( + claim: &F, num_rounds: usize, - poly_A: &mut DensePolynomial, - poly_B: &mut DensePolynomial, - comb_func: F, - transcript: &mut PoseidonTranscript, - ) -> (Self, Vec, Vec) + poly_A: &mut DensePolynomial, + poly_B: &mut DensePolynomial, + comb_func: C, + transcript: &mut PoseidonTranscript, + ) -> (Self, Vec, Vec) where - F: Fn(&Scalar, &Scalar) -> Scalar, + C: Fn(&F, &F) -> F, { let mut e = *claim; - let mut r: Vec = Vec::new(); - let mut quad_polys: Vec = Vec::new(); + let mut r: Vec = Vec::new(); + let mut quad_polys: Vec> = Vec::new(); for _j in 0..num_rounds { - let mut eval_point_0 = Scalar::zero(); - let mut eval_point_2 = Scalar::zero(); + let mut eval_point_0 = F::zero(); + let mut eval_point_2 = F::zero(); let len = poly_A.len() / 2; for i in 0..len { @@ -415,7 +416,7 @@ impl SumcheckInstanceProof { let poly = UniPoly::from_evals(&evals); // append the prover's message to the transcript - poly.append_to_poseidon(transcript); + poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round let r_j = transcript.challenge_scalar(); diff --git a/src/transcript.rs b/src/transcript.rs new file mode 100644 index 00000000..cf61b8f8 --- /dev/null +++ b/src/transcript.rs @@ -0,0 +1,18 @@ +use ark_ff::PrimeField; +use ark_serialize::CanonicalSerialize; 
+/// Transcript is the application level transcript to derive the challenges +/// needed for Fiat Shamir during aggregation. It is given to the +/// prover/verifier so that the transcript can be fed with any other data first. +/// TODO: Make this trait the only Transcript trait +pub trait Transcript { + fn domain_sep(&mut self); + fn append(&mut self, label: &'static [u8], point: &S); + fn challenge_scalar(&mut self, label: &'static [u8]) -> F; + fn challenge_scalar_vec(&mut self, label: &'static [u8], n: usize) -> Vec { + (0..n).map(|_| self.challenge_scalar(label)).collect() + } +} + +pub trait TranscriptWriter { + fn write_to_transcript(&self, transcript: &mut impl Transcript); +} From 86ae3bec79fc96d00c9ac7813c582cb3e54799e1 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Tue, 7 Feb 2023 21:50:34 +0000 Subject: [PATCH 18/64] remove circuit in fq as it's not needed now --- src/constraints.rs | 124 +++++++++++++++++++++++---------------------- src/mipp.rs | 33 ++++++------ src/r1csproof.rs | 67 ++++++++++++------------ src/sqrt_pst.rs | 14 ++--- 4 files changed, 118 insertions(+), 120 deletions(-) diff --git a/src/constraints.rs b/src/constraints.rs index aefc60c7..e39bccbf 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -23,7 +23,7 @@ use ark_crypto_primitives::sponge::{ constraints::CryptographicSpongeVar, poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig}, }; -use ark_poly_commit::multilinear_pc::data_structures::{Commitment}; +use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_r1cs_std::{ alloc::{AllocVar, AllocationMode}, fields::fp::FpVar, @@ -252,7 +252,7 @@ pub struct R1CSVerificationCircuit { } impl R1CSVerificationCircuit { - fn new(config: &VerifierConfig) -> Self { + pub fn new(config: &VerifierConfig) -> Self { Self { num_vars: config.num_vars, num_cons: config.num_cons, @@ -322,10 +322,10 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let (Az_claim, Bz_claim, Cz_claim, prod_Az_Bz_claims) = 
&self.claims_phase2; - let Az_claim_var = FpVar::::new_input(cs.clone(), || Ok(Az_claim))?; - let Bz_claim_var = FpVar::::new_input(cs.clone(), || Ok(Bz_claim))?; - let Cz_claim_var = FpVar::::new_input(cs.clone(), || Ok(Cz_claim))?; - let prod_Az_Bz_claim_var = FpVar::::new_input(cs.clone(), || Ok(prod_Az_Bz_claims))?; + let Az_claim_var = FpVar::::new_witness(cs.clone(), || Ok(Az_claim))?; + let Bz_claim_var = FpVar::::new_witness(cs.clone(), || Ok(Bz_claim))?; + let Cz_claim_var = FpVar::::new_witness(cs.clone(), || Ok(Cz_claim))?; + let prod_Az_Bz_claim_var = FpVar::::new_witness(cs.clone(), || Ok(prod_Az_Bz_claims))?; let one = FpVar::::one(); let prod_vars: Vec> = (0..rx_var.len()) .map(|i| (&rx_var[i] * &tau_vars[i]) + (&one - &rx_var[i]) * (&one - &tau_vars[i])) @@ -415,59 +415,61 @@ pub struct VerifierConfig { pub ry: Vec, pub transcript_sat_state: Scalar, } -#[derive(Clone)] -pub struct VerifierCircuit { - pub inner_circuit: R1CSVerificationCircuit, - pub inner_proof: GrothProof, - pub inner_vk: PreparedVerifyingKey, - pub eval_vars_at_ry: Fr, - pub claims_phase2: (Fr, Fr, Fr, Fr), - pub ry: Vec, - pub transcript_sat_state: Scalar, -} -impl VerifierCircuit { - pub fn new( - config: &VerifierConfig, - mut rng: &mut R, - ) -> Result { - let inner_circuit = R1CSVerificationCircuit::new(config); - let (pk, vk) = Groth16::::setup(inner_circuit.clone(), &mut rng).unwrap(); - let proof = Groth16::::prove(&pk, inner_circuit.clone(), &mut rng)?; - let pvk = Groth16::::process_vk(&vk).unwrap(); - Ok(Self { - inner_circuit, - inner_proof: proof, - inner_vk: pvk, - eval_vars_at_ry: config.eval_vars_at_ry, - claims_phase2: config.claims_phase2, - ry: config.ry.clone(), - transcript_sat_state: config.transcript_sat_state, - }) - } -} - -impl ConstraintSynthesizer for VerifierCircuit { - fn generate_constraints(self, cs: ConstraintSystemRef) -> ark_relations::r1cs::Result<()> { - let proof_var = ProofVar::::new_witness(cs.clone(), || Ok(self.inner_proof.clone()))?; - 
let (v_A, v_B, v_C, v_AB) = self.claims_phase2; - let mut pubs = vec![]; - pubs.extend(self.ry); - pubs.extend(vec![v_A, v_B, v_C, v_AB]); - pubs.extend(vec![self.eval_vars_at_ry, self.transcript_sat_state]); - - let bits = pubs - .iter() - .map(|c| { - let bits: Vec = BitIteratorLE::new(c.into_bigint().as_ref().to_vec()).collect(); - Vec::new_witness(cs.clone(), || Ok(bits)) - }) - .collect::, _>>()?; - let input_var = BooleanInputVar::::new(bits); - - let vk_var = PreparedVerifyingKeyVar::new_witness(cs, || Ok(self.inner_vk.clone()))?; - Groth16VerifierGadget::verify_with_processed_vk(&vk_var, &input_var, &proof_var)? - .enforce_equal(&Boolean::constant(true))?; - Ok(()) - } -} +// Skeleton for the polynomial commitment verification circuit +// #[derive(Clone)] +// pub struct VerifierCircuit { +// pub inner_circuit: R1CSVerificationCircuit, +// pub inner_proof: GrothProof, +// pub inner_vk: PreparedVerifyingKey, +// pub eval_vars_at_ry: Fr, +// pub claims_phase2: (Fr, Fr, Fr, Fr), +// pub ry: Vec, +// pub transcript_sat_state: Scalar, +// } + +// impl VerifierCircuit { +// pub fn new( +// config: &VerifierConfig, +// mut rng: &mut R, +// ) -> Result { +// let inner_circuit = R1CSVerificationCircuit::new(config); +// let (pk, vk) = Groth16::::setup(inner_circuit.clone(), &mut rng).unwrap(); +// let proof = Groth16::::prove(&pk, inner_circuit.clone(), &mut rng)?; +// let pvk = Groth16::::process_vk(&vk).unwrap(); +// Ok(Self { +// inner_circuit, +// inner_proof: proof, +// inner_vk: pvk, +// eval_vars_at_ry: config.eval_vars_at_ry, +// claims_phase2: config.claims_phase2, +// ry: config.ry.clone(), +// transcript_sat_state: config.transcript_sat_state, +// }) +// } +// } + +// impl ConstraintSynthesizer for VerifierCircuit { +// fn generate_constraints(self, cs: ConstraintSystemRef) -> ark_relations::r1cs::Result<()> { +// let proof_var = ProofVar::::new_witness(cs.clone(), || Ok(self.inner_proof.clone()))?; +// let (v_A, v_B, v_C, v_AB) = self.claims_phase2; +// 
let mut pubs = vec![]; +// pubs.extend(self.ry); +// pubs.extend(vec![v_A, v_B, v_C, v_AB]); +// pubs.extend(vec![self.eval_vars_at_ry, self.transcript_sat_state]); + +// let bits = pubs +// .iter() +// .map(|c| { +// let bits: Vec = BitIteratorLE::new(c.into_bigint().as_ref().to_vec()).collect(); +// Vec::new_witness(cs.clone(), || Ok(bits)) +// }) +// .collect::, _>>()?; +// let input_var = BooleanInputVar::::new(bits); + +// let vk_var = PreparedVerifyingKeyVar::new_witness(cs, || Ok(self.inner_vk.clone()))?; +// Groth16VerifierGadget::verify_with_processed_vk(&vk_var, &input_var, &proof_var)? +// .enforce_equal(&Boolean::constant(true))?; +// Ok(()) +// } +// } diff --git a/src/mipp.rs b/src/mipp.rs index 06fe9e29..c37b2e6c 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -1,9 +1,8 @@ - use ark_ec::scalar_mul::variable_base::VariableBaseMSM; use ark_ec::CurveGroup; use ark_ec::{pairing::Pairing, AffineRepr}; use ark_ff::{Field, PrimeField}; -use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; +use ark_poly::DenseMultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{ CommitmentG2, CommitterKey, ProofG1, VerifierKey, }; @@ -75,20 +74,24 @@ impl MippProof { let (ry_l, ry_r) = (&y_l, &y_r); // See section 3.3 for paper version with equivalent names try_par! { - // MIPP part - // Compute cross commitments - // u_l = a[n':] ^ y[:n'] - // TODO to replace by bitsf_multiexp - let comm_u_l = multiexponentiation(ra_l, &ry_r), - // u_r = a[:n'] ^ y[n':] - let comm_u_r = multiexponentiation(ra_r, &ry_l) + // MIPP part + // Compute cross commitments + // u_l = a[n':] ^ y[:n'] + // TODO to replace by bitsf_multiexp + let comm_u_l = multiexponentiation(ra_l, &ry_r), + // u_r = a[:n'] ^ y[n':] + let comm_u_r = multiexponentiation(ra_r, &ry_l) + // Compute the cross pairing products over the distinct halfs of A + + }; + + par! 
{ + // t_l = a[n':] * h[:n'] + let comm_t_l = pairings_product::(&a_l, h_r), + // t_r = a[:n'] * h[n':] + let comm_t_r = pairings_product::(&a_r, h_l) }; - // Compute the cross pairing products over the distinct halfs of A - // t_l = a[n':] * h[:n'] - let comm_t_l = pairings_product::(&a_l, h_r); - // t_r = a[:n'] * h[n':] - let comm_t_r = pairings_product::(&a_r, h_l); // Fiat-Shamir challenge transcript.append(b"comm_u_l", &comm_u_l); @@ -221,7 +224,7 @@ impl MippProof { // respective challenges which is done in parralel and, at the end, // the results are merged together for each vector following their // corresponding merge operation. - enum Op<'a, E: Pairing> { + enum Op<'a, E: Pairing> { TC(&'a E::TargetField, ::BigInt), UC(&'a E::G1Affine, &'a E::ScalarField), } diff --git a/src/r1csproof.rs b/src/r1csproof.rs index ccf87d3e..c9be1112 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -1,7 +1,7 @@ #![allow(clippy::too_many_arguments)] use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; use super::errors::ProofVerifyError; -use crate::constraints::{VerifierCircuit, VerifierConfig}; +use crate::constraints::{R1CSVerificationCircuit, VerifierConfig}; use crate::group::{Fq, Fr}; use crate::math::Math; use crate::mipp::MippProof; @@ -15,7 +15,6 @@ use ark_ec::pairing::Pairing; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; - use super::r1csinstance::R1CSInstance; use super::scalar::Scalar; @@ -302,26 +301,26 @@ impl R1CSProof { transcript_sat_state: self.transcript_sat_state, }; - let prove_inner = Timer::new("proveinnercircuit"); - let start = Instant::now(); - let circuit = VerifierCircuit::new(&config, &mut rand::thread_rng()).unwrap(); - let dp1 = start.elapsed().as_millis(); - prove_inner.stop(); + let circuit = R1CSVerificationCircuit::new(&config); // this is universal, we don't measure it let start = Instant::now(); - let (pk, vk) = Groth16::

::setup(circuit.clone(), &mut rand::thread_rng()).unwrap(); + let (pk, vk) = Groth16::::setup(circuit.clone(), &mut rand::thread_rng()).unwrap(); let ds = start.elapsed().as_millis(); - let prove_outer = Timer::new("proveoutercircuit"); + let prove_outer = Timer::new("provecircuit"); let start = Instant::now(); - let proof = Groth16::

::prove(&pk, circuit, &mut rand::thread_rng()).unwrap(); - let dp2 = start.elapsed().as_millis(); + let proof = Groth16::::prove(&pk, circuit, &mut rand::thread_rng()).unwrap(); + let dp = start.elapsed().as_millis(); prove_outer.stop(); let start = Instant::now(); let verifier_time = Timer::new("groth16_verification"); - let is_verified = Groth16::

::verify(&vk, &[], &proof).unwrap(); + let (v_A, v_B, v_C, v_AB) = self.claims_phase2; + let mut pubs = vec![]; + pubs.extend(self.ry.clone()); + pubs.extend(vec![self.eval_vars_at_ry, self.transcript_sat_state]); + let is_verified = Groth16::::verify(&vk, &pubs, &proof).unwrap(); assert!(is_verified); verifier_time.stop(); @@ -345,7 +344,7 @@ impl R1CSProof { assert!(res == true); let dv = start.elapsed().as_millis(); - Ok((ds, dp1 + dp2, dv)) + Ok((ds, dp, dv)) } // Helper function to find the number of constraint in the circuit which @@ -399,31 +398,31 @@ impl R1CSProof { }; let _rng = ark_std::test_rng(); - let circuit = VerifierCircuit::new(&config, &mut rand::thread_rng()).unwrap(); - - let nc_inner = verify_constraints_inner(circuit.clone(), &num_cons); + let circuit = R1CSVerificationCircuit::new(&config); + let cs = ConstraintSystem::::new_ref(); + circuit.generate_constraints(cs.clone()).unwrap(); + assert!(cs.is_satisfied().unwrap()); - let nc_outer = verify_constraints_outer(circuit, &num_cons); - Ok(nc_inner + nc_outer) + Ok(cs.num_constraints()) } } -fn verify_constraints_outer(circuit: VerifierCircuit, _num_cons: &usize) -> usize { - let cs = ConstraintSystem::::new_ref(); - circuit.generate_constraints(cs.clone()).unwrap(); - assert!(cs.is_satisfied().unwrap()); - cs.num_constraints() -} - -fn verify_constraints_inner(circuit: VerifierCircuit, _num_cons: &usize) -> usize { - let cs = ConstraintSystem::::new_ref(); - circuit - .inner_circuit - .generate_constraints(cs.clone()) - .unwrap(); - assert!(cs.is_satisfied().unwrap()); - cs.num_constraints() -} +// fn verify_constraints_outer(circuit: VerifierCircuit, _num_cons: &usize) -> usize { +// let cs = ConstraintSystem::::new_ref(); +// circuit.generate_constraints(cs.clone()).unwrap(); +// assert!(cs.is_satisfied().unwrap()); +// cs.num_constraints() +// } + +// fn verify_constraints_inner(circuit: VerifierCircuit, _num_cons: &usize) -> usize { +// let cs = ConstraintSystem::::new_ref(); +// circuit 
+// .inner_circuit +// .generate_constraints(cs.clone()) +// .unwrap(); +// assert!(cs.is_satisfied().unwrap()); +// cs.num_constraints() +// } #[cfg(test)] mod tests { diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index b423a2dc..e99cbd55 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,22 +1,17 @@ use crate::mipp::MippProof; use ark_bls12_377::{Bls12_377 as I, G1Projective as G1}; use ark_ec::{pairing::Pairing, scalar_mul::variable_base::VariableBaseMSM, CurveGroup}; -use ark_ff::{One}; +use ark_ff::One; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, }; -use rayon::prelude::{ - IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, -}; +use rayon::prelude::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; use super::scalar::Scalar; use crate::{ - dense_mlpoly::DensePolynomial, - math::Math, - poseidon_transcript::{PoseidonTranscript}, - timer::Timer, + dense_mlpoly::DensePolynomial, math::Math, poseidon_transcript::PoseidonTranscript, timer::Timer, }; pub struct Polynomial { @@ -266,12 +261,11 @@ impl Polynomial { #[cfg(test)] mod tests { - use crate::parameters::poseidon_params; use super::*; - + use ark_std::UniformRand; #[test] fn check_sqrt_poly_eval() { From 6600432927090e09bed82ece50b3511a5427544f Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 00:06:03 +0100 Subject: [PATCH 19/64] done for tonight --- src/scalar/mod.rs | 44 ---- src/sparse_mlpoly.rs | 616 ++++++++++++++++++++++--------------------- src/sqrt_pst.rs | 95 ++++--- 3 files changed, 355 insertions(+), 400 deletions(-) delete mode 100644 src/scalar/mod.rs diff --git a/src/scalar/mod.rs b/src/scalar/mod.rs deleted file mode 100644 index b6182eeb..00000000 --- a/src/scalar/mod.rs +++ /dev/null @@ -1,44 +0,0 @@ -pub use ark_bls12_377::Fr as Scalar; -// mod ristretto255; - -// pub type Scalar = ristretto255::Scalar; -// pub type ScalarBytes = 
curve25519_dalek::scalar::Scalar; - -// pub trait ScalarFromPrimitives { -// fn to_scalar(self) -> Scalar; -// } - -// impl ScalarFromPrimitives for usize { -// #[inline] -// fn to_scalar(self) -> Scalar { -// (0..self).map(|_i| Scalar::one()).sum() -// } -// } - -// impl ScalarFromPrimitives for bool { -// #[inline] -// fn to_scalar(self) -> Scalar { -// if self { -// Scalar::one() -// } else { -// Scalar::zero() -// } -// } -// } - -// pub trait ScalarBytesFromScalar { -// fn decompress_scalar(s: &Scalar) -> ScalarBytes; -// fn decompress_vector(s: &[Scalar]) -> Vec; -// } - -// impl ScalarBytesFromScalar for Scalar { -// fn decompress_scalar(s: &Scalar) -> ScalarBytes { -// ScalarBytes::from_bytes_mod_order(s.to_bytes()) -// } - -// fn decompress_vector(s: &[Scalar]) -> Vec { -// (0..s.len()) -// .map(|i| Scalar::decompress_scalar(&s[i])) -// .collect::>() -// } -// } diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index c2764eb2..ae0f5ea5 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -1,8 +1,6 @@ #![allow(clippy::type_complexity)] #![allow(clippy::too_many_arguments)] #![allow(clippy::needless_range_loop)] -use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; - use super::dense_mlpoly::DensePolynomial; use super::dense_mlpoly::{ EqPolynomial, IdentityPolynomial, PolyCommitment, PolyCommitmentGens, PolyEvalProof, @@ -10,46 +8,53 @@ use super::dense_mlpoly::{ use super::errors::ProofVerifyError; use super::math::Math; use super::product_tree::{DotProductCircuit, ProductCircuit, ProductCircuitEvalProofBatched}; -use super::random::RandomTape; use super::scalar::Scalar; use super::timer::Timer; +use crate::poseidon_transcript::PoseidonTranscript; +use crate::transcript::TranscriptWriter; +use ark_ec::pairing::Pairing; +use ark_ec::CurveGroup; +use ark_ff::PrimeField; use ark_ff::{Field, One, Zero}; use ark_serialize::*; use core::cmp::Ordering; #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)] -pub 
struct SparseMatEntry { +pub struct SparseMatEntry { row: usize, col: usize, - val: Scalar, + val: F, } -impl SparseMatEntry { - pub fn new(row: usize, col: usize, val: Scalar) -> Self { +impl SparseMatEntry { + pub fn new(row: usize, col: usize, val: F) -> Self { SparseMatEntry { row, col, val } } } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)] -pub struct SparseMatPolynomial { +pub struct SparseMatPolynomial { num_vars_x: usize, num_vars_y: usize, - M: Vec, + M: Vec>, } -pub struct Derefs { - row_ops_val: Vec, - col_ops_val: Vec, - comb: DensePolynomial, +pub struct Derefs { + row_ops_val: Vec>, + col_ops_val: Vec>, + comb: DensePolynomial, } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct DerefsCommitment { - comm_ops_val: PolyCommitment, +pub struct DerefsCommitment { + comm_ops_val: PolyCommitment, } -impl Derefs { - pub fn new(row_ops_val: Vec, col_ops_val: Vec) -> Self { +impl Derefs { + pub fn new( + row_ops_val: Vec>, + col_ops_val: Vec>, + ) -> Self { assert_eq!(row_ops_val.len(), col_ops_val.len()); let derefs = { @@ -66,30 +71,29 @@ impl Derefs { derefs } - pub fn commit(&self, gens: &PolyCommitmentGens) -> DerefsCommitment { + pub fn commit(&self, gens: &PolyCommitmentGens) -> DerefsCommitment { let (comm_ops_val, _blinds) = self.comb.commit(gens, None); DerefsCommitment { comm_ops_val } } } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct DerefsEvalProof { - proof_derefs: PolyEvalProof, +pub struct DerefsEvalProof { + proof_derefs: PolyEvalProof, } -impl DerefsEvalProof { +impl DerefsEvalProof { fn protocol_name() -> &'static [u8] { b"Derefs evaluation proof" } fn prove_single( - joint_poly: &DensePolynomial, - r: &[Scalar], - evals: Vec, - gens: &PolyCommitmentGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, - ) -> PolyEvalProof { + joint_poly: &DensePolynomial, + r: &[E::ScalarField], + evals: Vec, + gens: &PolyCommitmentGens, + transcript: &mut 
PoseidonTranscript, + ) -> PolyEvalProof { assert_eq!(joint_poly.get_num_vars(), r.len() + evals.len().log_2()); // append the claimed evaluations to transcript @@ -121,7 +125,6 @@ impl DerefsEvalProof { None, gens, transcript, - random_tape, ); proof_derefs @@ -129,13 +132,12 @@ impl DerefsEvalProof { // evalues both polynomials at r and produces a joint proof of opening pub fn prove( - derefs: &Derefs, - eval_row_ops_val_vec: &[Scalar], - eval_col_ops_val_vec: &[Scalar], + derefs: &Derefs, + eval_row_ops_val_vec: &[E::ScalarField], + eval_col_ops_val_vec: &[E::ScalarField], r: &[Scalar], - gens: &PolyCommitmentGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, + gens: &PolyCommitmentGens, + transcript: &mut PoseidonTranscript, ) -> Self { // transcript.append_protocol_name(DerefsEvalProof::protocol_name()); @@ -145,19 +147,18 @@ impl DerefsEvalProof { evals.resize(evals.len().next_power_of_two(), Scalar::zero()); evals }; - let proof_derefs = - DerefsEvalProof::prove_single(&derefs.comb, r, evals, gens, transcript, random_tape); + let proof_derefs = DerefsEvalProof::prove_single(&derefs.comb, r, evals, gens, transcript); DerefsEvalProof { proof_derefs } } fn verify_single( - proof: &PolyEvalProof, - comm: &PolyCommitment, - r: &[Scalar], - evals: Vec, - gens: &PolyCommitmentGens, - transcript: &mut PoseidonTranscript, + proof: &PolyEvalProof, + comm: &PolyCommitment, + r: &[E::ScalarField], + evals: Vec, + gens: &PolyCommitmentGens, + transcript: &mut PoseidonTranscript, ) -> Result<(), ProofVerifyError> { // append the claimed evaluations to transcript // evals.append_to_transcript(b"evals_ops_val", transcript); @@ -184,12 +185,12 @@ impl DerefsEvalProof { // verify evaluations of both polynomials at r pub fn verify( &self, - r: &[Scalar], - eval_row_ops_val_vec: &[Scalar], - eval_col_ops_val_vec: &[Scalar], - gens: &PolyCommitmentGens, - comm: &DerefsCommitment, - transcript: &mut PoseidonTranscript, + r: &[E::ScalarField], + 
eval_row_ops_val_vec: &[E::ScalarField], + eval_col_ops_val_vec: &[E::ScalarField], + gens: &PolyCommitmentGens, + comm: &DerefsCommitment, + transcript: &mut PoseidonTranscript, ) -> Result<(), ProofVerifyError> { // transcript.append_protocol_name(DerefsEvalProof::protocol_name()); let mut evals = eval_row_ops_val_vec.to_owned(); @@ -207,27 +208,27 @@ impl DerefsEvalProof { } } -impl AppendToPoseidon for DerefsCommitment { - fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { - self.comm_ops_val.append_to_poseidon(transcript); +impl TranscriptWriter for DerefsCommitment { + fn write_to_transcript(&self, transcript: &mut impl Transcript) { + self.comm_ops_val.write_to_transcript(transcript); } } -struct AddrTimestamps { +struct AddrTimestamps { ops_addr_usize: Vec>, - ops_addr: Vec, - read_ts: Vec, - audit_ts: DensePolynomial, + ops_addr: Vec>, + read_ts: Vec>, + audit_ts: DensePolynomial, } -impl AddrTimestamps { +impl AddrTimestamps { pub fn new(num_cells: usize, num_ops: usize, ops_addr: Vec>) -> Self { for item in ops_addr.iter() { assert_eq!(item.len(), num_ops); } let mut audit_ts = vec![0usize; num_cells]; - let mut ops_addr_vec: Vec = Vec::new(); - let mut read_ts_vec: Vec = Vec::new(); + let mut ops_addr_vec: Vec> = Vec::new(); + let mut read_ts_vec: Vec> = Vec::new(); for ops_addr_inst in ops_addr.iter() { let mut read_ts = vec![0usize; num_ops]; @@ -255,47 +256,47 @@ impl AddrTimestamps { } } - fn deref_mem(addr: &[usize], mem_val: &[Scalar]) -> DensePolynomial { + fn deref_mem(addr: &[usize], mem_val: &[F]) -> DensePolynomial { DensePolynomial::new( (0..addr.len()) .map(|i| { let a = addr[i]; mem_val[a] }) - .collect::>(), + .collect::>(), ) } - pub fn deref(&self, mem_val: &[Scalar]) -> Vec { + pub fn deref(&self, mem_val: &[F]) -> Vec> { (0..self.ops_addr.len()) .map(|i| AddrTimestamps::deref_mem(&self.ops_addr_usize[i], mem_val)) - .collect::>() + .collect::>>() } } -pub struct MultiSparseMatPolynomialAsDense { +pub struct 
MultiSparseMatPolynomialAsDense { batch_size: usize, - val: Vec, - row: AddrTimestamps, - col: AddrTimestamps, - comb_ops: DensePolynomial, - comb_mem: DensePolynomial, + val: Vec>, + row: AddrTimestamps, + col: AddrTimestamps, + comb_ops: DensePolynomial, + comb_mem: DensePolynomial, } -pub struct SparseMatPolyCommitmentGens { - gens_ops: PolyCommitmentGens, - gens_mem: PolyCommitmentGens, - gens_derefs: PolyCommitmentGens, +pub struct SparseMatPolyCommitmentGens { + gens_ops: PolyCommitmentGens, + gens_mem: PolyCommitmentGens, + gens_derefs: PolyCommitmentGens, } -impl SparseMatPolyCommitmentGens { +impl SparseMatPolyCommitmentGens { pub fn new( label: &'static [u8], num_vars_x: usize, num_vars_y: usize, num_nz_entries: usize, batch_size: usize, - ) -> SparseMatPolyCommitmentGens { + ) -> Self { let num_vars_ops = num_nz_entries.next_power_of_two().log_2() + (batch_size * 5).next_power_of_two().log_2(); let num_vars_mem = if num_vars_x > num_vars_y { @@ -318,26 +319,26 @@ impl SparseMatPolyCommitmentGens { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct SparseMatPolyCommitment { +pub struct SparseMatPolyCommitment { batch_size: usize, num_ops: usize, num_mem_cells: usize, - comm_comb_ops: PolyCommitment, - comm_comb_mem: PolyCommitment, + comm_comb_ops: PolyCommitment, + comm_comb_mem: PolyCommitment, } -impl AppendToPoseidon for SparseMatPolyCommitment { - fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { - transcript.append_u64(self.batch_size as u64); - transcript.append_u64(self.num_ops as u64); - transcript.append_u64(self.num_mem_cells as u64); - self.comm_comb_ops.append_to_poseidon(transcript); - self.comm_comb_mem.append_to_poseidon(transcript); +impl TranscriptWriter for SparseMatPolyCommitment { + fn write_to_transcript(&self, transcript: &mut impl Transcript) { + transcript.append(self.batch_size as u64); + transcript.append(self.num_ops as u64); + transcript.append(self.num_mem_cells as u64); + 
self.comm_comb_ops.write_to_transcript(transcript); + self.comm_comb_mem.write_to_transcript(transcript); } } -impl SparseMatPolynomial { - pub fn new(num_vars_x: usize, num_vars_y: usize, M: Vec) -> Self { +impl SparseMatPolynomial { + pub fn new(num_vars_x: usize, num_vars_y: usize, M: Vec>) -> Self { SparseMatPolynomial { num_vars_x, num_vars_y, @@ -349,11 +350,11 @@ impl SparseMatPolynomial { self.M.len().next_power_of_two() } - fn sparse_to_dense_vecs(&self, N: usize) -> (Vec, Vec, Vec) { + fn sparse_to_dense_vecs(&self, N: usize) -> (Vec, Vec, Vec) { assert!(N >= self.get_num_nz_entries()); let mut ops_row: Vec = vec![0; N]; let mut ops_col: Vec = vec![0; N]; - let mut val: Vec = vec![Scalar::zero(); N]; + let mut val: Vec = vec![Scalar::zero(); N]; for i in 0..self.M.len() { ops_row[i] = self.M[i].row; @@ -364,8 +365,8 @@ impl SparseMatPolynomial { } fn multi_sparse_to_dense_rep( - sparse_polys: &[&SparseMatPolynomial], - ) -> MultiSparseMatPolynomialAsDense { + sparse_polys: &[&SparseMatPolynomial], + ) -> MultiSparseMatPolynomialAsDense { assert!(!sparse_polys.is_empty()); for i in 1..sparse_polys.len() { assert_eq!(sparse_polys[i].num_vars_x, sparse_polys[0].num_vars_x); @@ -380,7 +381,7 @@ impl SparseMatPolynomial { let mut ops_row_vec: Vec> = Vec::new(); let mut ops_col_vec: Vec> = Vec::new(); - let mut val_vec: Vec = Vec::new(); + let mut val_vec: Vec> = Vec::new(); for poly in sparse_polys { let (ops_row, ops_col, val) = poly.sparse_to_dense_vecs(N); ops_row_vec.push(ops_row); @@ -422,7 +423,7 @@ impl SparseMatPolynomial { } } - fn evaluate_with_tables(&self, eval_table_rx: &[Scalar], eval_table_ry: &[Scalar]) -> Scalar { + fn evaluate_with_tables(&self, eval_table_rx: &[F], eval_table_ry: &[F]) -> F { assert_eq!(self.num_vars_x.pow2(), eval_table_rx.len()); assert_eq!(self.num_vars_y.pow2(), eval_table_ry.len()); @@ -436,20 +437,16 @@ impl SparseMatPolynomial { .sum() } - pub fn multi_evaluate( - polys: &[&SparseMatPolynomial], - rx: &[Scalar], - ry: 
&[Scalar], - ) -> Vec { + pub fn multi_evaluate(polys: &[&SparseMatPolynomial], rx: &[F], ry: &[F]) -> Vec { let eval_table_rx = EqPolynomial::new(rx.to_vec()).evals(); let eval_table_ry = EqPolynomial::new(ry.to_vec()).evals(); (0..polys.len()) .map(|i| polys[i].evaluate_with_tables(&eval_table_rx, &eval_table_ry)) - .collect::>() + .collect::>() } - pub fn multiply_vec(&self, num_rows: usize, num_cols: usize, z: &[Scalar]) -> Vec { + pub fn multiply_vec(&self, num_rows: usize, num_cols: usize, z: &[F]) -> Vec { assert_eq!(z.len(), num_cols); (0..self.M.len()) @@ -459,21 +456,16 @@ impl SparseMatPolynomial { let val = &self.M[i].val; (row, z[col] * val) }) - .fold(vec![Scalar::zero(); num_rows], |mut Mz, (r, v)| { + .fold(vec![F::zero(); num_rows], |mut Mz, (r, v)| { Mz[r] += v; Mz }) } - pub fn compute_eval_table_sparse( - &self, - rx: &[Scalar], - num_rows: usize, - num_cols: usize, - ) -> Vec { + pub fn compute_eval_table_sparse(&self, rx: &[F], num_rows: usize, num_cols: usize) -> Vec { assert_eq!(rx.len(), num_rows); - let mut M_evals: Vec = vec![Scalar::zero(); num_cols]; + let mut M_evals: Vec = vec![F::zero(); num_cols]; for i in 0..self.M.len() { let entry = &self.M[i]; @@ -482,10 +474,16 @@ impl SparseMatPolynomial { M_evals } - pub fn multi_commit( - sparse_polys: &[&SparseMatPolynomial], - gens: &SparseMatPolyCommitmentGens, - ) -> (SparseMatPolyCommitment, MultiSparseMatPolynomialAsDense) { + pub fn multi_commit( + sparse_polys: &[&SparseMatPolynomial], + gens: &SparseMatPolyCommitmentGens, + ) -> ( + SparseMatPolyCommitment, + MultiSparseMatPolynomialAsDense, + ) + where + E: Pairing, + { let batch_size = sparse_polys.len(); let dense = SparseMatPolynomial::multi_sparse_to_dense_rep(sparse_polys); @@ -505,8 +503,8 @@ impl SparseMatPolynomial { } } -impl MultiSparseMatPolynomialAsDense { - pub fn deref(&self, row_mem_val: &[Scalar], col_mem_val: &[Scalar]) -> Derefs { +impl MultiSparseMatPolynomialAsDense { + pub fn deref(&self, row_mem_val: &[F], 
col_mem_val: &[F]) -> Derefs { let row_ops_val = self.row.deref(row_mem_val); let col_ops_val = self.col.deref(col_mem_val); @@ -515,39 +513,37 @@ impl MultiSparseMatPolynomialAsDense { } #[derive(Debug)] -struct ProductLayer { - init: ProductCircuit, - read_vec: Vec, - write_vec: Vec, - audit: ProductCircuit, +struct ProductLayer { + init: ProductCircuit, + read_vec: Vec>, + write_vec: Vec>, + audit: ProductCircuit, } #[derive(Debug)] -struct Layers { - prod_layer: ProductLayer, +struct Layers { + prod_layer: ProductLayer, } -impl Layers { +impl Layers { fn build_hash_layer( - eval_table: &[Scalar], - addrs_vec: &[DensePolynomial], - derefs_vec: &[DensePolynomial], - read_ts_vec: &[DensePolynomial], - audit_ts: &DensePolynomial, - r_mem_check: &(Scalar, Scalar), + eval_table: &[F], + addrs_vec: &[DensePolynomial], + derefs_vec: &[DensePolynomial], + read_ts_vec: &[DensePolynomial], + audit_ts: &DensePolynomial, + r_mem_check: &(F, F), ) -> ( - DensePolynomial, - Vec, - Vec, - DensePolynomial, + DensePolynomial, + Vec>, + Vec>, + DensePolynomial, ) { let (r_hash, r_multiset_check) = r_mem_check; //hash(addr, val, ts) = ts * r_hash_sqr + val * r_hash + addr let r_hash_sqr = r_hash.square(); - let hash_func = |addr: &Scalar, val: &Scalar, ts: &Scalar| -> Scalar { - r_hash_sqr * ts + (*val) * r_hash + addr - }; + let hash_func = |addr: &F, val: &F, ts: &F| -> F { r_hash_sqr * ts + (*val) * r_hash + addr }; // hash init and audit that does not depend on #instances let num_mem_cells = eval_table.len(); @@ -555,22 +551,22 @@ impl Layers { (0..num_mem_cells) .map(|i| { // at init time, addr is given by i, init value is given by eval_table, and ts = 0 - hash_func(&Scalar::from(i as u64), &eval_table[i], &Scalar::zero()) - r_multiset_check + hash_func(&F::from(i as u64), &eval_table[i], &F::zero()) - r_multiset_check }) - .collect::>(), + .collect::>(), ); let poly_audit_hashed = DensePolynomial::new( (0..num_mem_cells) .map(|i| { // at audit time, addr is given by i, value 
is given by eval_table, and ts is given by audit_ts - hash_func(&Scalar::from(i as u64), &eval_table[i], &audit_ts[i]) - r_multiset_check + hash_func(&F::from(i as u64), &eval_table[i], &audit_ts[i]) - r_multiset_check }) - .collect::>(), + .collect::>(), ); // hash read and write that depends on #instances - let mut poly_read_hashed_vec: Vec = Vec::new(); - let mut poly_write_hashed_vec: Vec = Vec::new(); + let mut poly_read_hashed_vec: Vec> = Vec::new(); + let mut poly_write_hashed_vec: Vec> = Vec::new(); for i in 0..addrs_vec.len() { let (addrs, derefs, read_ts) = (&addrs_vec[i], &derefs_vec[i], &read_ts_vec[i]); assert_eq!(addrs.len(), derefs.len()); @@ -592,7 +588,7 @@ impl Layers { // at write time, addr is given by addrs, value is given by derefs, and ts is given by write_ts = read_ts + 1 hash_func(&addrs[i], &derefs[i], &(read_ts[i] + Scalar::one())) - r_multiset_check }) - .collect::>(), + .collect::>(), ); poly_write_hashed_vec.push(poly_write_hashed); } @@ -606,10 +602,10 @@ impl Layers { } pub fn new( - eval_table: &[Scalar], - addr_timestamps: &AddrTimestamps, - poly_ops_val: &[DensePolynomial], - r_mem_check: &(Scalar, Scalar), + eval_table: &[F], + addr_timestamps: &AddrTimestamps, + poly_ops_val: &[DensePolynomial], + r_mem_check: &(F, F), ) -> Self { let (poly_init_hashed, poly_read_hashed_vec, poly_write_hashed_vec, poly_audit_hashed) = Layers::build_hash_layer( @@ -627,19 +623,19 @@ impl Layers { .collect::>(); let prod_write_vec = (0..poly_write_hashed_vec.len()) .map(|i| ProductCircuit::new(&poly_write_hashed_vec[i])) - .collect::>(); + .collect::>>(); let prod_audit = ProductCircuit::new(&poly_audit_hashed); // subset audit check - let hashed_writes: Scalar = (0..prod_write_vec.len()) + let hashed_writes: F = (0..prod_write_vec.len()) .map(|i| prod_write_vec[i].evaluate()) .product(); - let hashed_write_set: Scalar = prod_init.evaluate() * hashed_writes; + let hashed_write_set: F = prod_init.evaluate() * hashed_writes; - let hashed_reads: 
Scalar = (0..prod_read_vec.len()) + let hashed_reads: F = (0..prod_read_vec.len()) .map(|i| prod_read_vec[i].evaluate()) .product(); - let hashed_read_set: Scalar = hashed_reads * prod_audit.evaluate(); + let hashed_read_set: F = hashed_reads * prod_audit.evaluate(); //assert_eq!(hashed_read_set, hashed_write_set); debug_assert_eq!(hashed_read_set, hashed_write_set); @@ -656,18 +652,18 @@ impl Layers { } #[derive(Debug)] -struct PolyEvalNetwork { - row_layers: Layers, - col_layers: Layers, +struct PolyEvalNetwork { + row_layers: Layers, + col_layers: Layers, } -impl PolyEvalNetwork { +impl PolyEvalNetwork { pub fn new( - dense: &MultiSparseMatPolynomialAsDense, - derefs: &Derefs, - mem_rx: &[Scalar], - mem_ry: &[Scalar], - r_mem_check: &(Scalar, Scalar), + dense: &MultiSparseMatPolynomialAsDense, + derefs: &Derefs, + mem_rx: &[F], + mem_ry: &[F], + r_mem_check: &(F, F), ) -> Self { let row_layers = Layers::new(mem_rx, &dense.row, &derefs.row_ops_val, r_mem_check); let col_layers = Layers::new(mem_ry, &dense.col, &derefs.col_ops_val, r_mem_check); @@ -680,25 +676,25 @@ impl PolyEvalNetwork { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -struct HashLayerProof { - eval_row: (Vec, Vec, Scalar), - eval_col: (Vec, Vec, Scalar), - eval_val: Vec, - eval_derefs: (Vec, Vec), - proof_ops: PolyEvalProof, - proof_mem: PolyEvalProof, - proof_derefs: DerefsEvalProof, +struct HashLayerProof { + eval_row: (Vec, Vec, E::ScalarField), + eval_col: (Vec, Vec, E::ScalarField), + eval_val: Vec, + eval_derefs: (Vec, Vec), + proof_ops: PolyEvalProof, + proof_mem: PolyEvalProof, + proof_derefs: DerefsEvalProof, } -impl HashLayerProof { +impl HashLayerProof { fn protocol_name() -> &'static [u8] { b"Sparse polynomial hash layer proof" } fn prove_helper( - rand: (&Vec, &Vec), - addr_timestamps: &AddrTimestamps, - ) -> (Vec, Vec, Scalar) { + rand: (&Vec, &Vec), + addr_timestamps: &AddrTimestamps, + ) -> (Vec, Vec, Scalar) { let (rand_mem, rand_ops) = rand; // decommit ops-addr 
at rand_ops @@ -709,7 +705,7 @@ impl HashLayerProof { } // decommit read_ts at rand_ops - let mut eval_read_ts_vec: Vec = Vec::new(); + let mut eval_read_ts_vec: Vec = Vec::new(); for i in 0..addr_timestamps.read_ts.len() { let eval_read_ts = addr_timestamps.read_ts[i].evaluate(rand_ops); eval_read_ts_vec.push(eval_read_ts); @@ -722,12 +718,11 @@ impl HashLayerProof { } fn prove( - rand: (&Vec, &Vec), - dense: &MultiSparseMatPolynomialAsDense, - derefs: &Derefs, - gens: &SparseMatPolyCommitmentGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, + rand: (&Vec, &Vec), + dense: &MultiSparseMatPolynomialAsDense, + derefs: &Derefs, + gens: &SparseMatPolyCommitmentGens, + transcript: &mut PoseidonTranscript, ) -> Self { // transcript.append_protocol_name(HashLayerProof::protocol_name()); @@ -736,10 +731,10 @@ impl HashLayerProof { // decommit derefs at rand_ops let eval_row_ops_val = (0..derefs.row_ops_val.len()) .map(|i| derefs.row_ops_val[i].evaluate(rand_ops)) - .collect::>(); + .collect::>(); let eval_col_ops_val = (0..derefs.col_ops_val.len()) .map(|i| derefs.col_ops_val[i].evaluate(rand_ops)) - .collect::>(); + .collect::>(); let proof_derefs = DerefsEvalProof::prove( derefs, &eval_row_ops_val, @@ -747,7 +742,6 @@ impl HashLayerProof { rand_ops, &gens.gens_derefs, transcript, - random_tape, ); let eval_derefs = (eval_row_ops_val, eval_col_ops_val); @@ -759,16 +753,16 @@ impl HashLayerProof { HashLayerProof::prove_helper((rand_mem, rand_ops), &dense.col); let eval_val_vec = (0..dense.val.len()) .map(|i| dense.val[i].evaluate(rand_ops)) - .collect::>(); + .collect::>(); // form a single decommitment using comm_comb_ops - let mut evals_ops: Vec = Vec::new(); + let mut evals_ops: Vec = Vec::new(); evals_ops.extend(&eval_row_addr_vec); evals_ops.extend(&eval_row_read_ts_vec); evals_ops.extend(&eval_col_addr_vec); evals_ops.extend(&eval_col_read_ts_vec); evals_ops.extend(&eval_val_vec); - evals_ops.resize(evals_ops.len().next_power_of_two(), 
Scalar::zero()); + evals_ops.resize(evals_ops.len().next_power_of_two(), E::ScalarField::zero()); transcript.append_scalar_vector(&evals_ops); let challenges_ops = transcript.challenge_vector(evals_ops.len().log_2()); @@ -790,7 +784,6 @@ impl HashLayerProof { None, &gens.gens_ops, transcript, - random_tape, ); // form a single decommitment using comb_comb_mem at rand_mem @@ -817,7 +810,6 @@ impl HashLayerProof { None, &gens.gens_mem, transcript, - random_tape, ); HashLayerProof { @@ -832,20 +824,26 @@ impl HashLayerProof { } fn verify_helper( - rand: &(&Vec, &Vec), - claims: &(Scalar, Vec, Vec, Scalar), - eval_ops_val: &[Scalar], - eval_ops_addr: &[Scalar], - eval_read_ts: &[Scalar], - eval_audit_ts: &Scalar, - r: &[Scalar], - r_hash: &Scalar, - r_multiset_check: &Scalar, + rand: &(&Vec, &Vec), + claims: &( + E::ScalarField, + Vec, + Vec, + E::ScalarField, + ), + eval_ops_val: &[E::ScalarField], + eval_ops_addr: &[E::ScalarField], + eval_read_ts: &[E::ScalarField], + eval_audit_ts: &E::ScalarField, + r: &[E::ScalarField], + r_hash: &E::ScalarField, + r_multiset_check: &E::ScalarField, ) -> Result<(), ProofVerifyError> { let r_hash_sqr = r_hash.square(); - let hash_func = |addr: &Scalar, val: &Scalar, ts: &Scalar| -> Scalar { - r_hash_sqr * ts + (*val) * r_hash + addr - }; + let hash_func = |addr: &E::ScalarField, + val: &E::ScalarField, + ts: &E::ScalarField| + -> E::ScalarField { r_hash_sqr * ts + (*val) * r_hash + addr }; let (rand_mem, _rand_ops) = rand; let (claim_init, claim_read, claim_write, claim_audit) = claims; @@ -854,7 +852,7 @@ impl HashLayerProof { let eval_init_addr = IdentityPolynomial::new(rand_mem.len()).evaluate(rand_mem); let eval_init_val = EqPolynomial::new(r.to_vec()).evaluate(rand_mem); let hash_init_at_rand_mem = - hash_func(&eval_init_addr, &eval_init_val, &Scalar::zero()) - r_multiset_check; // verify the claim_last of init chunk + hash_func(&eval_init_addr, &eval_init_val, &E::ScalarField::zero()) - r_multiset_check; // verify the 
claim_last of init chunk assert_eq!(&hash_init_at_rand_mem, claim_init); // read @@ -866,7 +864,7 @@ impl HashLayerProof { // write: shares addr, val component; only decommit write_ts for i in 0..eval_ops_addr.len() { - let eval_write_ts = eval_read_ts[i] + Scalar::one(); + let eval_write_ts = eval_read_ts[i] + E::ScalarField::one(); let hash_write_at_rand_ops = hash_func(&eval_ops_addr[i], &eval_ops_val[i], &eval_write_ts) - r_multiset_check; // verify the claim_last of init chunk assert_eq!(&hash_write_at_rand_ops, &claim_write[i]); @@ -884,18 +882,28 @@ impl HashLayerProof { fn verify( &self, - rand: (&Vec, &Vec), - claims_row: &(Scalar, Vec, Vec, Scalar), - claims_col: &(Scalar, Vec, Vec, Scalar), - claims_dotp: &[Scalar], - comm: &SparseMatPolyCommitment, - gens: &SparseMatPolyCommitmentGens, - comm_derefs: &DerefsCommitment, - rx: &[Scalar], - ry: &[Scalar], - r_hash: &Scalar, - r_multiset_check: &Scalar, - transcript: &mut PoseidonTranscript, + rand: (&Vec, &Vec), + claims_row: &( + E::ScalarField, + Vec, + Vec, + E::ScalarField, + ), + claims_col: &( + E::ScalarField, + Vec, + Vec, + E::ScalarField, + ), + claims_dotp: &[E::ScalarField], + comm: &SparseMatPolyCommitment, + gens: &SparseMatPolyCommitmentGens, + comm_derefs: &DerefsCommitment, + rx: &[E::ScalarField], + ry: &[E::ScalarField], + r_hash: &E::ScalarField, + r_multiset_check: &E::ScalarField, + transcript: &mut PoseidonTranscript, ) -> Result<(), ProofVerifyError> { let timer = Timer::new("verify_hash_proof"); // transcript.append_protocol_name(HashLayerProof::protocol_name()); @@ -937,7 +945,7 @@ impl HashLayerProof { evals_ops.extend(eval_col_addr_vec); evals_ops.extend(eval_col_read_ts_vec); evals_ops.extend(eval_val_vec); - evals_ops.resize(evals_ops.len().next_power_of_two(), Scalar::zero()); + evals_ops.resize(evals_ops.len().next_power_of_two(), E::ScalarField::zero()); transcript.append_scalar_vector(&evals_ops); // evals_ops.append_to_transcript(b"claim_evals_ops", transcript); let 
challenges_ops = transcript.challenge_vector(evals_ops.len().log_2()); @@ -964,7 +972,7 @@ impl HashLayerProof { // verify proof-mem using comm_comb_mem at rand_mem // form a single decommitment using comb_comb_mem at rand_mem - let evals_mem: Vec = vec![*eval_row_audit_ts, *eval_col_audit_ts]; + let evals_mem: Vec = vec![*eval_row_audit_ts, *eval_col_audit_ts]; // evals_mem.append_to_transcript(b"claim_evals_mem", transcript); transcript.append_scalar_vector(&evals_mem); let challenges_mem = transcript.challenge_vector(evals_mem.len().log_2()); @@ -1020,43 +1028,43 @@ impl HashLayerProof { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -struct ProductLayerProof { - eval_row: (Scalar, Vec, Vec, Scalar), - eval_col: (Scalar, Vec, Vec, Scalar), - eval_val: (Vec, Vec), - proof_mem: ProductCircuitEvalProofBatched, - proof_ops: ProductCircuitEvalProofBatched, +struct ProductLayerProof { + eval_row: (F, Vec, Vec, F), + eval_col: (F, Vec, Vec, F), + eval_val: (Vec, Vec), + proof_mem: ProductCircuitEvalProofBatched, + proof_ops: ProductCircuitEvalProofBatched, } -impl ProductLayerProof { +impl ProductLayerProof { fn protocol_name() -> &'static [u8] { b"Sparse polynomial product layer proof" } pub fn prove( - row_prod_layer: &mut ProductLayer, - col_prod_layer: &mut ProductLayer, - dense: &MultiSparseMatPolynomialAsDense, - derefs: &Derefs, - eval: &[Scalar], - transcript: &mut PoseidonTranscript, - ) -> (Self, Vec, Vec) { + row_prod_layer: &mut ProductLayer, + col_prod_layer: &mut ProductLayer, + dense: &MultiSparseMatPolynomialAsDense, + derefs: &Derefs, + eval: &[F], + transcript: &mut PoseidonTranscript, + ) -> (Self, Vec, Vec) { // transcript.append_protocol_name(ProductLayerProof::protocol_name()); let row_eval_init = row_prod_layer.init.evaluate(); let row_eval_audit = row_prod_layer.audit.evaluate(); let row_eval_read = (0..row_prod_layer.read_vec.len()) .map(|i| row_prod_layer.read_vec[i].evaluate()) - .collect::>(); + .collect::>(); let 
row_eval_write = (0..row_prod_layer.write_vec.len()) .map(|i| row_prod_layer.write_vec[i].evaluate()) - .collect::>(); + .collect::>(); // subset check - let ws: Scalar = (0..row_eval_write.len()) + let ws: F = (0..row_eval_write.len()) .map(|i| row_eval_write[i]) .product(); - let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); + let rs: F = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); assert_eq!(row_eval_init * ws, rs * row_eval_audit); transcript.append_scalar(&row_eval_init); @@ -1066,18 +1074,18 @@ impl ProductLayerProof { let col_eval_init = col_prod_layer.init.evaluate(); let col_eval_audit = col_prod_layer.audit.evaluate(); - let col_eval_read: Vec = (0..col_prod_layer.read_vec.len()) + let col_eval_read: Vec = (0..col_prod_layer.read_vec.len()) .map(|i| col_prod_layer.read_vec[i].evaluate()) .collect(); - let col_eval_write: Vec = (0..col_prod_layer.write_vec.len()) + let col_eval_write: Vec = (0..col_prod_layer.write_vec.len()) .map(|i| col_prod_layer.write_vec[i].evaluate()) .collect(); // subset check - let ws: Scalar = (0..col_eval_write.len()) + let ws: F = (0..col_eval_write.len()) .map(|i| col_eval_write[i]) .product(); - let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); + let rs: F = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); assert_eq!(col_eval_init * ws, rs * col_eval_audit); transcript.append_scalar(&col_eval_init); @@ -1089,10 +1097,10 @@ impl ProductLayerProof { assert_eq!(eval.len(), derefs.row_ops_val.len()); assert_eq!(eval.len(), derefs.col_ops_val.len()); assert_eq!(eval.len(), dense.val.len()); - let mut dotp_circuit_left_vec: Vec = Vec::new(); - let mut dotp_circuit_right_vec: Vec = Vec::new(); - let mut eval_dotp_left_vec: Vec = Vec::new(); - let mut eval_dotp_right_vec: Vec = Vec::new(); + let mut dotp_circuit_left_vec: Vec> = Vec::new(); + let mut dotp_circuit_right_vec: Vec> = Vec::new(); + let mut eval_dotp_left_vec: Vec = Vec::new(); + let 
mut eval_dotp_right_vec: Vec = Vec::new(); for i in 0..derefs.row_ops_val.len() { // evaluate sparse polynomial evaluation using two dotp checks let left = derefs.row_ops_val[i].clone(); @@ -1222,15 +1230,15 @@ impl ProductLayerProof { &self, num_ops: usize, num_cells: usize, - eval: &[Scalar], - transcript: &mut PoseidonTranscript, + eval: &[F], + transcript: &mut PoseidonTranscript, ) -> Result< ( - Vec, - Vec, - Vec, - Vec, - Vec, + Vec, + Vec, + Vec, + Vec, + Vec, ), ProofVerifyError, > { @@ -1243,7 +1251,7 @@ impl ProductLayerProof { let (row_eval_init, row_eval_read, row_eval_write, row_eval_audit) = &self.eval_row; assert_eq!(row_eval_write.len(), num_instances); assert_eq!(row_eval_read.len(), num_instances); - let ws: Scalar = (0..row_eval_write.len()) + let ws: F = (0..row_eval_write.len()) .map(|i| row_eval_write[i]) .product(); let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); @@ -1263,10 +1271,10 @@ impl ProductLayerProof { let (col_eval_init, col_eval_read, col_eval_write, col_eval_audit) = &self.eval_col; assert_eq!(col_eval_write.len(), num_instances); assert_eq!(col_eval_read.len(), num_instances); - let ws: Scalar = (0..col_eval_write.len()) + let ws: F = (0..col_eval_write.len()) .map(|i| col_eval_write[i]) .product(); - let rs: Scalar = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); + let rs: F = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); assert_eq!(ws * col_eval_init, rs * col_eval_audit); // col_eval_init.append_to_transcript(b"claim_col_eval_init", transcript); @@ -1283,7 +1291,7 @@ impl ProductLayerProof { let (eval_dotp_left, eval_dotp_right) = &self.eval_val; assert_eq!(eval_dotp_left.len(), eval_dotp_left.len()); assert_eq!(eval_dotp_left.len(), num_instances); - let mut claims_dotp_circuit: Vec = Vec::new(); + let mut claims_dotp_circuit: Vec = Vec::new(); for i in 0..num_instances { assert_eq!(eval_dotp_left[i] + eval_dotp_right[i], eval[i]); // 
eval_dotp_left[i].append_to_transcript(b"claim_eval_dotp_left", transcript); @@ -1296,7 +1304,7 @@ impl ProductLayerProof { } // verify the correctness of claim_row_eval_read, claim_row_eval_write, claim_col_eval_read, and claim_col_eval_write - let mut claims_prod_circuit: Vec = Vec::new(); + let mut claims_prod_circuit: Vec = Vec::new(); claims_prod_circuit.extend(row_eval_read); claims_prod_circuit.extend(row_eval_write); claims_prod_circuit.extend(col_eval_read); @@ -1327,24 +1335,23 @@ impl ProductLayerProof { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -struct PolyEvalNetworkProof { - proof_prod_layer: ProductLayerProof, - proof_hash_layer: HashLayerProof, +struct PolyEvalNetworkProof{ + proof_prod_layer: ProductLayerProof, + proof_hash_layer: HashLayerProof, } -impl PolyEvalNetworkProof { +impl PolyEvalNetworkProof { fn protocol_name() -> &'static [u8] { b"Sparse polynomial evaluation proof" } pub fn prove( - network: &mut PolyEvalNetwork, - dense: &MultiSparseMatPolynomialAsDense, - derefs: &Derefs, - evals: &[Scalar], - gens: &SparseMatPolyCommitmentGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, + network: &mut PolyEvalNetwork, + dense: &MultiSparseMatPolynomialAsDense, + derefs: &Derefs, + evals: &[E::ScalarField], + gens: &SparseMatPolyCommitmentGens, + transcript: &mut PoseidonTranscript, ) -> Self { // transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name()); @@ -1364,7 +1371,6 @@ impl PolyEvalNetworkProof { derefs, gens, transcript, - random_tape, ); PolyEvalNetworkProof { @@ -1375,15 +1381,15 @@ impl PolyEvalNetworkProof { pub fn verify( &self, - comm: &SparseMatPolyCommitment, - comm_derefs: &DerefsCommitment, - evals: &[Scalar], - gens: &SparseMatPolyCommitmentGens, - rx: &[Scalar], - ry: &[Scalar], - r_mem_check: &(Scalar, Scalar), + comm: &SparseMatPolyCommitment, + comm_derefs: &DerefsCommitment, + evals: &[E::ScalarField], + gens: &SparseMatPolyCommitmentGens, + rx: &[E::ScalarField], 
+ ry: &[E::ScalarField], + r_mem_check: &(E::ScalarField, E::ScalarField), nz: usize, - transcript: &mut PoseidonTranscript, + transcript: &mut PoseidonTranscript, ) -> Result<(), ProofVerifyError> { let timer = Timer::new("verify_polyeval_proof"); // transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name()); @@ -1438,27 +1444,27 @@ impl PolyEvalNetworkProof { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct SparseMatPolyEvalProof { - comm_derefs: DerefsCommitment, - poly_eval_network_proof: PolyEvalNetworkProof, +pub struct SparseMatPolyEvalProof { + comm_derefs: DerefsCommitment, + poly_eval_network_proof: PolyEvalNetworkProof, } -impl SparseMatPolyEvalProof { +impl SparseMatPolyEvalProof { fn protocol_name() -> &'static [u8] { b"Sparse polynomial evaluation proof" } - fn equalize(rx: &[Scalar], ry: &[Scalar]) -> (Vec, Vec) { + fn equalize(rx: &[E::ScalarField], ry: &[E::ScalarField]) -> (Vec, Vec) { match rx.len().cmp(&ry.len()) { Ordering::Less => { let diff = ry.len() - rx.len(); - let mut rx_ext = vec![Scalar::zero(); diff]; + let mut rx_ext = vec![E::ScalarField::zero(); diff]; rx_ext.extend(rx); (rx_ext, ry.to_vec()) } Ordering::Greater => { let diff = rx.len() - ry.len(); - let mut ry_ext = vec![Scalar::zero(); diff]; + let mut ry_ext = vec![E::ScalarField::zero(); diff]; ry_ext.extend(ry); (rx.to_vec(), ry_ext) } @@ -1467,13 +1473,12 @@ impl SparseMatPolyEvalProof { } pub fn prove( - dense: &MultiSparseMatPolynomialAsDense, - rx: &[Scalar], // point at which the polynomial is evaluated - ry: &[Scalar], - evals: &[Scalar], // a vector evaluation of \widetilde{M}(r = (rx,ry)) for each M - gens: &SparseMatPolyCommitmentGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, + dense: &MultiSparseMatPolynomialAsDense, + rx: &[E::ScalarField], // point at which the polynomial is evaluated + ry: &[E::ScalarField], + evals: &[E::ScalarField], // a vector evaluation of \widetilde{M}(r = (rx,ry)) for each M 
+ gens: &SparseMatPolyCommitmentGens, + transcript: &mut PoseidonTranscript, ) -> SparseMatPolyEvalProof { // transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name()); @@ -1522,7 +1527,6 @@ impl SparseMatPolyEvalProof { evals, gens, transcript, - random_tape, ); timer_eval_network.stop(); @@ -1537,12 +1541,12 @@ impl SparseMatPolyEvalProof { pub fn verify( &self, - comm: &SparseMatPolyCommitment, - rx: &[Scalar], // point at which the polynomial is evaluated - ry: &[Scalar], - evals: &[Scalar], // evaluation of \widetilde{M}(r = (rx,ry)) - gens: &SparseMatPolyCommitmentGens, - transcript: &mut PoseidonTranscript, + comm: &SparseMatPolyCommitment, + rx: &[E::ScalarField], // point at which the polynomial is evaluated + ry: &[E::ScalarField], + evals: &[E::ScalarField], // evaluation of \widetilde{M}(r = (rx,ry)) + gens: &SparseMatPolyCommitmentGens, + transcript: &mut PoseidonTranscript, ) -> Result<(), ProofVerifyError> { // transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name()); @@ -1573,44 +1577,44 @@ impl SparseMatPolyEvalProof { } #[derive(Clone)] -pub struct SparsePolyEntry { +pub struct SparsePolyEntry { pub idx: usize, - pub val: Scalar, + pub val: F, } -impl SparsePolyEntry { - pub fn new(idx: usize, val: Scalar) -> Self { +impl SparsePolyEntry{ + pub fn new(idx: usize, val: F) -> Self { SparsePolyEntry { idx, val } } } #[derive(Clone)] -pub struct SparsePolynomial { +pub struct SparsePolynomial { pub num_vars: usize, - pub Z: Vec, + pub Z: Vec>, } -impl SparsePolynomial { - pub fn new(num_vars: usize, Z: Vec) -> Self { +impl SparsePolynomial { + pub fn new(num_vars: usize, Z: Vec>) -> Self { SparsePolynomial { num_vars, Z } } // TF IS THIS?? 
- fn compute_chi(a: &[bool], r: &[Scalar]) -> Scalar { + fn compute_chi(a: &[bool], r: &[F]) -> F { assert_eq!(a.len(), r.len()); - let mut chi_i = Scalar::one(); + let mut chi_i = F::one(); for j in 0..r.len() { if a[j] { chi_i *= r[j]; } else { - chi_i *= Scalar::one() - r[j]; + chi_i *= F::one() - r[j]; } } chi_i } // Takes O(n log n). TODO: do this in O(n) where n is the number of entries in Z - pub fn evaluate(&self, r: &[Scalar]) -> Scalar { + pub fn evaluate(&self, r: &[F]) -> F { assert_eq!(self.num_vars, r.len()); (0..self.Z.len()) diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index b423a2dc..50fdc7fd 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,7 +1,6 @@ use crate::mipp::MippProof; -use ark_bls12_377::{Bls12_377 as I, G1Projective as G1}; use ark_ec::{pairing::Pairing, scalar_mul::variable_base::VariableBaseMSM, CurveGroup}; -use ark_ff::{One}; +use ark_ff::One; use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, @@ -13,36 +12,33 @@ use rayon::prelude::{ use super::scalar::Scalar; use crate::{ - dense_mlpoly::DensePolynomial, - math::Math, - poseidon_transcript::{PoseidonTranscript}, - timer::Timer, + dense_mlpoly::DensePolynomial, math::Math, poseidon_transcript::PoseidonTranscript, timer::Timer, }; -pub struct Polynomial { +pub struct Polynomial { m: usize, - polys: Vec, - q: Option, - chis_b: Option>, + polys: Vec>, + q: Option>, + chis_b: Option>, } -impl Polynomial { +impl Polynomial { // Given the evaluations over the boolean hypercube of a polynomial p of size // 2*m compute the sqrt-sized polynomials p_i as // p_i(Y) = \sum_{j \in \{0,1\}^m} p(j, i) * chi_j(Y) // where p(X,Y) = \sum_{i \in \{0,\1}^m} chi_i(X) * p_i(Y) // // TODO: add case when the length of the list is not an even power of 2 - pub fn from_evaluations(Z: &[Scalar]) -> Self { + pub fn from_evaluations(Z: &[E::ScalarField]) -> Self { let pl_timer = Timer::new("poly_list_build"); // check the evaluation 
list is a power of 2 debug_assert!(Z.len() & (Z.len() - 1) == 0); let m = Z.len().log_2() / 2; let pow_m = 2_usize.pow(m as u32); - let polys: Vec = (0..pow_m) + let polys: Vec> = (0..pow_m) .into_par_iter() .map(|i| { - let z: Vec = (0..pow_m) + let z: Vec = (0..pow_m) .into_par_iter() // viewing the list of evaluation as a square matrix // we select by row i and column j @@ -65,19 +61,19 @@ impl Polynomial { // q(Y) = // \sum_{j \in \{0,1\}^m}(\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(b)) * chi_j(Y) // and p(a,b) = q(b) where p is the initial polynomial - fn get_q(&mut self, point: &[Scalar]) { + fn get_q(&mut self, point: &[E::ScalarField]) { let q_timer = Timer::new("build_q"); debug_assert!(point.len() == 2 * self.m); let _a = &point[0..self.m]; let b = &point[self.m..2 * self.m]; let pow_m = 2_usize.pow(self.m as u32); - let chis: Vec = (0..pow_m) + let chis: Vec = (0..pow_m) .into_par_iter() .map(|i| Self::get_chi_i(b, i)) .collect(); - let z_q: Vec = (0..pow_m) + let z_q: Vec = (0..pow_m) .into_par_iter() .map(|j| (0..pow_m).map(|i| self.polys[i].Z[j] * chis[i]).sum()) .collect(); @@ -89,7 +85,7 @@ impl Polynomial { // Given point = (\vec{a}, \vec{b}) used to construct q // compute q(b) = p(a,b). 
- pub fn eval(&mut self, point: &[Scalar]) -> Scalar { + pub fn eval(&mut self, point: &[E::ScalarField]) -> E::ScalarField { let a = &point[0..point.len() / 2]; let _b = &point[point.len() / 2..point.len()]; if self.q.is_none() { @@ -103,16 +99,16 @@ impl Polynomial { prods.sum() } - pub fn commit(&self, ck: &CommitterKey) -> (Vec>, ::TargetField) { + pub fn commit(&self, ck: &CommitterKey) -> (Vec>, E::TargetField) { let timer_commit = Timer::new("sqrt_commit"); let timer_list = Timer::new("comm_list"); // commit to each of the sqrt sized p_i - let comm_list: Vec> = self + let comm_list: Vec> = self .polys .par_iter() - .map(|p| MultilinearPC::::commit(&ck, p)) + .map(|p| MultilinearPC::::commit(&ck, p)) .collect(); timer_list.stop(); @@ -123,15 +119,15 @@ impl Polynomial { let left_pairs: Vec<_> = comm_list .clone() .into_par_iter() - .map(|c| ::G1Prepared::from(c.g_product)) + .map(|c| E::G1Prepared::from(c.g_product)) .collect(); let right_pairs: Vec<_> = h_vec .into_par_iter() - .map(|h| ::G2Prepared::from(h)) + .map(|h| E::G2Prepared::from(h)) .collect(); // compute the IPP commitment - let t = I::multi_pairing(left_pairs, right_pairs).0; + let t = E::multi_pairing(left_pairs, right_pairs).0; ipp_timer.stop(); timer_commit.stop(); @@ -140,16 +136,16 @@ impl Polynomial { } // computes \chi_i(\vec{b}) = \prod_{i_j = 0}(1 - b_j)\prod_{i_j = 1}(b_j) - pub fn get_chi_i(b: &[Scalar], i: usize) -> Scalar { + pub fn get_chi_i(b: &[E::ScalarField], i: usize) -> E::ScalarField { let m = b.len(); - let mut prod = Scalar::one(); + let mut prod = E::ScalarField::one(); for j in 0..m { let b_j = b[j]; // iterate from msb to lsb of i to build chi_i as defined above if i >> (m - j - 1) & 1 == 1 { prod = prod * b_j; } else { - prod = prod * (Scalar::one() - b_j) + prod = prod * (E::ScalarField::one() - b_j) }; } prod @@ -157,12 +153,12 @@ impl Polynomial { pub fn open( &mut self, - transcript: &mut PoseidonTranscript, - comm_list: Vec>, - ck: &CommitterKey, - point: 
&[Scalar], - t: &::TargetField, - ) -> (Commitment, Proof, MippProof) { + transcript: &mut PoseidonTranscript, + comm_list: Vec>, + ck: &CommitterKey, + point: &[E::ScalarField], + t: &E::TargetField, + ) -> (Commitment, Proof, MippProof) { let m = point.len() / 2; let a = &point[0..m]; if self.q.is_none() { @@ -188,21 +184,21 @@ impl Polynomial { let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); let c_u = - ::msm_unchecked(a_vec.as_slice(), chis.as_slice()).into_affine(); + ::msm_unchecked(a_vec.as_slice(), chis.as_slice()).into_affine(); timer_msm.stop(); - let U: Commitment = Commitment { + let U: Commitment = Commitment { nv: q.num_vars, g_product: c_u, }; - let comm = MultilinearPC::::commit(ck, &q); + let comm = MultilinearPC::::commit(ck, &q); debug_assert!(c_u == comm.g_product); let h_vec = ck.powers_of_h[0].clone(); // construct MIPP proof that U is the inner product of the vector A // and the vector y, where A is the opening vector to T let timer_mipp_proof = Timer::new("mipp_prove"); - let mipp_proof = MippProof::::prove::( + let mipp_proof = MippProof::::prove( transcript, ck, a_vec, @@ -219,7 +215,7 @@ impl Polynomial { a_rev.reverse(); // construct PST proof for opening q at a - let pst_proof = MultilinearPC::::open(ck, &q, &a_rev); + let pst_proof = MultilinearPC::::open(ck, &q, &a_rev); timer_proof.stop(); timer_open.stop(); @@ -227,14 +223,14 @@ impl Polynomial { } pub fn verify( - transcript: &mut PoseidonTranscript, - vk: &VerifierKey, - U: &Commitment, - point: &[Scalar], - v: Scalar, - pst_proof: &Proof, - mipp_proof: &MippProof, - T: &::TargetField, + transcript: &mut PoseidonTranscript, + vk: &VerifierKey, + U: &Commitment, + point: &[E::ScalarField], + v: E::ScalarField, + pst_proof: &Proof, + mipp_proof: &MippProof, + T: &E::TargetField, ) -> bool { let len = point.len(); let a = &point[0..len / 2]; @@ -242,7 +238,7 @@ impl Polynomial { let timer_mipp_verify = Timer::new("mipp_verify"); // verify that U = A^y where A 
is the opening vector of T - let res_mipp = MippProof::::verify::( + let res_mipp = MippProof::::verify( vk, transcript, mipp_proof, @@ -258,7 +254,7 @@ impl Polynomial { let timer_pst_verify = Timer::new("pst_verify"); // verify that q(a) is indeed v - let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); + let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); timer_pst_verify.stop(); res } @@ -266,12 +262,11 @@ impl Polynomial { #[cfg(test)] mod tests { - use crate::parameters::poseidon_params; use super::*; - + use ark_std::UniformRand; #[test] fn check_sqrt_poly_eval() { From 2f3be263b4c280f0b122cea7fbb468fa58649249 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 09:49:16 +0100 Subject: [PATCH 20/64] wip --- src/sparse_mlpoly.rs | 106 +++++++++++++++++-------------------------- src/sqrt_pst.rs | 47 +++++++------------ src/sumcheck.rs | 1 - src/unipoly.rs | 44 +++++++++--------- 4 files changed, 79 insertions(+), 119 deletions(-) diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index ae0f5ea5..5cda447d 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -8,10 +8,9 @@ use super::dense_mlpoly::{ use super::errors::ProofVerifyError; use super::math::Math; use super::product_tree::{DotProductCircuit, ProductCircuit, ProductCircuitEvalProofBatched}; -use super::scalar::Scalar; use super::timer::Timer; use crate::poseidon_transcript::PoseidonTranscript; -use crate::transcript::TranscriptWriter; +use crate::transcript::{Transcript, TranscriptWriter}; use ark_ec::pairing::Pairing; use ark_ec::CurveGroup; use ark_ff::PrimeField; @@ -72,7 +71,7 @@ impl Derefs { } pub fn commit(&self, gens: &PolyCommitmentGens) -> DerefsCommitment { - let (comm_ops_val, _blinds) = self.comb.commit(gens, None); + let (comm_ops_val, _blinds) = self.comb.commit(gens); DerefsCommitment { comm_ops_val } } } @@ -135,7 +134,7 @@ impl DerefsEvalProof { derefs: &Derefs, eval_row_ops_val_vec: &[E::ScalarField], eval_col_ops_val_vec: 
&[E::ScalarField], - r: &[Scalar], + r: &[E::ScalarField], gens: &PolyCommitmentGens, transcript: &mut PoseidonTranscript, ) -> Self { @@ -144,7 +143,7 @@ impl DerefsEvalProof { let evals = { let mut evals = eval_row_ops_val_vec.to_owned(); evals.extend(eval_col_ops_val_vec); - evals.resize(evals.len().next_power_of_two(), Scalar::zero()); + evals.resize(evals.len().next_power_of_two(), E::ScalarField::zero()); evals }; let proof_derefs = DerefsEvalProof::prove_single(&derefs.comb, r, evals, gens, transcript); @@ -195,7 +194,7 @@ impl DerefsEvalProof { // transcript.append_protocol_name(DerefsEvalProof::protocol_name()); let mut evals = eval_row_ops_val_vec.to_owned(); evals.extend(eval_col_ops_val_vec); - evals.resize(evals.len().next_power_of_two(), Scalar::zero()); + evals.resize(evals.len().next_power_of_two(), E::ScalarField::zero()); DerefsEvalProof::verify_single( &self.proof_derefs, @@ -329,9 +328,9 @@ pub struct SparseMatPolyCommitment { impl TranscriptWriter for SparseMatPolyCommitment { fn write_to_transcript(&self, transcript: &mut impl Transcript) { - transcript.append(self.batch_size as u64); - transcript.append(self.num_ops as u64); - transcript.append(self.num_mem_cells as u64); + transcript.append(self.batch_size as u64, ""); + transcript.append(self.num_ops as u64, ""); + transcript.append(self.num_mem_cells as u64, ""); self.comm_comb_ops.write_to_transcript(transcript); self.comm_comb_mem.write_to_transcript(transcript); } @@ -354,7 +353,7 @@ impl SparseMatPolynomial { assert!(N >= self.get_num_nz_entries()); let mut ops_row: Vec = vec![0; N]; let mut ops_col: Vec = vec![0; N]; - let mut val: Vec = vec![Scalar::zero(); N]; + let mut val: Vec = vec![F::zero(); N]; for i in 0..self.M.len() { ops_row[i] = self.M[i].row; @@ -487,8 +486,8 @@ impl SparseMatPolynomial { let batch_size = sparse_polys.len(); let dense = SparseMatPolynomial::multi_sparse_to_dense_rep(sparse_polys); - let (comm_comb_ops, _blinds_comb_ops) = 
dense.comb_ops.commit(&gens.gens_ops, None); - let (comm_comb_mem, _blinds_comb_mem) = dense.comb_mem.commit(&gens.gens_mem, None); + let (comm_comb_ops, _blinds_comb_ops) = dense.comb_ops.commit(&gens.gens_ops); + let (comm_comb_mem, _blinds_comb_mem) = dense.comb_mem.commit(&gens.gens_mem); ( SparseMatPolyCommitment { @@ -578,7 +577,7 @@ impl Layers { // at read time, addr is given by addrs, value is given by derefs, and ts is given by read_ts hash_func(&addrs[i], &derefs[i], &read_ts[i]) - r_multiset_check }) - .collect::>(), + .collect::>(), ); poly_read_hashed_vec.push(poly_read_hashed); @@ -586,7 +585,7 @@ impl Layers { (0..num_ops) .map(|i| { // at write time, addr is given by addrs, value is given by derefs, and ts is given by write_ts = read_ts + 1 - hash_func(&addrs[i], &derefs[i], &(read_ts[i] + Scalar::one())) - r_multiset_check + hash_func(&addrs[i], &derefs[i], &(read_ts[i] + F::one())) - r_multiset_check }) .collect::>(), ); @@ -694,11 +693,11 @@ impl HashLayerProof { fn prove_helper( rand: (&Vec, &Vec), addr_timestamps: &AddrTimestamps, - ) -> (Vec, Vec, Scalar) { + ) -> (Vec, Vec, E::ScalarField) { let (rand_mem, rand_ops) = rand; // decommit ops-addr at rand_ops - let mut eval_ops_addr_vec: Vec = Vec::new(); + let mut eval_ops_addr_vec: Vec = Vec::new(); for i in 0..addr_timestamps.ops_addr.len() { let eval_ops_addr = addr_timestamps.ops_addr[i].evaluate(rand_ops); eval_ops_addr_vec.push(eval_ops_addr); @@ -787,7 +786,7 @@ impl HashLayerProof { ); // form a single decommitment using comb_comb_mem at rand_mem - let evals_mem: Vec = vec![eval_row_audit_ts, eval_col_audit_ts]; + let evals_mem: Vec = vec![eval_row_audit_ts, eval_col_audit_ts]; // evals_mem.append_to_transcript(b"claim_evals_mem", transcript); transcript.append_scalar_vector(&evals_mem); let challenges_mem = transcript.challenge_vector(evals_mem.len().log_2()); @@ -939,7 +938,7 @@ impl HashLayerProof { let (eval_row_addr_vec, eval_row_read_ts_vec, eval_row_audit_ts) = &self.eval_row; 
let (eval_col_addr_vec, eval_col_read_ts_vec, eval_col_audit_ts) = &self.eval_col; - let mut evals_ops: Vec = Vec::new(); + let mut evals_ops: Vec = Vec::new(); evals_ops.extend(eval_row_addr_vec); evals_ops.extend(eval_row_read_ts_vec); evals_ops.extend(eval_col_addr_vec); @@ -1232,16 +1231,7 @@ impl ProductLayerProof { num_cells: usize, eval: &[F], transcript: &mut PoseidonTranscript, - ) -> Result< - ( - Vec, - Vec, - Vec, - Vec, - Vec, - ), - ProofVerifyError, - > { + ) -> Result<(Vec, Vec, Vec, Vec, Vec), ProofVerifyError> { // transcript.append_protocol_name(ProductLayerProof::protocol_name()); let timer = Timer::new("verify_prod_proof"); @@ -1254,7 +1244,7 @@ impl ProductLayerProof { let ws: F = (0..row_eval_write.len()) .map(|i| row_eval_write[i]) .product(); - let rs: Scalar = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); + let rs: F = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); assert_eq!(ws * row_eval_init, rs * row_eval_audit); // row_eval_init.append_to_transcript(b"claim_row_eval_init", transcript); @@ -1335,7 +1325,7 @@ impl ProductLayerProof { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -struct PolyEvalNetworkProof{ +struct PolyEvalNetworkProof { proof_prod_layer: ProductLayerProof, proof_hash_layer: HashLayerProof, } @@ -1365,13 +1355,8 @@ impl PolyEvalNetworkProof { ); // proof of hash layer for row and col - let proof_hash_layer = HashLayerProof::prove( - (&rand_mem, &rand_ops), - dense, - derefs, - gens, - transcript, - ); + let proof_hash_layer = + HashLayerProof::prove((&rand_mem, &rand_ops), dense, derefs, gens, transcript); PolyEvalNetworkProof { proof_prod_layer, @@ -1454,7 +1439,10 @@ impl SparseMatPolyEvalProof { b"Sparse polynomial evaluation proof" } - fn equalize(rx: &[E::ScalarField], ry: &[E::ScalarField]) -> (Vec, Vec) { + fn equalize( + rx: &[E::ScalarField], + ry: &[E::ScalarField], + ) -> (Vec, Vec) { match rx.len().cmp(&ry.len()) { Ordering::Less => { let diff = ry.len() - 
rx.len(); @@ -1473,7 +1461,7 @@ impl SparseMatPolyEvalProof { } pub fn prove( - dense: &MultiSparseMatPolynomialAsDense, + dense: &MultiSparseMatPolynomialAsDense, rx: &[E::ScalarField], // point at which the polynomial is evaluated ry: &[E::ScalarField], evals: &[E::ScalarField], // a vector evaluation of \widetilde{M}(r = (rx,ry)) for each M @@ -1520,14 +1508,8 @@ impl SparseMatPolyEvalProof { timer_build_network.stop(); let timer_eval_network = Timer::new("evalproof_layered_network"); - let poly_eval_network_proof = PolyEvalNetworkProof::prove( - &mut net, - dense, - &derefs, - evals, - gens, - transcript, - ); + let poly_eval_network_proof = + PolyEvalNetworkProof::prove(&mut net, dense, &derefs, evals, gens, transcript); timer_eval_network.stop(); poly_eval_network_proof @@ -1582,7 +1564,7 @@ pub struct SparsePolyEntry { pub val: F, } -impl SparsePolyEntry{ +impl SparsePolyEntry { pub fn new(idx: usize, val: F) -> Self { SparsePolyEntry { idx, val } } @@ -1635,6 +1617,8 @@ mod tests { use ark_std::UniformRand; use rand::RngCore; + type F = ark_bls12_377::Fr; + type E = ark_bls12_377::Bls12_377; #[test] fn check_sparse_polyeval_proof() { let mut rng = ark_std::rand::thread_rng(); @@ -1651,7 +1635,7 @@ mod tests { M.push(SparseMatEntry::new( (rng.next_u64() % (num_rows as u64)) as usize, (rng.next_u64() % (num_cols as u64)) as usize, - Scalar::rand(&mut rng), + F::rand(&mut rng), )); } @@ -1668,27 +1652,19 @@ mod tests { let (poly_comm, dense) = SparseMatPolynomial::multi_commit(&[&poly_M, &poly_M, &poly_M], &gens); // evaluation - let rx: Vec = (0..num_vars_x) - .map(|_i| Scalar::rand(&mut rng)) - .collect::>(); - let ry: Vec = (0..num_vars_y) - .map(|_i| Scalar::rand(&mut rng)) - .collect::>(); + let rx: Vec = (0..num_vars_x) + .map(|_i| F::rand(&mut rng)) + .collect::>(); + let ry: Vec = (0..num_vars_y) + .map(|_i| F::rand(&mut rng)) + .collect::>(); let eval = SparseMatPolynomial::multi_evaluate(&[&poly_M], &rx, &ry); let evals = vec![eval[0], eval[0], 
eval[0]]; let params = poseidon_params(); - let mut random_tape = RandomTape::new(b"proof"); let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SparseMatPolyEvalProof::prove( - &dense, - &rx, - &ry, - &evals, - &gens, - &mut prover_transcript, - &mut random_tape, - ); + let proof = + SparseMatPolyEvalProof::prove(&dense, &rx, &ry, &evals, &gens, &mut prover_transcript); let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 50fdc7fd..29821757 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -10,7 +10,6 @@ use rayon::prelude::{ IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator, }; -use super::scalar::Scalar; use crate::{ dense_mlpoly::DensePolynomial, math::Math, poseidon_transcript::PoseidonTranscript, timer::Timer, }; @@ -183,8 +182,7 @@ impl Polynomial { let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); - let c_u = - ::msm_unchecked(a_vec.as_slice(), chis.as_slice()).into_affine(); + let c_u = ::msm_unchecked(&a_vec, &chis).into_affine(); timer_msm.stop(); let U: Commitment = Commitment { @@ -198,16 +196,8 @@ impl Polynomial { // construct MIPP proof that U is the inner product of the vector A // and the vector y, where A is the opening vector to T let timer_mipp_proof = Timer::new("mipp_prove"); - let mipp_proof = MippProof::::prove( - transcript, - ck, - a_vec, - chis.to_vec(), - h_vec, - &c_u, - t, - ) - .unwrap(); + let mipp_proof = + MippProof::::prove(transcript, ck, a_vec, chis.to_vec(), h_vec, &c_u, t).unwrap(); timer_mipp_proof.stop(); let timer_proof = Timer::new("pst_open"); @@ -238,14 +228,7 @@ impl Polynomial { let timer_mipp_verify = Timer::new("mipp_verify"); // verify that U = A^y where A is the opening vector of T - let res_mipp = MippProof::::verify( - vk, - transcript, - mipp_proof, - b.to_vec(), - &U.g_product, - T, - ); + let res_mipp = MippProof::::verify(vk, transcript, mipp_proof, 
b.to_vec(), &U.g_product, T); assert!(res_mipp == true); timer_mipp_verify.stop(); @@ -266,6 +249,8 @@ mod tests { use crate::parameters::poseidon_params; use super::*; + type F = ark_bls12_377::Fr; + type E = ark_bls12_377::Bls12_377; use ark_std::UniformRand; #[test] @@ -273,13 +258,13 @@ mod tests { let mut rng = ark_std::test_rng(); let num_vars = 8; let len = 2_usize.pow(num_vars); - let Z: Vec = (0..len) + let Z: Vec = (0..len) .into_iter() - .map(|_| Scalar::rand(&mut rng)) + .map(|_| F::rand(&mut rng)) .collect(); - let r: Vec = (0..num_vars) + let r: Vec = (0..num_vars) .into_iter() - .map(|_| Scalar::rand(&mut rng)) + .map(|_| F::rand(&mut rng)) .collect(); let p = DensePolynomial::new(Z.clone()); @@ -296,17 +281,17 @@ mod tests { let mut rng = ark_std::test_rng(); let num_vars = 4; let len = 2_usize.pow(num_vars); - let Z: Vec = (0..len) + let Z: Vec = (0..len) .into_iter() - .map(|_| Scalar::rand(&mut rng)) + .map(|_| F::rand(&mut rng)) .collect(); - let r: Vec = (0..num_vars) + let r: Vec = (0..num_vars) .into_iter() - .map(|_| Scalar::rand(&mut rng)) + .map(|_| F::rand(&mut rng)) .collect(); - let gens = MultilinearPC::::setup(2, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&gens, 2); + let gens = MultilinearPC::::setup(2, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&gens, 2); let mut pl = Polynomial::from_evaluations(&Z.clone()); diff --git a/src/sumcheck.rs b/src/sumcheck.rs index 34e31edc..4d4afca3 100644 --- a/src/sumcheck.rs +++ b/src/sumcheck.rs @@ -5,7 +5,6 @@ use super::errors::ProofVerifyError; use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::TranscriptWriter; -use super::scalar::Scalar; use super::unipoly::UniPoly; use ark_ff::PrimeField; diff --git a/src/unipoly.rs b/src/unipoly.rs index 65d75431..b9fd9a77 100644 --- a/src/unipoly.rs +++ b/src/unipoly.rs @@ -1,5 +1,3 @@ -use super::scalar::Scalar; -use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::{Transcript, TranscriptWriter}; 
use ark_ff::{Field, PrimeField}; use ark_serialize::*; @@ -104,7 +102,7 @@ impl TranscriptWriter for UniPoly { fn write_to_transcript(&self, transcript: &mut impl Transcript) { // transcript.append_message(label, b"UniPoly_begin"); for i in 0..self.coeffs.len() { - transcript.append(&self.coeffs[i],"coeffs"); + transcript.append(&self.coeffs[i], "coeffs"); } // transcript.append_message(label, b"UniPoly_end"); } @@ -117,21 +115,23 @@ mod tests { use super::*; + type F = ark_bls12_377::Fr; + #[test] - fn test_from_evals_quad() { + fn test_from_evals_quad() { // polynomial is 2x^2 + 3x + 1 - let e0 = Scalar::one(); - let e1 = Scalar::from(6); - let e2 = Scalar::from(15); + let e0 = F::one(); + let e1 = F::from(6); + let e2 = F::from(15); let evals = vec![e0, e1, e2]; let poly = UniPoly::from_evals(&evals); assert_eq!(poly.eval_at_zero(), e0); assert_eq!(poly.eval_at_one(), e1); assert_eq!(poly.coeffs.len(), 3); - assert_eq!(poly.coeffs[0], Scalar::one()); - assert_eq!(poly.coeffs[1], Scalar::from(3)); - assert_eq!(poly.coeffs[2], Scalar::from(2)); + assert_eq!(poly.coeffs[0], F::one()); + assert_eq!(poly.coeffs[1], F::from(3)); + assert_eq!(poly.coeffs[2], F::from(2)); let hint = e0 + e1; let compressed_poly = poly.compress(); @@ -140,27 +140,27 @@ mod tests { assert_eq!(decompressed_poly.coeffs[i], poly.coeffs[i]); } - let e3 = Scalar::from(28); - assert_eq!(poly.evaluate(&Scalar::from(3)), e3); + let e3 = F::from(28); + assert_eq!(poly.evaluate(&F::from(3)), e3); } #[test] fn test_from_evals_cubic() { // polynomial is x^3 + 2x^2 + 3x + 1 - let e0 = Scalar::one(); - let e1 = Scalar::from(7); - let e2 = Scalar::from(23); - let e3 = Scalar::from(55); + let e0 = F::one(); + let e1 = F::from(7); + let e2 = F::from(23); + let e3 = F::from(55); let evals = vec![e0, e1, e2, e3]; let poly = UniPoly::from_evals(&evals); assert_eq!(poly.eval_at_zero(), e0); assert_eq!(poly.eval_at_one(), e1); assert_eq!(poly.coeffs.len(), 4); - assert_eq!(poly.coeffs[0], Scalar::one()); - 
assert_eq!(poly.coeffs[1], Scalar::from(3)); - assert_eq!(poly.coeffs[2], Scalar::from(2)); - assert_eq!(poly.coeffs[3], Scalar::from(1)); + assert_eq!(poly.coeffs[0], F::one()); + assert_eq!(poly.coeffs[1], F::from(3)); + assert_eq!(poly.coeffs[2], F::from(2)); + assert_eq!(poly.coeffs[3], F::from(1)); let hint = e0 + e1; let compressed_poly = poly.compress(); @@ -169,7 +169,7 @@ mod tests { assert_eq!(decompressed_poly.coeffs[i], poly.coeffs[i]); } - let e4 = Scalar::from(109); - assert_eq!(poly.evaluate(&Scalar::from(4)), e4); + let e4 = F::from(109); + assert_eq!(poly.evaluate(&F::from(4)), e4); } } From 0884de220bfe1ae7dff238b18eba0a0fde705e9d Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 10:55:55 +0100 Subject: [PATCH 21/64] wip --- src/commitments.rs | 9 +- src/constraints.rs | 74 ++++++-------- src/dense_mlpoly.rs | 123 ++++++++++------------- src/lib.rs | 195 +++++++++++++++++-------------------- src/nizk/bullet.rs | 19 ++-- src/nizk/mod.rs | 18 ++-- src/poseidon_transcript.rs | 7 +- src/product_tree.rs | 15 +-- src/r1csinstance.rs | 130 ++++++++++++------------- src/r1csproof.rs | 153 ++++++++++++++++------------- src/sparse_mlpoly.rs | 4 +- src/sqrt_pst.rs | 12 +-- src/unipoly.rs | 2 +- 13 files changed, 357 insertions(+), 404 deletions(-) diff --git a/src/commitments.rs b/src/commitments.rs index 3b6c7afd..6ce52103 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -1,6 +1,3 @@ -use super::group::{GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROUP_BASEPOINT}; -use super::scalar::Scalar; -use crate::group::CompressGroupElement; use crate::parameters::*; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::PrimeField; @@ -21,14 +18,14 @@ impl MultiCommitGens { let params = poseidon_params(); let mut sponge = PoseidonSponge::new(¶ms); sponge.absorb(&label); - sponge.absorb(&GROUP_BASEPOINT.compress().0); + sponge.absorb(&G::generator().0); let gens = (0..=n) .map(|i| { - let mut el_aff: Option = None; + 
let mut el_aff: Option = None; while el_aff.is_none() { let uniform_bytes = sponge.squeeze_bytes(64); - el_aff = GroupElementAffine::from_random_bytes(&uniform_bytes); + el_aff = G::from_random_bytes(&uniform_bytes); } el_aff.unwrap().clear_cofactor() }) diff --git a/src/constraints.rs b/src/constraints.rs index 7a5931e7..d31ef70f 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -1,34 +1,25 @@ use ark_ec::pairing::Pairing; -use std::{borrow::Borrow, vec}; +use std::{borrow::Borrow}; use crate::{ - group::Fq, math::Math, sparse_mlpoly::{SparsePolyEntry, SparsePolynomial}, unipoly::UniPoly, }; -use ark_crypto_primitives::snark::{BooleanInputVar, SNARKGadget}; -use ark_snark::{CircuitSpecificSetupSNARK, SNARK}; - -use ark_ff::{BitIteratorLE, PrimeField, Zero}; -use ark_groth16::{ - constraints::{Groth16VerifierGadget, PreparedVerifyingKeyVar, ProofVar}, - Groth16, PreparedVerifyingKey, Proof as GrothProof, -}; +use ark_ff::{PrimeField, Zero}; use ark_crypto_primitives::sponge::{ constraints::CryptographicSpongeVar, - poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig, PoseidonSponge}, + poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig}, }; use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_r1cs_std::{ alloc::{AllocVar, AllocationMode}, fields::fp::FpVar, - prelude::{Boolean, EqGadget, FieldVar}, + prelude::{EqGadget, FieldVar}, }; use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, Namespace, SynthesisError}; -use rand::{CryptoRng, Rng}; pub struct PoseidonTranscripVar where @@ -39,7 +30,7 @@ where //pub params: PoseidonConfig, } -impl PoseidonTranscripVar +impl PoseidonTranscripVar where F: PrimeField, { @@ -54,7 +45,6 @@ where Self { cs, sponge, - //params: params.clone(), } } @@ -85,8 +75,8 @@ pub struct UniPolyVar { pub coeffs: Vec>, } -impl AllocVar, F> for UniPolyVar { - fn new_variable>( +impl AllocVar, F> for UniPolyVar { + fn new_variable>>( cs: impl Into>, f: impl FnOnce() -> Result, mode: 
AllocationMode, @@ -118,7 +108,7 @@ impl UniPolyVar { } // TODO check if mul without reduce can help - pub fn evaluate(&self, r: &FpVar) -> FpVar { + pub fn evaluate(&self, r: &FpVar) -> FpVar { let mut eval = self.coeffs[0].clone(); let mut power = r.clone(); @@ -141,10 +131,10 @@ impl SumcheckVerificationCircuit { &self, poly_vars: &[UniPolyVar], claim_var: &FpVar, - transcript_var: &mut PoseidonTranscripVar, - ) -> Result<(FpVar, Vec>), SynthesisError> { + transcript_var: &mut PoseidonTranscripVar, + ) -> Result<(FpVar, Vec>), SynthesisError> { let mut e_var = claim_var.clone(); - let mut r_vars: Vec> = Vec::new(); + let mut r_vars: Vec> = Vec::new(); for (poly_var, _poly) in poly_vars.iter().zip(self.polys.iter()) { let res = poly_var.eval_at_one() + poly_var.eval_at_zero(); @@ -165,8 +155,8 @@ pub struct SparsePolyEntryVar { val_var: FpVar, } -impl AllocVar for SparsePolyEntryVar { - fn new_variable>( +impl AllocVar, F> for SparsePolyEntryVar { + fn new_variable>>( cs: impl Into>, f: impl FnOnce() -> Result, _mode: AllocationMode, @@ -189,8 +179,8 @@ pub struct SparsePolynomialVar { Z_var: Vec>, } -impl AllocVar for SparsePolynomialVar { - fn new_variable>( +impl AllocVar, F> for SparsePolynomialVar { + fn new_variable>>( cs: impl Into>, f: impl FnOnce() -> Result, mode: AllocationMode, @@ -241,7 +231,7 @@ pub struct R1CSVerificationCircuit { pub num_vars: usize, pub num_cons: usize, pub input: Vec, - pub input_as_sparse_poly: SparsePolynomial, + pub input_as_sparse_poly: SparsePolynomial, pub evals: (F, F, F), pub params: PoseidonConfig, pub prev_challenge: F, @@ -251,11 +241,11 @@ pub struct R1CSVerificationCircuit { pub sc_phase2: SumcheckVerificationCircuit, // The point on which the polynomial was evaluated by the prover. 
pub claimed_ry: Vec, - pub claimed_transcript_sat_state: Fr, + pub claimed_transcript_sat_state: F, } impl R1CSVerificationCircuit { - fn new(config: &VerifierConfig) -> Self { + pub fn new(config: &VerifierConfig) -> Self { Self { num_vars: config.num_vars, num_cons: config.num_cons, @@ -317,7 +307,7 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let tau_vars = transcript_var.challenge_vector(num_rounds_x)?; - let claim_phase1_var = FpVar::::new_witness(cs.clone(), || Ok(Fr::zero()))?; + let claim_phase1_var = FpVar::::new_witness(cs.clone(), || Ok(F::zero()))?; let (claim_post_phase1_var, rx_var) = self @@ -403,21 +393,21 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { } #[derive(Clone)] -pub struct VerifierConfig { - pub comm: Commitment, +pub struct VerifierConfig { + pub comm: Commitment, pub num_vars: usize, pub num_cons: usize, - pub input: Vec, - pub input_as_sparse_poly: SparsePolynomial, - pub evals: (Fr, Fr, Fr), - pub params: PoseidonConfig, - pub prev_challenge: Fr, - pub claims_phase2: (Fr, Fr, Fr, Fr), - pub eval_vars_at_ry: Fr, - pub polys_sc1: Vec, - pub polys_sc2: Vec, - pub ry: Vec, - pub transcript_sat_state: Scalar, + pub input: Vec, + pub input_as_sparse_poly: SparsePolynomial, + pub evals: (E::ScalarField, E::ScalarField, E::ScalarField), + pub params: PoseidonConfig, + pub prev_challenge: E::ScalarField, + pub claims_phase2: (E::ScalarField, E::ScalarField, E::ScalarField, E::ScalarField), + pub eval_vars_at_ry: E::ScalarField, + pub polys_sc1: Vec>, + pub polys_sc2: Vec>, + pub ry: Vec, + pub transcript_sat_state: E::ScalarField, } // Skeleton for the polynomial commitment verification circuit diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 4460412a..68c1a105 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -1,24 +1,18 @@ #![allow(clippy::too_many_arguments)] -use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; +use crate::poseidon_transcript::PoseidonTranscript; use 
crate::transcript::{Transcript, TranscriptWriter}; use super::commitments::{Commitments, MultiCommitGens}; use super::errors::ProofVerifyError; -use super::group::{ - CompressGroupElement, CompressedGroup, DecompressGroupElement, GroupElement, - VartimeMultiscalarMul, -}; use super::math::Math; use super::nizk::{DotProductProofGens, DotProductProofLog}; -use super::scalar::Scalar; use ark_ec::scalar_mul::variable_base::VariableBaseMSM; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ff::Field; use ark_ff::{One, PrimeField, UniformRand, Zero}; -use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; +use ark_poly::{MultilinearExtension}; use ark_poly_commit::multilinear_pc::data_structures::{ - Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, + CommitterKey, VerifierKey, }; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::*; @@ -62,11 +56,11 @@ impl MultilinearExtension for DensePolynomial { unimplemented!() } - fn fix_variables(&self, _partial_point: &[Scalar]) -> Self { + fn fix_variables(&self, _partial_point: &[F]) -> Self { unimplemented!() } - fn to_evaluations(&self) -> Vec { + fn to_evaluations(&self) -> Vec { self.Z.to_vec() } } @@ -132,8 +126,8 @@ impl<'a, 'b, F: PrimeField> AddAssign<&'a DensePolynomial> for DensePolynomia } } -impl<'a, 'b, F: PrimeField> AddAssign<(Scalar, &'a DensePolynomial)> for DensePolynomial { - fn add_assign(&mut self, (scalar, other): (Scalar, &'a DensePolynomial)) { +impl<'a, 'b, F: PrimeField> AddAssign<(F, &'a DensePolynomial)> for DensePolynomial { + fn add_assign(&mut self, (scalar, other): (F, &'a DensePolynomial)) { let other = Self { num_vars: other.num_vars, len: 1 << other.num_vars, @@ -200,8 +194,8 @@ impl PolyCommitmentGens { // Generates the SRS and trims it based on the number of variables in the // multilinear polynomial. 
let mut rng = ark_std::test_rng(); - let pst_gens = MultilinearPC::::setup(num_vars / 2, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&pst_gens, num_vars / 2); + let pst_gens = MultilinearPC::::setup(num_vars / 2, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&pst_gens, num_vars / 2); PolyCommitmentGens { gens, ck, vk } } @@ -308,7 +302,7 @@ impl DensePolynomial { self.len } - pub fn clone(&self) -> Self { + pub fn clone(&self) -> Self { DensePolynomial::new(self.Z[0..self.len].to_vec()) } @@ -436,7 +430,7 @@ impl DensePolynomial { where I: IntoIterator>, { - let mut Z: Vec = Vec::new(); + let mut Z: Vec = Vec::new(); for poly in polys.into_iter() { Z.extend(poly.vec()); } @@ -457,7 +451,7 @@ impl DensePolynomial { } impl Index for DensePolynomial { - type Output = Scalar; + type Output = F; #[inline(always)] fn index(&self, _index: usize) -> &F { @@ -505,7 +499,7 @@ where let R_size = right_num_vars.pow2(); let default_blinds = PolyCommitmentBlinds { - blinds: vec![Scalar::zero(); L_size], + blinds: vec![E::ScalarField::zero(); L_size], }; let blinds = blinds_opt.map_or(&default_blinds, |p| p); @@ -529,7 +523,7 @@ where let (proof, _C_LR, C_Zr_prime) = DotProductProofLog::prove( &gens.gens, transcript, - &LZ, + LZ.as_slice(), &LZ_blind, &R, Zr, @@ -543,8 +537,8 @@ where &self, gens: &PolyCommitmentGens, transcript: &mut PoseidonTranscript, - r: &[E::ScalarField], // point at which the polynomial is evaluated - C_Zr: &E::G1, // commitment to \widetilde{Z}(r) + r: &[E::ScalarField], // point at which the polynomial is evaluated + C_Zr: &E::G1, // commitment to \widetilde{Z}(r) comm: &PolyCommitment, ) -> Result<(), ProofVerifyError> { // transcript.append_protocol_name(PolyEvalProof::protocol_name()); @@ -554,13 +548,9 @@ where let (L, R) = eq.compute_factored_evals(); // compute a weighted sum of commitments and L - let C_decompressed = comm - .C - .iter() - .map(|pt| GroupElement::decompress(pt).unwrap()) - .collect::>(); + let C_decompressed = comm.C; - 
let C_LZ = GroupElement::vartime_multiscalar_mul(&L, C_decompressed.as_slice()).compress(); + let C_LZ = ::msm(&C_decompressed, &L).compress(); self .proof @@ -576,7 +566,9 @@ where comm: &PolyCommitment, ) -> Result<(), ProofVerifyError> { // compute a commitment to Zr with a blind of zero - let C_Zr = Zr.commit(&Scalar::zero(), &gens.gens.gens_1).compress(); + let C_Zr = Zr + .commit(&E::ScalarField::zero(), &gens.gens.gens_1) + .compress(); self.verify(gens, transcript, r, &C_Zr, comm) } @@ -590,7 +582,10 @@ mod tests { use super::*; use ark_std::UniformRand; - fn evaluate_with_LR(Z: &[Scalar], r: &[Scalar]) -> Scalar { + type F = ark_bls12_377::Fr; + type E = ark_bls12_377::Bls12_377; + + fn evaluate_with_LR(Z: &[F], r: &[F]) -> F { let eq = EqPolynomial::new(r.to_vec()); let (L, R) = eq.compute_factored_evals(); @@ -605,7 +600,7 @@ mod tests { // compute vector-matrix product between L and Z viewed as a matrix let LZ = (0..m) .map(|i| (0..m).map(|j| L[j] * Z[j * m + i]).sum()) - .collect::>(); + .collect::>(); // compute dot product between LZ and R DotProductProofLog::compute_dotproduct(&LZ, &R) @@ -614,27 +609,22 @@ mod tests { #[test] fn check_polynomial_evaluation() { // Z = [1, 2, 1, 4] - let Z = vec![ - Scalar::one(), - Scalar::from(2), - Scalar::from(1), - Scalar::from(4), - ]; + let Z = vec![F::one(), F::from(2), F::from(1), F::from(4)]; // r = [4,3] - let r = vec![Scalar::from(4), Scalar::from(3)]; + let r = vec![F::from(4), F::from(3)]; let eval_with_LR = evaluate_with_LR(&Z, &r); let poly = DensePolynomial::new(Z); let eval = poly.evaluate(&r); - assert_eq!(eval, Scalar::from(28)); + assert_eq!(eval, F::from(28)); assert_eq!(eval_with_LR, eval); } - pub fn compute_factored_chis_at_r(r: &[Scalar]) -> (Vec, Vec) { - let mut L: Vec = Vec::new(); - let mut R: Vec = Vec::new(); + pub fn compute_factored_chis_at_r(r: &[F]) -> (Vec, Vec) { + let mut L: Vec = Vec::new(); + let mut R: Vec = Vec::new(); let ell = r.len(); assert!(ell % 2 == 0); // ensure ell 
is even @@ -643,13 +633,13 @@ mod tests { // compute row vector L for i in 0..m { - let mut chi_i = Scalar::one(); + let mut chi_i = F::one(); for j in 0..ell / 2 { let bit_j = ((m * i) & (1 << (r.len() - j - 1))) > 0; if bit_j { chi_i *= r[j]; } else { - chi_i *= Scalar::one() - r[j]; + chi_i *= F::one() - r[j]; } } L.push(chi_i); @@ -657,13 +647,13 @@ mod tests { // compute column vector R for i in 0..m { - let mut chi_i = Scalar::one(); + let mut chi_i = F::one(); for j in ell / 2..ell { let bit_j = (i & (1 << (r.len() - j - 1))) > 0; if bit_j { chi_i *= r[j]; } else { - chi_i *= Scalar::one() - r[j]; + chi_i *= F::one() - r[j]; } } R.push(chi_i); @@ -671,18 +661,18 @@ mod tests { (L, R) } - pub fn compute_chis_at_r(r: &[Scalar]) -> Vec { + pub fn compute_chis_at_r(r: &[F]) -> Vec { let ell = r.len(); let n = ell.pow2(); - let mut chis: Vec = Vec::new(); + let mut chis: Vec = Vec::new(); for i in 0..n { - let mut chi_i = Scalar::one(); + let mut chi_i = F::one(); for j in 0..r.len() { let bit_j = (i & (1 << (r.len() - j - 1))) > 0; if bit_j { chi_i *= r[j]; } else { - chi_i *= Scalar::one() - r[j]; + chi_i *= F::one() - r[j]; } } chis.push(chi_i); @@ -690,14 +680,14 @@ mod tests { chis } - pub fn compute_outerproduct(L: Vec, R: Vec) -> Vec { + pub fn compute_outerproduct(L: Vec, R: Vec) -> Vec { assert_eq!(L.len(), R.len()); (0..L.len()) - .map(|i| (0..R.len()).map(|j| L[i] * R[j]).collect::>()) - .collect::>>() + .map(|i| (0..R.len()).map(|j| L[i] * R[j]).collect::>()) + .collect::>>() .into_iter() .flatten() - .collect::>() + .collect::>() } #[test] @@ -705,9 +695,9 @@ mod tests { let mut rng = ark_std::rand::thread_rng(); let s = 10; - let mut r: Vec = Vec::new(); + let mut r: Vec = Vec::new(); for _i in 0..s { - r.push(Scalar::rand(&mut rng)); + r.push(F::rand(&mut rng)); } let chis = tests::compute_chis_at_r(&r); let chis_m = EqPolynomial::new(r).evals(); @@ -719,9 +709,9 @@ mod tests { let mut rng = ark_std::rand::thread_rng(); let s = 10; - let mut r: Vec 
= Vec::new(); + let mut r: Vec = Vec::new(); for _i in 0..s { - r.push(Scalar::rand(&mut rng)); + r.push(F::rand(&mut rng)); } let chis = EqPolynomial::new(r.clone()).evals(); let (L, R) = EqPolynomial::new(r).compute_factored_evals(); @@ -734,9 +724,9 @@ mod tests { let mut rng = ark_std::rand::thread_rng(); let s = 10; - let mut r: Vec = Vec::new(); + let mut r: Vec = Vec::new(); for _i in 0..s { - r.push(Scalar::rand(&mut rng)); + r.push(F::rand(&mut rng)); } let (L, R) = tests::compute_factored_chis_at_r(&r); let eq = EqPolynomial::new(r); @@ -747,23 +737,17 @@ mod tests { #[test] fn check_polynomial_commit() { - let Z = vec![ - Scalar::from(1), - Scalar::from(2), - Scalar::from(1), - Scalar::from(4), - ]; + let Z = vec![F::from(1), F::from(2), F::from(1), F::from(4)]; let poly = DensePolynomial::new(Z); // r = [4,3] - let r = vec![Scalar::from(4), Scalar::from(3)]; + let r = vec![F::from(4), F::from(3)]; let eval = poly.evaluate(&r); - assert_eq!(eval, Scalar::from(28)); + assert_eq!(eval, F::from(28)); let gens = PolyCommitmentGens::new(poly.get_num_vars(), b"test-two"); let (poly_commitment, blinds) = poly.commit(&gens, None); - let mut random_tape = RandomTape::new(b"proof"); let params = poseidon_params(); let mut prover_transcript = PoseidonTranscript::new(¶ms); let (proof, C_Zr) = PolyEvalProof::prove( @@ -774,7 +758,6 @@ mod tests { None, &gens, &mut prover_transcript, - &mut random_tape, ); let mut verifier_transcript = PoseidonTranscript::new(¶ms); diff --git a/src/lib.rs b/src/lib.rs index 38d1bf2a..f65d38af 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -29,7 +29,6 @@ mod nizk; mod product_tree; mod r1csinstance; mod r1csproof; -mod scalar; mod sparse_mlpoly; mod sqrt_pst; mod sumcheck; @@ -49,39 +48,40 @@ use ark_std::Zero; use core::cmp::max; use errors::{ProofVerifyError, R1CSError}; -use poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; +use poseidon_transcript::{PoseidonTranscript}; +use transcript::TranscriptWriter; use r1csinstance::{ 
R1CSCommitment, R1CSCommitmentGens, R1CSDecommitment, R1CSEvalProof, R1CSInstance, }; use r1csproof::{R1CSGens, R1CSProof}; -use random::RandomTape; -use scalar::Scalar; +use ark_ec::CurveGroup; use timer::Timer; /// `ComputationCommitment` holds a public preprocessed NP statement (e.g., R1CS) -pub struct ComputationCommitment { - comm: R1CSCommitment, +pub struct ComputationCommitment { + comm: R1CSCommitment, } +use ark_ff::PrimeField; /// `ComputationDecommitment` holds information to decommit `ComputationCommitment` -pub struct ComputationDecommitment { - decomm: R1CSDecommitment, +pub struct ComputationDecommitment { + decomm: R1CSDecommitment, } /// `Assignment` holds an assignment of values to either the inputs or variables in an `Instance` #[derive(Clone)] -pub struct Assignment { - assignment: Vec, +pub struct Assignment { + assignment: Vec, } -impl Assignment { +impl Assignment { /// Constructs a new `Assignment` from a vector - pub fn new(assignment: &Vec>) -> Result { - let bytes_to_scalar = |vec: &Vec>| -> Result, R1CSError> { - let mut vec_scalar: Vec = Vec::new(); + pub fn new(assignment: &Vec>) -> Result { + let bytes_to_scalar = |vec: &Vec>| -> Result, R1CSError> { + let mut vec_scalar: Vec = Vec::new(); for v in vec { - let val = Scalar::from_random_bytes(v.as_slice()); + let val = F::from_random_bytes(v.as_slice()); if let Some(v) = val { vec_scalar.push(v); } else { @@ -104,13 +104,13 @@ impl Assignment { } /// pads Assignment to the specified length - fn pad(&self, len: usize) -> VarsAssignment { + fn pad(&self, len: usize) -> VarsAssignment { // check that the new length is higher than current length assert!(len > self.assignment.len()); let padded_assignment = { let mut padded_assignment = self.assignment.clone(); - padded_assignment.extend(vec![Scalar::zero(); len - self.assignment.len()]); + padded_assignment.extend(vec![F::zero(); len - self.assignment.len()]); padded_assignment }; @@ -121,19 +121,19 @@ impl Assignment { } /// 
`VarsAssignment` holds an assignment of values to variables in an `Instance` -pub type VarsAssignment = Assignment; +pub type VarsAssignment = Assignment; /// `InputsAssignment` holds an assignment of values to variables in an `Instance` -pub type InputsAssignment = Assignment; +pub type InputsAssignment = Assignment; /// `Instance` holds the description of R1CS matrices and a hash of the matrices #[derive(Debug)] -pub struct Instance { - inst: R1CSInstance, +pub struct Instance { + inst: R1CSInstance, digest: Vec, } -impl Instance { +impl Instance { /// Constructs a new `Instance` and an associated satisfying assignment pub fn new( num_cons: usize, @@ -142,7 +142,7 @@ impl Instance { A: &[(usize, usize, Vec)], B: &[(usize, usize, Vec)], C: &[(usize, usize, Vec)], - ) -> Result { + ) -> Result { let (num_vars_padded, num_cons_padded) = { let num_vars_padded = { let mut num_vars_padded = num_vars; @@ -176,8 +176,8 @@ impl Instance { }; let bytes_to_scalar = - |tups: &[(usize, usize, Vec)]| -> Result, R1CSError> { - let mut mat: Vec<(usize, usize, Scalar)> = Vec::new(); + |tups: &[(usize, usize, Vec)]| -> Result, R1CSError> { + let mut mat: Vec<(usize, usize, F)> = Vec::new(); for (row, col, val_bytes) in tups { // row must be smaller than num_cons if *row >= num_cons { @@ -189,7 +189,7 @@ impl Instance { return Err(R1CSError::InvalidIndex); } - let val = Scalar::from_random_bytes(val_bytes.as_slice()); + let val = F::from_random_bytes(val_bytes.as_slice()); if let Some(v) = val { // if col >= num_vars, it means that it is referencing a 1 or input in the satisfying // assignment @@ -207,7 +207,7 @@ impl Instance { // we do not need to pad otherwise because the dummy constraints are implicit in the sum-check protocol if num_cons == 0 || num_cons == 1 { for i in tups.len()..num_cons_padded { - mat.push((i, num_vars, Scalar::zero())); + mat.push((i, num_vars, F::zero())); } } @@ -246,8 +246,8 @@ impl Instance { /// Checks if a given R1CSInstance is satisfiable with a 
given variables and inputs assignments pub fn is_sat( &self, - vars: &VarsAssignment, - inputs: &InputsAssignment, + vars: &VarsAssignment, + inputs: &InputsAssignment, ) -> Result { if vars.assignment.len() > self.inst.get_num_vars() { return Err(R1CSError::InvalidNumberOfInputs); @@ -280,7 +280,7 @@ impl Instance { num_cons: usize, num_vars: usize, num_inputs: usize, - ) -> (Instance, VarsAssignment, InputsAssignment) { + ) -> (Instance, VarsAssignment, InputsAssignment) { let (inst, vars, inputs) = R1CSInstance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); let digest = inst.get_digest(); ( @@ -292,12 +292,12 @@ impl Instance { } /// `SNARKGens` holds public parameters for producing and verifying proofs with the Spartan SNARK -pub struct SNARKGens { - gens_r1cs_sat: R1CSGens, - gens_r1cs_eval: R1CSCommitmentGens, +pub struct SNARKGens { + gens_r1cs_sat: R1CSGens, + gens_r1cs_eval: R1CSCommitmentGens, } -impl SNARKGens { +impl SNARKGens { /// Constructs a new `SNARKGens` given the size of the R1CS statement /// `num_nz_entries` specifies the maximum number of non-zero entries in any of the three R1CS matrices pub fn new(num_cons: usize, num_vars: usize, num_inputs: usize, num_nz_entries: usize) -> Self { @@ -324,26 +324,30 @@ impl SNARKGens { } } +use ark_ec::pairing::Pairing; /// `SNARK` holds a proof produced by Spartan SNARK #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct SNARK { - r1cs_sat_proof: R1CSProof, - inst_evals: (Scalar, Scalar, Scalar), - r1cs_eval_proof: R1CSEvalProof, - rx: Vec, - ry: Vec, +pub struct SNARK { + r1cs_sat_proof: R1CSProof, + inst_evals: (E::ScalarField, E::ScalarField, E::ScalarField), + r1cs_eval_proof: R1CSEvalProof, + rx: Vec, + ry: Vec, } -impl SNARK { +impl SNARK { fn protocol_name() -> &'static [u8] { b"Spartan SNARK proof" } /// A public computation to create a commitment to an R1CS instance pub fn encode( - inst: &Instance, - gens: &SNARKGens, - ) -> (ComputationCommitment, 
ComputationDecommitment) { + inst: &Instance, + gens: &SNARKGens, + ) -> ( + ComputationCommitment, + ComputationDecommitment, + ) { let timer_encode = Timer::new("SNARK::encode"); let (comm, decomm) = inst.inst.commit(&gens.gens_r1cs_eval); timer_encode.stop(); @@ -355,20 +359,16 @@ impl SNARK { /// A method to produce a SNARK proof of the satisfiability of an R1CS instance pub fn prove( - inst: &Instance, - comm: &ComputationCommitment, - decomm: &ComputationDecommitment, - vars: VarsAssignment, - inputs: &InputsAssignment, - gens: &SNARKGens, - transcript: &mut PoseidonTranscript, + inst: &Instance, + comm: &ComputationCommitment, + decomm: &ComputationDecommitment, + vars: VarsAssignment, + inputs: &InputsAssignment, + gens: &SNARKGens, + transcript: &mut PoseidonTranscript, ) -> Self { let timer_prove = Timer::new("SNARK::prove"); - // we create a Transcript object seeded with a random Scalar - // to aid the prover produce its randomness - let mut random_tape = RandomTape::new(b"proof"); - // transcript.append_protocol_name(SNARK::protocol_name()); comm.comm.append_to_poseidon(transcript); @@ -391,7 +391,6 @@ impl SNARK { &inputs.assignment, &gens.gens_r1cs_sat, transcript, - // &mut random_tape, ) }; @@ -431,7 +430,6 @@ impl SNARK { &inst_evals, &gens.gens_r1cs_eval, transcript, - &mut random_tape, ); let mut proof_encoded: Vec = Vec::new(); @@ -455,10 +453,10 @@ impl SNARK { /// A method to verify the SNARK proof of the satisfiability of an R1CS instance pub fn verify( &self, - comm: &ComputationCommitment, - input: &InputsAssignment, - transcript: &mut PoseidonTranscript, - gens: &SNARKGens, + comm: &ComputationCommitment, + input: &InputsAssignment, + transcript: &mut PoseidonTranscript, + gens: &SNARKGens, ) -> Result<(u128, u128, u128), ProofVerifyError> { let timer_verify = Timer::new("SNARK::verify"); // transcript.append_protocol_name(SNARK::protocol_name()); @@ -507,11 +505,11 @@ impl SNARK { #[derive(Clone)] /// `NIZKGens` holds public parameters 
for producing and verifying proofs with the Spartan NIZK -pub struct NIZKGens { - gens_r1cs_sat: R1CSGens, +pub struct NIZKGens { + gens_r1cs_sat: R1CSGens, } -impl NIZKGens { +impl NIZKGens { /// Constructs a new `NIZKGens` given the size of the R1CS statement pub fn new(num_cons: usize, num_vars: usize, num_inputs: usize) -> Self { let num_vars_padded = { @@ -529,29 +527,25 @@ impl NIZKGens { /// `NIZK` holds a proof produced by Spartan NIZK #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct NIZK { - r1cs_sat_proof: R1CSProof, - r: (Vec, Vec), +pub struct NIZK { + r1cs_sat_proof: R1CSProof, + r: (Vec, Vec), } -impl NIZK { +impl NIZK { fn protocol_name() -> &'static [u8] { b"Spartan NIZK proof" } /// A method to produce a NIZK proof of the satisfiability of an R1CS instance pub fn prove( - inst: &Instance, - vars: VarsAssignment, - input: &InputsAssignment, - gens: &NIZKGens, - transcript: &mut PoseidonTranscript, + inst: &Instance, + vars: VarsAssignment, + input: &InputsAssignment, + gens: &NIZKGens, + transcript: &mut PoseidonTranscript, ) -> Self { let timer_prove = Timer::new("NIZK::prove"); - // we create a Transcript object seeded with a random Scalar - // to aid the prover produce its randomness - let _random_tape = RandomTape::new(b"proof"); - // transcript.append_protocol_name(NIZK::protocol_name()); transcript.append_bytes(&inst.digest); @@ -573,7 +567,6 @@ impl NIZK { &input.assignment, &gens.gens_r1cs_sat, transcript, - // &mut random_tape, ); let mut proof_encoded = Vec::new(); proof @@ -593,10 +586,10 @@ impl NIZK { /// A method to verify a NIZK proof of the satisfiability of an R1CS instance pub fn verify( &self, - inst: &Instance, - input: &InputsAssignment, - transcript: &mut PoseidonTranscript, - gens: &NIZKGens, + inst: &Instance, + input: &InputsAssignment, + transcript: &mut PoseidonTranscript, + gens: &NIZKGens, ) -> Result { let timer_verify = Timer::new("NIZK::verify"); @@ -633,10 +626,10 @@ impl NIZK { /// A method to 
verify a NIZK proof of the satisfiability of an R1CS instance with Groth16 pub fn verify_groth16( &self, - inst: &Instance, - input: &InputsAssignment, - transcript: &mut PoseidonTranscript, - gens: &NIZKGens, + inst: &Instance, + input: &InputsAssignment, + transcript: &mut PoseidonTranscript, + gens: &NIZKGens, ) -> Result<(u128, u128, u128), ProofVerifyError> { let timer_verify = Timer::new("NIZK::verify"); @@ -678,6 +671,8 @@ mod tests { use super::*; use ark_ff::{BigInteger, One, PrimeField}; + type F = ark_bls12_377::Fr; + type E = ark_bls12_377::Bls12_377; #[test] pub fn check_snark() { @@ -775,28 +770,20 @@ mod tests { let mut C: Vec<(usize, usize, Vec)> = Vec::new(); // Create a^2 + b + 13 - A.push((0, num_vars + 2, (Scalar::one().into_bigint().to_bytes_le()))); // 1*a - B.push((0, num_vars + 2, Scalar::one().into_bigint().to_bytes_le())); // 1*a - C.push((0, num_vars + 1, Scalar::one().into_bigint().to_bytes_le())); // 1*z - C.push(( - 0, - num_vars, - (-Scalar::from(13u64)).into_bigint().to_bytes_le(), - )); // -13*1 - C.push(( - 0, - num_vars + 3, - (-Scalar::one()).into_bigint().to_bytes_le(), - )); // -1*b + A.push((0, num_vars + 2, (F::one().into_bigint().to_bytes_le()))); // 1*a + B.push((0, num_vars + 2, F::one().into_bigint().to_bytes_le())); // 1*a + C.push((0, num_vars + 1, F::one().into_bigint().to_bytes_le())); // 1*z + C.push((0, num_vars, (-F::from(13u64)).into_bigint().to_bytes_le())); // -13*1 + C.push((0, num_vars + 3, (-F::one()).into_bigint().to_bytes_le())); // -1*b // Var Assignments (Z_0 = 16 is the only output) - let vars = vec![Scalar::zero().into_bigint().to_bytes_le(); num_vars]; + let vars = vec![F::zero().into_bigint().to_bytes_le(); num_vars]; // create an InputsAssignment (a = 1, b = 2) - let mut inputs = vec![Scalar::zero().into_bigint().to_bytes_le(); num_inputs]; - inputs[0] = Scalar::from(16u64).into_bigint().to_bytes_le(); - inputs[1] = Scalar::from(1u64).into_bigint().to_bytes_le(); - inputs[2] = 
Scalar::from(2u64).into_bigint().to_bytes_le(); + let mut inputs = vec![F::zero().into_bigint().to_bytes_le(); num_inputs]; + inputs[0] = F::from(16u64).into_bigint().to_bytes_le(); + inputs[1] = F::from(1u64).into_bigint().to_bytes_le(); + inputs[2] = F::from(2u64).into_bigint().to_bytes_le(); let assignment_inputs = InputsAssignment::new(&inputs).unwrap(); let assignment_vars = VarsAssignment::new(&vars).unwrap(); diff --git a/src/nizk/bullet.rs b/src/nizk/bullet.rs index b1cbd5ae..29742b60 100644 --- a/src/nizk/bullet.rs +++ b/src/nizk/bullet.rs @@ -7,7 +7,6 @@ use crate::math::Math; use crate::poseidon_transcript::PoseidonTranscript; use super::super::errors::ProofVerifyError; -use super::super::scalar::Scalar; use ark_ec::CurveGroup; use ark_ff::Field; use ark_serialize::*; @@ -33,7 +32,7 @@ impl BulletReductionProof { /// The lengths of the vectors must all be the same, and must all be /// either 0 or a power of 2. pub fn prove( - transcript: &mut PoseidonTranscript, + transcript: &mut PoseidonTranscript, Q: &G, G_vec: &[G], H: &G, @@ -95,7 +94,8 @@ impl BulletReductionProof { .chain(iter::once(&c_L)) .chain(iter::once(blind_L)) .copied() - .collect::>(), + .collect::>() + .as_slice(), ); let R = G::msm( @@ -110,7 +110,8 @@ impl BulletReductionProof { .chain(iter::once(&c_R)) .chain(iter::once(blind_R)) .copied() - .collect::>(), + .collect::>() + .as_slice(), ); transcript.append_point(&L.compress()); @@ -181,10 +182,10 @@ impl BulletReductionProof { } // 2. 
Compute 1/(u_k...u_1) and 1/u_k, ..., 1/u_1 - let mut challenges_inv: Vec = challenges.clone(); + let mut challenges_inv: Vec = challenges.clone(); ark_ff::fields::batch_inversion(&mut challenges_inv); - let mut allinv: Scalar = Scalar::one(); + let mut allinv = G::ScalarField::one(); for c in challenges.iter().filter(|s| !s.is_zero()) { allinv.mul_assign(c); } @@ -239,7 +240,7 @@ impl BulletReductionProof { let a_hat = inner_product(a, &s); let Gamma_hat = G::msm( - Ls.iter() + Ls.iter() .chain(Rs.iter()) .chain(iter::once(Gamma)) .copied() @@ -251,7 +252,7 @@ impl BulletReductionProof { .chain(iter::once(&G::ScalarField::one())) .copied() .collect::>() - .as_slice() + .as_slice(), ); Ok((G_hat, Gamma_hat, a_hat)) @@ -268,7 +269,7 @@ fn inner_product(a: &[F], b: &[F]) -> F { a.len() == b.len(), "inner_product(a,b): lengths of vectors do not match" ); - let mut out = Scalar::zero(); + let mut out = F::zero(); for i in 0..a.len() { out += a[i] * b[i]; } diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index f394278f..c0559f1f 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -4,8 +4,6 @@ use crate::poseidon_transcript::PoseidonTranscript; use super::commitments::{Commitments, MultiCommitGens}; use super::errors::ProofVerifyError; -use super::group::{CompressGroupElement, CompressedGroup, UnpackGroupElement}; -use super::scalar::Scalar; use ark_ec::CurveGroup; use ark_serialize::*; @@ -33,8 +31,8 @@ pub struct DotProductProofLog { bullet_reduction_proof: BulletReductionProof, delta: G, beta: G, - z1: Scalar, - z2: Scalar, + z1: G::ScalarField, + z2: G::ScalarField, } impl DotProductProofLog { @@ -192,6 +190,8 @@ mod tests { use super::*; use ark_std::UniformRand; + type F = ark_bls12_377::Fr; + #[test] fn check_dotproductproof_log() { @@ -201,20 +201,18 @@ mod tests { let gens = DotProductProofGens::new(n, b"test-1024"); - let x: Vec = (0..n).map(|_i| Scalar::rand(&mut rng)).collect(); - let a: Vec = (0..n).map(|_i| Scalar::rand(&mut rng)).collect(); + let x: Vec = 
(0..n).map(|_i| F::rand(&mut rng)).collect(); + let a: Vec = (0..n).map(|_i| F::rand(&mut rng)).collect(); let y = DotProductProofLog::compute_dotproduct(&x, &a); - let r_x = Scalar::rand(&mut rng); - let r_y = Scalar::rand(&mut rng); + let r_x = F::rand(&mut rng); + let r_y = F::rand(&mut rng); let params = poseidon_params(); - let mut random_tape = RandomTape::new(b"proof"); let mut prover_transcript = PoseidonTranscript::new(¶ms); let (proof, Cx, Cy) = DotProductProofLog::prove( &gens, &mut prover_transcript, - &mut random_tape, &x, &r_x, &a, diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 22144fc7..5b039767 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,14 +1,9 @@ -use super::scalar::Scalar; -use crate::group::{CompressedGroup, Fr}; use crate::transcript::Transcript; -use ark_bls12_377::{Bls12_377 as I, G1Affine}; use ark_crypto_primitives::sponge::{ poseidon::{PoseidonConfig, PoseidonSponge}, CryptographicSponge, }; -use ark_ec::pairing::Pairing; use ark_ff::PrimeField; -use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; use ark_serialize::Compress; #[derive(Clone)] @@ -50,4 +45,4 @@ impl PoseidonTranscript { self.sponge = PoseidonSponge::new(&self.params); self.append_scalar(challenge); } -} \ No newline at end of file +} diff --git a/src/product_tree.rs b/src/product_tree.rs index bf8ff27e..761c77a0 100644 --- a/src/product_tree.rs +++ b/src/product_tree.rs @@ -4,7 +4,6 @@ use crate::poseidon_transcript::PoseidonTranscript; use super::dense_mlpoly::DensePolynomial; use super::dense_mlpoly::EqPolynomial; use super::math::Math; -use super::scalar::Scalar; use super::sumcheck::SumcheckInstanceProof; use ark_ff::PrimeField; use ark_serialize::*; @@ -28,7 +27,6 @@ impl ProductCircuit { let outp_right = (len / 4..len / 2) .map(|i| inp_left[i] * inp_right[i]) .collect::>(); - ( DensePolynomial::new(outp_left), DensePolynomial::new(outp_right), @@ -278,10 
+276,9 @@ impl ProductCircuitEvalProofBatched { assert_eq!(poly_C_par.len(), len / 2); let num_rounds_prod = poly_C_par.len().log_2(); - let comb_func_prod = |poly_A_comp: &F, - poly_B_comp: &F, - poly_C_comp: &F| - -> F { (*poly_A_comp) * poly_B_comp * poly_C_comp }; + let comb_func_prod = |poly_A_comp: &F, poly_B_comp: &F, poly_C_comp: &F| -> F { + (*poly_A_comp) * poly_B_comp * poly_C_comp + }; let mut poly_A_batched_par: Vec<&mut DensePolynomial> = Vec::new(); let mut poly_B_batched_par: Vec<&mut DensePolynomial> = Vec::new(); @@ -419,10 +416,8 @@ impl ProductCircuitEvalProofBatched { } assert_eq!(rand.len(), rand_prod.len()); - let eq: F= (0..rand.len()) - .map(|i| { - rand[i] * rand_prod[i] + (F::one() - rand[i]) * (F::one() - rand_prod[i]) - }) + let eq: F = (0..rand.len()) + .map(|i| rand[i] * rand_prod[i] + (F::one() - rand[i]) * (F::one() - rand_prod[i])) .product(); let mut claim_expected: F = (0..claims_prod_vec.len()) .map(|i| coeff_vec[i] * (claims_prod_left[i] * claims_prod_right[i] * eq)) diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index 0cd8b747..a9229a9c 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -1,43 +1,44 @@ -use crate::poseidon_transcript::{AppendToPoseidon, PoseidonTranscript}; - use super::dense_mlpoly::DensePolynomial; use super::errors::ProofVerifyError; use super::math::Math; -use super::random::RandomTape; -use super::scalar::Scalar; use super::sparse_mlpoly::{ MultiSparseMatPolynomialAsDense, SparseMatEntry, SparseMatPolyCommitment, SparseMatPolyCommitmentGens, SparseMatPolyEvalProof, SparseMatPolynomial, }; use super::timer::Timer; +use crate::poseidon_transcript::PoseidonTranscript; +use crate::transcript::{Transcript, TranscriptWriter}; +use ark_ec::pairing::Pairing; +use ark_ec::CurveGroup; use ark_ff::Field; +use ark_ff::PrimeField; use ark_serialize::*; use ark_std::{One, UniformRand, Zero}; use digest::{ExtendableOutput, Input}; use sha3::Shake256; #[derive(Debug, CanonicalSerialize, 
CanonicalDeserialize, Clone)] -pub struct R1CSInstance { +pub struct R1CSInstance { num_cons: usize, num_vars: usize, num_inputs: usize, - A: SparseMatPolynomial, - B: SparseMatPolynomial, - C: SparseMatPolynomial, + A: SparseMatPolynomial, + B: SparseMatPolynomial, + C: SparseMatPolynomial, } -pub struct R1CSCommitmentGens { - gens: SparseMatPolyCommitmentGens, +pub struct R1CSCommitmentGens { + gens: SparseMatPolyCommitmentGens, } -impl R1CSCommitmentGens { +impl R1CSCommitmentGens { pub fn new( label: &'static [u8], num_cons: usize, num_vars: usize, num_inputs: usize, num_nz_entries: usize, - ) -> R1CSCommitmentGens { + ) -> Self { assert!(num_inputs < num_vars); let num_poly_vars_x = num_cons.log_2(); let num_poly_vars_y = (2 * num_vars).log_2(); @@ -48,27 +49,27 @@ impl R1CSCommitmentGens { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct R1CSCommitment { +pub struct R1CSCommitment { num_cons: usize, num_vars: usize, num_inputs: usize, - comm: SparseMatPolyCommitment, + comm: SparseMatPolyCommitment, } -impl AppendToPoseidon for R1CSCommitment { - fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { - transcript.append_u64(self.num_cons as u64); - transcript.append_u64(self.num_vars as u64); - transcript.append_u64(self.num_inputs as u64); - self.comm.append_to_poseidon(transcript); +impl TranscriptWriter for R1CSCommitment { + fn write_to_transcript(&self, transcript: &mut impl Transcript) { + transcript.append(self.num_cons as u64, ""); + transcript.append(self.num_vars as u64, ""); + transcript.append(self.num_inputs as u64, ""); + self.comm.write_to_transcript(transcript); } } -pub struct R1CSDecommitment { - dense: MultiSparseMatPolynomialAsDense, +pub struct R1CSDecommitment { + dense: MultiSparseMatPolynomialAsDense, } -impl R1CSCommitment { +impl R1CSCommitment { pub fn get_num_cons(&self) -> usize { self.num_cons } @@ -82,15 +83,15 @@ impl R1CSCommitment { } } -impl R1CSInstance { +impl R1CSInstance { pub fn new( 
num_cons: usize, num_vars: usize, num_inputs: usize, - A: &[(usize, usize, Scalar)], - B: &[(usize, usize, Scalar)], - C: &[(usize, usize, Scalar)], - ) -> R1CSInstance { + A: &[(usize, usize, F)], + B: &[(usize, usize, F)], + C: &[(usize, usize, F)], + ) -> Self { Timer::print(&format!("number_of_constraints {}", num_cons)); Timer::print(&format!("number_of_variables {}", num_vars)); Timer::print(&format!("number_of_inputs {}", num_inputs)); @@ -113,13 +114,13 @@ impl R1CSInstance { let mat_A = (0..A.len()) .map(|i| SparseMatEntry::new(A[i].0, A[i].1, A[i].2)) - .collect::>(); + .collect::>(); let mat_B = (0..B.len()) .map(|i| SparseMatEntry::new(B[i].0, B[i].1, B[i].2)) - .collect::>(); + .collect::>(); let mat_C = (0..C.len()) .map(|i| SparseMatEntry::new(C[i].0, C[i].1, C[i].2)) - .collect::>(); + .collect::>(); let poly_A = SparseMatPolynomial::new(num_poly_vars_x, num_poly_vars_y, mat_A); let poly_B = SparseMatPolynomial::new(num_poly_vars_x, num_poly_vars_y, mat_B); @@ -149,7 +150,7 @@ impl R1CSInstance { pub fn get_digest(&self) -> Vec { let mut bytes = Vec::new(); - self.serialize_with_mode(&mut bytes,Compress::Yes).unwrap(); + self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); let mut shake = Shake256::default(); shake.input(bytes); let mut reader = shake.xof_result(); @@ -162,7 +163,7 @@ impl R1CSInstance { num_cons: usize, num_vars: usize, num_inputs: usize, - ) -> (R1CSInstance, Vec, Vec) { + ) -> (Self, Vec, Vec) { Timer::print(&format!("number_of_constraints {}", num_cons)); Timer::print(&format!("number_of_variables {}", num_vars)); Timer::print(&format!("number_of_inputs {}", num_inputs)); @@ -181,10 +182,8 @@ impl R1CSInstance { // produce a random satisfying assignment let Z = { - let mut Z: Vec = (0..size_z) - .map(|_i| Scalar::rand(&mut rng)) - .collect::>(); - Z[num_vars] = Scalar::one(); // set the constant term to 1 + let mut Z: Vec = (0..size_z).map(|_i| F::rand(&mut rng)).collect::>(); + Z[num_vars] = F::one(); // set the 
constant term to 1 Z }; @@ -192,7 +191,7 @@ impl R1CSInstance { let mut A: Vec = Vec::new(); let mut B: Vec = Vec::new(); let mut C: Vec = Vec::new(); - let one = Scalar::one(); + let one = F::one(); for i in 0..num_cons { let A_idx = i % size_z; let B_idx = (i + 2) % size_z; @@ -203,7 +202,7 @@ impl R1CSInstance { let C_idx = (i + 3) % size_z; let C_val = Z[C_idx]; - if C_val == Scalar::zero() { + if C_val == F::zero() { C.push(SparseMatEntry::new(i, num_vars, AB_val)); } else { C.push(SparseMatEntry::new( @@ -238,13 +237,13 @@ impl R1CSInstance { (inst, Z[..num_vars].to_vec(), Z[num_vars + 1..].to_vec()) } - pub fn is_sat(&self, vars: &[Scalar], input: &[Scalar]) -> bool { + pub fn is_sat(&self, vars: &[F], input: &[F]) -> bool { assert_eq!(vars.len(), self.num_vars); assert_eq!(input.len(), self.num_inputs); let z = { let mut z = vars.to_vec(); - z.extend(&vec![Scalar::one()]); + z.extend(&vec![F::one()]); z.extend(input); z }; @@ -274,8 +273,8 @@ impl R1CSInstance { &self, num_rows: usize, num_cols: usize, - z: &[Scalar], - ) -> (DensePolynomial, DensePolynomial, DensePolynomial) { + z: &[F], + ) -> (DensePolynomial, DensePolynomial, DensePolynomial) { assert_eq!(num_rows, self.num_cons); assert_eq!(z.len(), num_cols); assert!(num_cols > self.num_vars); @@ -290,8 +289,8 @@ impl R1CSInstance { &self, num_rows: usize, num_cols: usize, - evals: &[Scalar], - ) -> (Vec, Vec, Vec) { + evals: &[F], + ) -> (Vec, Vec, Vec) { assert_eq!(num_rows, self.num_cons); assert!(num_cols > self.num_vars); @@ -302,12 +301,15 @@ impl R1CSInstance { (evals_A, evals_B, evals_C) } - pub fn evaluate(&self, rx: &[Scalar], ry: &[Scalar]) -> (Scalar, Scalar, Scalar) { + pub fn evaluate(&self, rx: &[F], ry: &[F]) -> (F, F, F) { let evals = SparseMatPolynomial::multi_evaluate(&[&self.A, &self.B, &self.C], rx, ry); (evals[0], evals[1], evals[2]) } - pub fn commit(&self, gens: &R1CSCommitmentGens) -> (R1CSCommitment, R1CSDecommitment) { + pub fn commit>( + &self, + gens: &R1CSCommitmentGens, 
+ ) -> (R1CSCommitment, R1CSDecommitment) { let (comm, dense) = SparseMatPolynomial::multi_commit(&[&self.A, &self.B, &self.C], &gens.gens); let r1cs_comm = R1CSCommitment { num_cons: self.num_cons, @@ -323,20 +325,19 @@ impl R1CSInstance { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] -pub struct R1CSEvalProof { - proof: SparseMatPolyEvalProof, +pub struct R1CSEvalProof { + proof: SparseMatPolyEvalProof, } -impl R1CSEvalProof { +impl R1CSEvalProof { pub fn prove( - decomm: &R1CSDecommitment, - rx: &[Scalar], // point at which the polynomial is evaluated - ry: &[Scalar], - evals: &(Scalar, Scalar, Scalar), - gens: &R1CSCommitmentGens, - transcript: &mut PoseidonTranscript, - random_tape: &mut RandomTape, - ) -> R1CSEvalProof { + decomm: &R1CSDecommitment, + rx: &[E::ScalarField], // point at which the polynomial is evaluated + ry: &[E::ScalarField], + evals: &(E::ScalarField, E::ScalarField, E::ScalarField), + gens: &R1CSCommitmentGens, + transcript: &mut PoseidonTranscript, + ) -> Self { let timer = Timer::new("R1CSEvalProof::prove"); let proof = SparseMatPolyEvalProof::prove( &decomm.dense, @@ -345,7 +346,6 @@ impl R1CSEvalProof { &[evals.0, evals.1, evals.2], &gens.gens, transcript, - random_tape, ); timer.stop(); @@ -354,12 +354,12 @@ impl R1CSEvalProof { pub fn verify( &self, - comm: &R1CSCommitment, - rx: &[Scalar], // point at which the R1CS matrix polynomials are evaluated - ry: &[Scalar], - evals: &(Scalar, Scalar, Scalar), - gens: &R1CSCommitmentGens, - transcript: &mut PoseidonTranscript, + comm: &R1CSCommitment, + rx: &[E::ScalarField], // point at which the R1CS matrix polynomials are evaluated + ry: &[E::ScalarField], + evals: &(E::ScalarField, E::ScalarField, E::ScalarField), + gens: &R1CSCommitmentGens, + transcript: &mut PoseidonTranscript, ) -> Result<(), ProofVerifyError> { self.proof.verify( &comm.comm, diff --git a/src/r1csproof.rs b/src/r1csproof.rs index c9be1112..b62340fb 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ 
-2,7 +2,6 @@ use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; use super::errors::ProofVerifyError; use crate::constraints::{R1CSVerificationCircuit, VerifierConfig}; -use crate::group::{Fq, Fr}; use crate::math::Math; use crate::mipp::MippProof; use crate::parameters::poseidon_params; @@ -10,14 +9,12 @@ use crate::poseidon_transcript::PoseidonTranscript; use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; use ark_bls12_377::Bls12_377 as I; -use ark_bw6_761::BW6_761 as P; use ark_ec::pairing::Pairing; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; use super::r1csinstance::R1CSInstance; -use super::scalar::Scalar; use super::sparse_mlpoly::{SparsePolyEntry, SparsePolynomial}; use super::timer::Timer; use ark_snark::{CircuitSpecificSetupSNARK, SNARK}; @@ -30,28 +27,33 @@ use ark_std::{One, Zero}; use std::time::Instant; #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct R1CSProof { +pub struct R1CSProof { // The PST commitment to the multilinear extension of the witness. - comm: Commitment, - sc_proof_phase1: SumcheckInstanceProof, - claims_phase2: (Scalar, Scalar, Scalar, Scalar), - sc_proof_phase2: SumcheckInstanceProof, - eval_vars_at_ry: Scalar, - proof_eval_vars_at_ry: Proof, - rx: Vec, - ry: Vec, + comm: Commitment, + sc_proof_phase1: SumcheckInstanceProof, + claims_phase2: ( + E::ScalarField, + E::ScalarField, + E::ScalarField, + E::ScalarField, + ), + sc_proof_phase2: SumcheckInstanceProof, + eval_vars_at_ry: E::ScalarField, + proof_eval_vars_at_ry: Proof, + rx: Vec, + ry: Vec, // The transcript state after the satisfiability proof was computed. 
- pub transcript_sat_state: Scalar, - pub t: ::TargetField, - pub mipp_proof: MippProof, + pub transcript_sat_state: E::ScalarField, + pub t: E::TargetField, + pub mipp_proof: MippProof, } #[derive(Clone)] -pub struct R1CSGens { - gens_pc: PolyCommitmentGens, +pub struct R1CSGens { + gens_pc: PolyCommitmentGens, } -impl R1CSGens { +impl R1CSGens { pub fn new(label: &'static [u8], _num_cons: usize, num_vars: usize) -> Self { let num_poly_vars = num_vars.log_2(); let gens_pc = PolyCommitmentGens::new(num_poly_vars, label); @@ -59,24 +61,28 @@ impl R1CSGens { } } -impl R1CSProof { +impl R1CSProof { fn prove_phase_one( num_rounds: usize, - evals_tau: &mut DensePolynomial, - evals_Az: &mut DensePolynomial, - evals_Bz: &mut DensePolynomial, - evals_Cz: &mut DensePolynomial, - transcript: &mut PoseidonTranscript, - ) -> (SumcheckInstanceProof, Vec, Vec) { + evals_tau: &mut DensePolynomial, + evals_Az: &mut DensePolynomial, + evals_Bz: &mut DensePolynomial, + evals_Cz: &mut DensePolynomial, + transcript: &mut PoseidonTranscript, + ) -> ( + SumcheckInstanceProof, + Vec, + Vec, + ) { let comb_func = - |poly_tau_comp: &Scalar, - poly_A_comp: &Scalar, - poly_B_comp: &Scalar, - poly_C_comp: &Scalar| - -> Scalar { (*poly_tau_comp) * ((*poly_A_comp) * poly_B_comp - poly_C_comp) }; + |poly_tau_comp: &E::ScalarField, + poly_A_comp: &E::ScalarField, + poly_B_comp: &E::ScalarField, + poly_C_comp: &E::ScalarField| + -> E::ScalarField { (*poly_tau_comp) * ((*poly_A_comp) * poly_B_comp - poly_C_comp) }; let (sc_proof_phase_one, r, claims) = SumcheckInstanceProof::prove_cubic_with_additive_term( - &Scalar::zero(), // claim is zero + &E::ScalarField::zero(), // claim is zero num_rounds, evals_tau, evals_Az, @@ -91,13 +97,18 @@ impl R1CSProof { fn prove_phase_two( num_rounds: usize, - claim: &Scalar, - evals_z: &mut DensePolynomial, - evals_ABC: &mut DensePolynomial, - transcript: &mut PoseidonTranscript, - ) -> (SumcheckInstanceProof, Vec, Vec) { - let comb_func = - |poly_A_comp: &Scalar, 
poly_B_comp: &Scalar| -> Scalar { (*poly_A_comp) * poly_B_comp }; + claim: &E::ScalarField, + evals_z: &mut DensePolynomial, + evals_ABC: &mut DensePolynomial, + transcript: &mut PoseidonTranscript, + ) -> ( + SumcheckInstanceProof, + Vec, + Vec, + ) { + let comb_func = |poly_A_comp: &E::ScalarField, + poly_B_comp: &E::ScalarField| + -> E::ScalarField { (*poly_A_comp) * poly_B_comp }; let (sc_proof_phase_two, r, claims) = SumcheckInstanceProof::prove_quad( claim, num_rounds, evals_z, evals_ABC, comb_func, transcript, ); @@ -110,12 +121,12 @@ impl R1CSProof { } pub fn prove( - inst: &R1CSInstance, - vars: Vec, - input: &[Scalar], - gens: &R1CSGens, - transcript: &mut PoseidonTranscript, - ) -> (R1CSProof, Vec, Vec) { + inst: &R1CSInstance, + vars: Vec, + input: &[E::ScalarField], + gens: &R1CSGens, + transcript: &mut PoseidonTranscript, + ) -> (Self, Vec, Vec) { let timer_prove = Timer::new("R1CSProof::prove"); // we currently require the number of |inputs| + 1 to be at most number of vars assert!(input.len() < vars.len()); @@ -148,9 +159,9 @@ impl R1CSProof { let num_inputs = input.len(); let num_vars = vars.len(); let mut z = vars; - z.extend(&vec![Scalar::one()]); // add constant term in z + z.extend(&vec![E::ScalarField::one()]); // add constant term in z z.extend(input); - z.extend(&vec![Scalar::zero(); num_vars - num_inputs - 1]); // we will pad with zeros + z.extend(&vec![E::ScalarField::zero(); num_vars - num_inputs - 1]); // we will pad with zeros z }; @@ -201,7 +212,7 @@ impl R1CSProof { assert_eq!(evals_A.len(), evals_C.len()); (0..evals_A.len()) .map(|i| r_A * evals_A[i] + r_B * evals_B[i] + r_C * evals_C[i]) - .collect::>() + .collect::>() }; // another instance of the sum-check protocol @@ -257,10 +268,10 @@ impl R1CSProof { &self, num_vars: usize, num_cons: usize, - input: &[Scalar], - evals: &(Scalar, Scalar, Scalar), - transcript: &mut PoseidonTranscript, - gens: &R1CSGens, + input: &[E::ScalarField], + evals: &(E::ScalarField, E::ScalarField, 
E::ScalarField), + transcript: &mut PoseidonTranscript, + gens: &R1CSGens, ) -> Result<(u128, u128, u128), ProofVerifyError> { // serialise and add the IPP commitment to the transcript let mut bytes = Vec::new(); @@ -272,7 +283,7 @@ impl R1CSProof { let c = transcript.challenge_scalar(); - let mut input_as_sparse_poly_entries = vec![SparsePolyEntry::new(0, Scalar::one())]; + let mut input_as_sparse_poly_entries = vec![SparsePolyEntry::new(0, E::ScalarField::one())]; //remaining inputs input_as_sparse_poly_entries.extend( (0..input.len()) @@ -353,10 +364,10 @@ impl R1CSProof { &self, num_vars: usize, num_cons: usize, - input: &[Scalar], - evals: &(Scalar, Scalar, Scalar), - transcript: &mut PoseidonTranscript, - _gens: &R1CSGens, + input: &[E::ScalarField], + evals: &(E::ScalarField, E::ScalarField, E::ScalarField), + transcript: &mut PoseidonTranscript, + _gens: &R1CSGens, ) -> Result { // serialise and add the IPP commitment to the transcript let mut bytes = Vec::new(); @@ -368,7 +379,7 @@ impl R1CSProof { let c = transcript.challenge_scalar(); - let mut input_as_sparse_poly_entries = vec![SparsePolyEntry::new(0, Scalar::one())]; + let mut input_as_sparse_poly_entries = vec![SparsePolyEntry::new(0, E::ScalarField::one())]; //remaining inputs input_as_sparse_poly_entries.extend( (0..input.len()) @@ -399,7 +410,7 @@ impl R1CSProof { let _rng = ark_std::test_rng(); let circuit = R1CSVerificationCircuit::new(&config); - let cs = ConstraintSystem::::new_ref(); + let cs = ConstraintSystem::::new_ref(); circuit.generate_constraints(cs.clone()).unwrap(); assert!(cs.is_satisfied().unwrap()); @@ -429,10 +440,12 @@ mod tests { use crate::parameters::poseidon_params; use super::*; + type F = ark_bls12_377::Fr; + type E = ark_bls12_377::Bls12_377; use ark_std::UniformRand; - fn produce_tiny_r1cs() -> (R1CSInstance, Vec, Vec) { + fn produce_tiny_r1cs() -> (R1CSInstance, Vec, Vec) { // three constraints over five variables Z1, Z2, Z3, Z4, and Z5 // rounded to the nearest power 
of two let num_cons = 128; @@ -440,11 +453,11 @@ mod tests { let num_inputs = 2; // encode the above constraints into three matrices - let mut A: Vec<(usize, usize, Scalar)> = Vec::new(); - let mut B: Vec<(usize, usize, Scalar)> = Vec::new(); - let mut C: Vec<(usize, usize, Scalar)> = Vec::new(); + let mut A: Vec<(usize, usize, F)> = Vec::new(); + let mut B: Vec<(usize, usize, F)> = Vec::new(); + let mut C: Vec<(usize, usize, F)> = Vec::new(); - let one = Scalar::one(); + let one = F::one(); // constraint 0 entries // (Z1 + Z2) * I0 - Z3 = 0; A.push((0, 0, one)); @@ -467,22 +480,22 @@ mod tests { // compute a satisfying assignment let mut rng = ark_std::rand::thread_rng(); - let i0 = Scalar::rand(&mut rng); - let i1 = Scalar::rand(&mut rng); - let z1 = Scalar::rand(&mut rng); - let z2 = Scalar::rand(&mut rng); + let i0 = F::rand(&mut rng); + let i1 = F::rand(&mut rng); + let z1 = F::rand(&mut rng); + let z2 = F::rand(&mut rng); let z3 = (z1 + z2) * i0; // constraint 1: (Z1 + Z2) * I0 - Z3 = 0; let z4 = (z1 + i1) * z3; // constraint 2: (Z1 + I1) * (Z3) - Z4 = 0 - let z5 = Scalar::zero(); //constraint 3 + let z5 = F::zero(); //constraint 3 - let mut vars = vec![Scalar::zero(); num_vars]; + let mut vars = vec![F::zero(); num_vars]; vars[0] = z1; vars[1] = z2; vars[2] = z3; vars[3] = z4; vars[4] = z5; - let mut input = vec![Scalar::zero(); num_inputs]; + let mut input = vec![F::zero(); num_inputs]; input[0] = i0; input[1] = i1; diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index 5cda447d..492cc47d 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -503,7 +503,7 @@ impl SparseMatPolynomial { } impl MultiSparseMatPolynomialAsDense { - pub fn deref(&self, row_mem_val: &[F], col_mem_val: &[F]) -> Derefs { + pub fn deref(&self, row_mem_val: &[F], col_mem_val: &[F]) -> Derefs { let row_ops_val = self.row.deref(row_mem_val); let col_ops_val = self.col.deref(col_mem_val); @@ -1467,7 +1467,7 @@ impl SparseMatPolyEvalProof { evals: &[E::ScalarField], // a 
vector evaluation of \widetilde{M}(r = (rx,ry)) for each M gens: &SparseMatPolyCommitmentGens, transcript: &mut PoseidonTranscript, - ) -> SparseMatPolyEvalProof { + ) -> SparseMatPolyEvalProof { // transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name()); // ensure there is one eval for each polynomial in dense diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 6269bd7a..15788afa 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -180,7 +180,7 @@ impl Polynomial { let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); - let c_u = ::msm_unchecked(&a_vec, &chis).into_affine(); + let c_u = ::msm_unchecked(&a_vec, chis.as_slice()).into_affine(); timer_msm.stop(); let U: Commitment = Commitment { @@ -256,10 +256,7 @@ mod tests { let mut rng = ark_std::test_rng(); let num_vars = 8; let len = 2_usize.pow(num_vars); - let Z: Vec = (0..len) - .into_iter() - .map(|_| F::rand(&mut rng)) - .collect(); + let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) .into_iter() .map(|_| F::rand(&mut rng)) @@ -279,10 +276,7 @@ mod tests { let mut rng = ark_std::test_rng(); let num_vars = 4; let len = 2_usize.pow(num_vars); - let Z: Vec = (0..len) - .into_iter() - .map(|_| F::rand(&mut rng)) - .collect(); + let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) .into_iter() .map(|_| F::rand(&mut rng)) diff --git a/src/unipoly.rs b/src/unipoly.rs index b9fd9a77..ae80c160 100644 --- a/src/unipoly.rs +++ b/src/unipoly.rs @@ -72,7 +72,7 @@ impl UniPoly { eval } - pub fn compress(&self) -> CompressedUniPoly { + pub fn compress(&self) -> CompressedUniPoly{ let coeffs_except_linear_term = [&self.coeffs[..1], &self.coeffs[2..]].concat(); assert_eq!(coeffs_except_linear_term.len() + 1, self.coeffs.len()); CompressedUniPoly { From f2ed1a8212df7827b5564a3e159372609696f6b5 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 11:19:59 +0100 Subject: [PATCH 22/64] 
sip --- src/commitments.rs | 18 +----------------- src/constraints.rs | 24 +++++++++++++----------- src/lib.rs | 1 - src/poseidon_transcript.rs | 2 +- src/r1csinstance.rs | 2 +- src/sparse_mlpoly.rs | 20 ++++++++++---------- 6 files changed, 26 insertions(+), 41 deletions(-) diff --git a/src/commitments.rs b/src/commitments.rs index 6ce52103..ed21149b 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -1,7 +1,5 @@ use crate::parameters::*; use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; -use ark_ff::PrimeField; -use std::ops::Mul; use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_crypto_primitives::sponge::CryptographicSponge; @@ -74,18 +72,4 @@ impl Commitments for G::ScalarField { assert_eq!(gens_n.n, 1); ::msm(&[*self, *blind], &[gens_n.G[0], gens_n.h]) } -} - -impl Commitments for Vec { - fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G { - assert_eq!(gens_n.n, self.len()); - ::msm(self, &gens_n.G) + gens_n.h.mul(blind) - } -} - -impl Commitments for [G::ScalarField] { - fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G { - assert_eq!(gens_n.n, self.len()); - ::msm(self, &gens_n.G) + gens_n.h.mul(blind) - } -} +} \ No newline at end of file diff --git a/src/constraints.rs b/src/constraints.rs index d31ef70f..6bc17394 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -1,5 +1,5 @@ use ark_ec::pairing::Pairing; -use std::{borrow::Borrow}; +use std::borrow::Borrow; use crate::{ math::Math, @@ -42,10 +42,7 @@ where sponge.absorb(&c_var).unwrap(); } - Self { - cs, - sponge, - } + Self { cs, sponge } } fn append(&mut self, input: &FpVar) -> Result<(), SynthesisError> { @@ -83,7 +80,7 @@ impl AllocVar, F> for UniPolyVar { ) -> Result { f().and_then(|c| { let cs = cs.into(); - let cp: &UniPoly = c.borrow(); + let cp: &UniPoly = c.borrow(); let mut coeffs_var = Vec::new(); for coeff in cp.coeffs.iter() { let coeff_var = FpVar::::new_variable(cs.clone(), || Ok(coeff), mode)?; @@ 
-163,7 +160,7 @@ impl AllocVar, F> for SparsePolyEntryVar { ) -> Result { f().and_then(|s| { let cs = cs.into(); - let spe: &SparsePolyEntry = s.borrow(); + let spe: &SparsePolyEntry = s.borrow(); let val_var = FpVar::::new_witness(cs, || Ok(spe.val))?; Ok(Self { idx: spe.idx, @@ -187,7 +184,7 @@ impl AllocVar, F> for SparsePolynomialVar ) -> Result { f().and_then(|s| { let cs = cs.into(); - let sp: &SparsePolynomial = s.borrow(); + let sp: &SparsePolynomial = s.borrow(); let mut Z_var = Vec::new(); for spe in sp.Z.iter() { let spe_var = SparsePolyEntryVar::new_variable(cs.clone(), || Ok(spe), mode)?; @@ -245,7 +242,7 @@ pub struct R1CSVerificationCircuit { } impl R1CSVerificationCircuit { - pub fn new(config: &VerifierConfig) -> Self { + pub fn new>(config: &VerifierConfig) -> Self { Self { num_vars: config.num_vars, num_cons: config.num_cons, @@ -269,7 +266,7 @@ impl R1CSVerificationCircuit { } /// This section implements the sumcheck verification part of Spartan -impl ConstraintSynthesizer for R1CSVerificationCircuit { +impl ConstraintSynthesizer for R1CSVerificationCircuit { fn generate_constraints(self, cs: ConstraintSystemRef) -> ark_relations::r1cs::Result<()> { let mut transcript_var = PoseidonTranscripVar::new(cs.clone(), &self.params, Some(self.prev_challenge)); @@ -402,7 +399,12 @@ pub struct VerifierConfig { pub evals: (E::ScalarField, E::ScalarField, E::ScalarField), pub params: PoseidonConfig, pub prev_challenge: E::ScalarField, - pub claims_phase2: (E::ScalarField, E::ScalarField, E::ScalarField, E::ScalarField), + pub claims_phase2: ( + E::ScalarField, + E::ScalarField, + E::ScalarField, + E::ScalarField, + ), pub eval_vars_at_ry: E::ScalarField, pub polys_sc1: Vec>, pub polys_sc2: Vec>, diff --git a/src/lib.rs b/src/lib.rs index f65d38af..0904622e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -49,7 +49,6 @@ use core::cmp::max; use errors::{ProofVerifyError, R1CSError}; use poseidon_transcript::{PoseidonTranscript}; -use transcript::TranscriptWriter; 
use r1csinstance::{ R1CSCommitment, R1CSCommitmentGens, R1CSDecommitment, R1CSEvalProof, R1CSInstance, }; diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 5b039767..bccba0f7 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -8,7 +8,7 @@ use ark_serialize::CanonicalSerialize; use ark_serialize::Compress; #[derive(Clone)] /// TODO -pub struct PoseidonTranscript { +pub struct PoseidonTranscript { sponge: PoseidonSponge, params: PoseidonConfig, } diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index a9229a9c..05c55e3f 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -331,7 +331,7 @@ pub struct R1CSEvalProof { impl R1CSEvalProof { pub fn prove( - decomm: &R1CSDecommitment, + decomm: &R1CSDecommitment, rx: &[E::ScalarField], // point at which the polynomial is evaluated ry: &[E::ScalarField], evals: &(E::ScalarField, E::ScalarField, E::ScalarField), diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index 492cc47d..1c45e903 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -49,10 +49,10 @@ pub struct DerefsCommitment { comm_ops_val: PolyCommitment, } -impl Derefs { +impl Derefs { pub fn new( - row_ops_val: Vec>, - col_ops_val: Vec>, + row_ops_val: Vec>, + col_ops_val: Vec>, ) -> Self { assert_eq!(row_ops_val.len(), col_ops_val.len()); @@ -70,7 +70,7 @@ impl Derefs { derefs } - pub fn commit(&self, gens: &PolyCommitmentGens) -> DerefsCommitment { + pub fn commit(&self, gens: &PolyCommitmentGens) -> DerefsCommitment where E: Pairing{ let (comm_ops_val, _blinds) = self.comb.commit(gens); DerefsCommitment { comm_ops_val } } @@ -90,7 +90,7 @@ impl DerefsEvalProof { joint_poly: &DensePolynomial, r: &[E::ScalarField], evals: Vec, - gens: &PolyCommitmentGens, + gens: &PolyCommitmentGens, transcript: &mut PoseidonTranscript, ) -> PolyEvalProof { assert_eq!(joint_poly.get_num_vars(), r.len() + evals.len().log_2()); @@ -135,7 +135,7 @@ impl DerefsEvalProof { eval_row_ops_val_vec: 
&[E::ScalarField], eval_col_ops_val_vec: &[E::ScalarField], r: &[E::ScalarField], - gens: &PolyCommitmentGens, + gens: &PolyCommitmentGens, transcript: &mut PoseidonTranscript, ) -> Self { // transcript.append_protocol_name(DerefsEvalProof::protocol_name()); @@ -187,7 +187,7 @@ impl DerefsEvalProof { r: &[E::ScalarField], eval_row_ops_val_vec: &[E::ScalarField], eval_col_ops_val_vec: &[E::ScalarField], - gens: &PolyCommitmentGens, + gens: &PolyCommitmentGens, comm: &DerefsCommitment, transcript: &mut PoseidonTranscript, ) -> Result<(), ProofVerifyError> { @@ -896,7 +896,7 @@ impl HashLayerProof { ), claims_dotp: &[E::ScalarField], comm: &SparseMatPolyCommitment, - gens: &SparseMatPolyCommitmentGens, + gens: &SparseMatPolyCommitmentGens, comm_derefs: &DerefsCommitment, rx: &[E::ScalarField], ry: &[E::ScalarField], @@ -1340,7 +1340,7 @@ impl PolyEvalNetworkProof { dense: &MultiSparseMatPolynomialAsDense, derefs: &Derefs, evals: &[E::ScalarField], - gens: &SparseMatPolyCommitmentGens, + gens: &SparseMatPolyCommitmentGens, transcript: &mut PoseidonTranscript, ) -> Self { // transcript.append_protocol_name(PolyEvalNetworkProof::protocol_name()); @@ -1369,7 +1369,7 @@ impl PolyEvalNetworkProof { comm: &SparseMatPolyCommitment, comm_derefs: &DerefsCommitment, evals: &[E::ScalarField], - gens: &SparseMatPolyCommitmentGens, + gens: &SparseMatPolyCommitmentGens, rx: &[E::ScalarField], ry: &[E::ScalarField], r_mem_check: &(E::ScalarField, E::ScalarField), From 1afb8567494c510a6175484b792b5f65d5600427 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Tue, 7 Feb 2023 23:38:32 +0000 Subject: [PATCH 23/64] prallelise commitment and groth16 verification --- src/commitments.rs | 2 +- src/lib.rs | 8 ++++++-- src/macros.rs | 20 ++++++++++---------- src/nizk/mod.rs | 1 - src/poseidon_transcript.rs | 2 +- src/r1csinstance.rs | 2 +- src/r1csproof.rs | 26 ++++++++++++++------------ src/sqrt_pst.rs | 5 ++++- 8 files changed, 37 insertions(+), 29 deletions(-) diff --git a/src/commitments.rs 
b/src/commitments.rs index 8d21af24..b6d2e204 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -2,7 +2,7 @@ use super::group::{GroupElement, GroupElementAffine, VartimeMultiscalarMul, GROU use super::scalar::Scalar; use crate::group::CompressGroupElement; use crate::parameters::*; -use ark_ec::{AffineRepr}; +use ark_ec::AffineRepr; use std::ops::Mul; diff --git a/src/lib.rs b/src/lib.rs index 3982ceca..770f5058 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -777,14 +777,18 @@ mod tests { // Create a^2 + b + 13 A.push((0, num_vars + 2, (Scalar::one().into_bigint().to_bytes_le()))); // 1*a - B.push((0, num_vars + 2, Scalar::one().into_bigint().to_bytes_le()));// 1*a + B.push((0, num_vars + 2, Scalar::one().into_bigint().to_bytes_le())); // 1*a C.push((0, num_vars + 1, Scalar::one().into_bigint().to_bytes_le())); // 1*z C.push(( 0, num_vars, (-Scalar::from(13u64)).into_bigint().to_bytes_le(), )); // -13*1 - C.push((0, num_vars + 3, (-Scalar::one()).into_bigint().to_bytes_le())); // -1*b + C.push(( + 0, + num_vars + 3, + (-Scalar::one()).into_bigint().to_bytes_le(), + )); // -1*b // Var Assignments (Z_0 = 16 is the only output) let vars = vec![Scalar::zero().into_bigint().to_bytes_le(); num_vars]; diff --git a/src/macros.rs b/src/macros.rs index d6fc14a0..a3469c0b 100644 --- a/src/macros.rs +++ b/src/macros.rs @@ -56,17 +56,17 @@ macro_rules! par { } macro_rules! mul { - ($a:expr, $b:expr) => {{ - let mut a = $a; - a.mul_assign($b); - a - }}; + ($a:expr, $b:expr) => {{ + let mut a = $a; + a.mul_assign($b); + a + }}; } macro_rules! 
sub { - ($a:expr, $b:expr) => {{ - let mut a = $a; - a.sub_assign($b); - a - }}; + ($a:expr, $b:expr) => {{ + let mut a = $a; + a.sub_assign($b); + a + }}; } diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index 4c17ac33..f1284d35 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -8,7 +8,6 @@ use super::group::{CompressGroupElement, CompressedGroup, UnpackGroupElement}; use super::random::RandomTape; use super::scalar::Scalar; - use ark_serialize::*; use std::ops::Mul; diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index bcfefc88..f0cd98ac 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -7,7 +7,7 @@ use ark_crypto_primitives::sponge::{ CryptographicSponge, }; use ark_ec::pairing::Pairing; -use ark_ff::{PrimeField}; +use ark_ff::PrimeField; use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; use ark_serialize::Compress; diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index 0cd8b747..dd8b7d30 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -149,7 +149,7 @@ impl R1CSInstance { pub fn get_digest(&self) -> Vec { let mut bytes = Vec::new(); - self.serialize_with_mode(&mut bytes,Compress::Yes).unwrap(); + self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); let mut shake = Shake256::default(); shake.input(bytes); let mut reader = shake.xof_result(); diff --git a/src/r1csproof.rs b/src/r1csproof.rs index c9be1112..a679516a 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -314,22 +314,23 @@ impl R1CSProof { let dp = start.elapsed().as_millis(); prove_outer.stop(); + let timer_verification = Timer::new("verification"); let start = Instant::now(); - let verifier_time = Timer::new("groth16_verification"); + let (v_A, v_B, v_C, v_AB) = self.claims_phase2; + let mut pubs = vec![]; pubs.extend(self.ry.clone()); pubs.extend(vec![self.eval_vars_at_ry, self.transcript_sat_state]); - let is_verified = Groth16::::verify(&vk, &pubs, 
&proof).unwrap(); - assert!(is_verified); - verifier_time.stop(); - let timer_verification = Timer::new("commitverification"); transcript.new_from_state(&self.transcript_sat_state); + par! { + // verifies the Groth16 proof for the spartan verifier + let is_verified = Groth16::::verify(&vk, &pubs, &proof).unwrap(), - // Verifies the proof of opening against the result of evaluating the - // witness polynomial at point ry. - let res = Polynomial::verify( + // verifies the proof of opening against the result of evaluating the + // witness polynomial at point ry + let res = Polynomial::verify( transcript, &gens.gens_pc.vk, &self.comm, @@ -338,11 +339,12 @@ impl R1CSProof { &self.proof_eval_vars_at_ry, &self.mipp_proof, &self.t, - ); - - timer_verification.stop(); - assert!(res == true); + ) + }; let dv = start.elapsed().as_millis(); + timer_verification.stop(); + + assert!(res == true && is_verified == true); Ok((ds, dp, dv)) } diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index e99cbd55..e8e59196 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -1,4 +1,7 @@ -use crate::mipp::MippProof; +use crate::{ + errors::ProofVerifyError, + mipp::{Error, MippProof}, +}; use ark_bls12_377::{Bls12_377 as I, G1Projective as G1}; use ark_ec::{pairing::Pairing, scalar_mul::variable_base::VariableBaseMSM, CurveGroup}; use ark_ff::One; From 393cf49e4bf1c5085d837b5f1dce5833b497cd6c Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Wed, 8 Feb 2023 12:28:23 +0000 Subject: [PATCH 24/64] finalise comments for mipp --- src/mipp.rs | 71 +++++++++++++++++++++++++---------------------------- 1 file changed, 34 insertions(+), 37 deletions(-) diff --git a/src/mipp.rs b/src/mipp.rs index c37b2e6c..0a61f9ad 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -72,20 +72,19 @@ impl MippProof { let (_rh_l, _rh_r) = (&h_l, &h_r); let (ra_l, ra_r) = (&a_l, &a_r); let (ry_l, ry_r) = (&y_l, &y_r); - // See section 3.3 for paper version with equivalent names - try_par! 
{ - // MIPP part - // Compute cross commitments - // u_l = a[n':] ^ y[:n'] - // TODO to replace by bitsf_multiexp - let comm_u_l = multiexponentiation(ra_l, &ry_r), - // u_r = a[:n'] ^ y[n':] - let comm_u_r = multiexponentiation(ra_r, &ry_l) - // Compute the cross pairing products over the distinct halfs of A - }; + try_par! { + // MIPP part + // Compute cross commitments + // u_l = a[n':] ^ y[:n'] + // TODO to replace by bitsf_multiexp + let comm_u_l = multiexponentiation(ra_l, &ry_r), + // u_r = a[:n'] ^ y[n':] + let comm_u_r = multiexponentiation(ra_r, &ry_l) + }; par! { + // Compute the cross pairing products over the distinct halfs of A // t_l = a[n':] * h[:n'] let comm_t_l = pairings_product::(&a_l, h_r), // t_r = a[:n'] * h[n':] @@ -105,7 +104,7 @@ impl MippProof { // can't control bit size of c_inv let c = c_inv.inverse().unwrap(); - // Set up values for next step of recursion + // Set up values for next step of recursion by compressing as follows // a[n':] + a[:n']^x compress(&mut m_a, split, &c); // y[n':] + y[:n']^x_inv @@ -124,8 +123,8 @@ impl MippProof { let final_a = m_a[0]; let final_h = m_h[0]; - // get the structured polynomial f_h for which final_h = h^f_h(vec{t}) - // is the PST commitment given generator h and toxic waste t + // get the structured polynomial p_h for which final_h = h^p_h(vec{t}) + // is the PST commitment given generator h and toxic waste \vec{t} let poly = DenseMultilinearExtension::::from_evaluations_vec( xs_inv.len(), Self::polynomial_evaluations_from_transcript::(&xs_inv), @@ -133,7 +132,8 @@ impl MippProof { let c = MultilinearPC::::commit_g2(ck, &poly); debug_assert!(c.h_product == final_h); - // generate a proof of opening final_h at a random point + // generate a proof of opening final_h at the random point rs + // from the transcript let rs: Vec = (0..poly.num_vars) .into_iter() .map(|_| transcript.challenge_scalar::(b"random_point")) @@ -150,7 +150,7 @@ impl MippProof { }) } - // builds the polynomial f_h in Lagrange 
basis which uses the + // builds the polynomial p_h in Lagrange basis which uses the // inverses of transcript challenges this is the following // structured polynomial $\prod_i(1 - z_i + cs_inv[m - i - 1] * z_i)$ // where m is the length of cs_inv and z_i is the unknown @@ -158,14 +158,15 @@ impl MippProof { let m = cs_inv.len(); let pow_m = 2_usize.pow(m as u32); - // Constructs the list of evaluations over the boolean hypercube + // constructs the list of evaluations over the boolean hypercube \{0,1\}^m let evals = (0..pow_m) .into_par_iter() .map(|i| { let mut res = F::one(); for j in 0..m { - // We iterate (m - 1)th bit to 0th bit and, in case the bit is 1 - // we multiply by the corresponding challenge. + // we iterate from lsb to msb and, in case the bit is 1, + // we multiply by the corresponding challenge i.e whose + // index corresponds to the bit's position if (i >> j) & 1 == 1 { res *= cs_inv[m - j - 1]; } @@ -215,8 +216,8 @@ impl MippProof { xs.push(c); xs_inv.push(c_inv); - // the verifier computes the final_y by themselves given - // it's field operations so quite fast and parallelisation + // the verifier computes the final_y by themselves because + // this is field operations so it's quite fast and parallelisation // doesn't bring much improvement final_y *= E::ScalarField::one() + c_inv.mul(point[i]) - point[i]; } @@ -267,26 +268,31 @@ impl MippProof { let ref_final_res = &mut final_res; ref_final_res.merge(&res); - let mut point: Vec = Vec::new(); + // get the point rs from the transcript, used by the prover to generate + // the PST proof + let mut rs: Vec = Vec::new(); let m = xs_inv.len(); for _i in 0..m { let r = transcript.challenge_scalar::(b"random_point"); - point.push(r); + rs.push(r); } - // Given f_h is structured, the verifier can compute it's evaluation at - // the random point point in O(m) time by themselves and use a PST - // verification to ensure final_h is well formed. 
+ // Given p_h is structured as defined above, the verifier can compute + // p_h(rs) by themselves in O(m) time let v = (0..m) .into_par_iter() - .map(|i| E::ScalarField::one() + point[i].mul(xs_inv[m - i - 1]) - point[i]) + .map(|i| E::ScalarField::one() + rs[i].mul(xs_inv[m - i - 1]) - rs[i]) .product(); let comm_h = CommitmentG2 { nv: m, h_product: proof.final_h, }; - let check_h = MultilinearPC::::check_2(vk, &comm_h, &point, v, &proof.pst_proof_h); + + // final_h is the commitment of p_h so the verifier can perform + // a PST verification at the random point rs, given the pst proof + // received from the prover prover + let check_h = MultilinearPC::::check_2(vk, &comm_h, &rs, v, &proof.pst_proof_h); let final_u = proof.final_a.mul(final_y); let final_t: ::TargetField = E::pairing(proof.final_a, proof.final_h).0; @@ -374,15 +380,6 @@ pub fn multiexponentiation( } pub fn pairings_product(gs: &[E::G1Affine], hs: &[E::G2Affine]) -> E::TargetField { - //let pairings: Vec<_> = gs - // .into_par_iter() - // .map(|g| ::G1Prepared::from(*g)) - // .zip( - // hs.into_par_iter() - // .map(|h| ::G2Prepared::from(*h)), - // ) - // .collect(); - E::multi_pairing(gs, hs).0 } From 55af3ad7723d3e4260fe022bff026dd19e2f8ff8 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 14:53:43 +0100 Subject: [PATCH 25/64] wip --- src/commitments.rs | 35 ++++++++++++++++++++++++----------- src/dense_mlpoly.rs | 37 +++++++++++++++++-------------------- src/lib.rs | 21 +++++++++++++++------ src/nizk/bullet.rs | 41 +++++++++++++++++++++-------------------- src/nizk/mod.rs | 39 +++++++++++++-------------------------- 5 files changed, 90 insertions(+), 83 deletions(-) diff --git a/src/commitments.rs b/src/commitments.rs index ed21149b..1de774d9 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -3,6 +3,7 @@ use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_crypto_primitives::sponge::CryptographicSponge; 
+use std::ops::Mul; #[derive(Debug, Clone)] pub struct MultiCommitGens { @@ -16,14 +17,16 @@ impl MultiCommitGens { let params = poseidon_params(); let mut sponge = PoseidonSponge::new(¶ms); sponge.absorb(&label); - sponge.absorb(&G::generator().0); + let mut b = Vec::new(); + G::generator().serialize_compressed(&mut b).unwrap(); + sponge.absorb(&b); let gens = (0..=n) .map(|i| { - let mut el_aff: Option = None; + let mut el_aff: Option = None; while el_aff.is_none() { let uniform_bytes = sponge.squeeze_bytes(64); - el_aff = G::from_random_bytes(&uniform_bytes); + el_aff = G::Affine::from_random_bytes(&uniform_bytes); } el_aff.unwrap().clear_cofactor() }) @@ -62,14 +65,24 @@ impl MultiCommitGens { } } -// TODO replace that by arkworks CommitmentScheme probably exists -pub trait Commitments { - fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G; -} +pub struct PedersenCommit; -impl Commitments for G::ScalarField { - fn commit(&self, blind: &G::ScalarField, gens_n: &MultiCommitGens) -> G { +impl PedersenCommit { + pub fn commit_scalar( + scalar: &G::ScalarField, + blind: &G::ScalarField, + gens_n: &MultiCommitGens, + ) -> G { assert_eq!(gens_n.n, 1); - ::msm(&[*self, *blind], &[gens_n.G[0], gens_n.h]) + ::msm_unchecked(&[gens_n.G[0], gens_n.h], &[*scalar, *blind]) + } + + pub fn commit_slice( + scalars: &[G::ScalarField], + blind: &G::ScalarField, + gens_n: &MultiCommitGens, + ) -> G { + assert_eq!(scalars.len(), gens_n.n); + ::msm_unchecked(&gens_n.G, scalars) + gens_n.h.mul(blind) } -} \ No newline at end of file +} diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 68c1a105..abeec38f 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -9,11 +9,9 @@ use super::math::Math; use super::nizk::{DotProductProofGens, DotProductProofLog}; use ark_ec::scalar_mul::variable_base::VariableBaseMSM; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ff::{One, PrimeField, UniformRand, Zero}; -use ark_poly::{MultilinearExtension}; -use 
ark_poly_commit::multilinear_pc::data_structures::{ - CommitterKey, VerifierKey, -}; +use ark_ff::{One, PrimeField, Zero}; +use ark_poly::MultilinearExtension; +use ark_poly_commit::multilinear_pc::data_structures::{CommitterKey, VerifierKey}; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::*; use core::ops::Index; @@ -188,7 +186,7 @@ impl PolyCommitmentGens { // num vars is the number of variables in the multilinear polynomial // this gives the maximum degree bound pub fn new(num_vars: usize, label: &'static [u8]) -> PolyCommitmentGens { - let (_left, right) = EqPolynomial::compute_factored_lens(num_vars); + let (_left, right) = EqPolynomial::::compute_factored_lens(num_vars); let gens = DotProductProofGens::new(right.pow2(), label); // Generates the SRS and trims it based on the number of variables in the @@ -258,7 +256,7 @@ impl EqPolynomial { pub fn compute_factored_evals(&self) -> (Vec, Vec) { let ell = self.r.len(); - let (left_num_vars, _right_num_vars) = EqPolynomial::compute_factored_lens(ell); + let (left_num_vars, _right_num_vars) = EqPolynomial::::compute_factored_lens(ell); let L = EqPolynomial::new(self.r[..left_num_vars].to_vec()).evals(); let R = EqPolynomial::new(self.r[left_num_vars..ell].to_vec()).evals(); @@ -324,11 +322,7 @@ impl DensePolynomial { assert_eq!(L_size * R_size, self.Z.len()); let C = (0..L_size) .into_par_iter() - .map(|i| { - self.Z[R_size * i..R_size * (i + 1)] - .commit(&blinds[i], gens) - .compress() - }) + .map(|i| self.Z[R_size * i..R_size * (i + 1)].commit(&blinds[i], gens)) .collect(); PolyCommitment { C } } @@ -361,7 +355,8 @@ impl DensePolynomial { let n = self.Z.len(); let ell = self.get_num_vars(); assert_eq!(n, ell.pow2()); - let (left_num_vars, right_num_vars) = EqPolynomial::compute_factored_lens(ell); + let (left_num_vars, right_num_vars) = + EqPolynomial::::compute_factored_lens(ell); let L_size = left_num_vars.pow2(); let R_size = right_num_vars.pow2(); assert_eq!(L_size * R_size, n); @@ 
-376,7 +371,8 @@ impl DensePolynomial { } pub fn bound(&self, L: &[F]) -> Vec { - let (left_num_vars, right_num_vars) = EqPolynomial::compute_factored_lens(self.get_num_vars()); + let (left_num_vars, right_num_vars) = + EqPolynomial::::compute_factored_lens(self.get_num_vars()); let L_size = left_num_vars.pow2(); let R_size = right_num_vars.pow2(); (0..R_size) @@ -408,7 +404,7 @@ impl DensePolynomial { assert_eq!(r.len(), self.get_num_vars()); let chis = EqPolynomial::new(r.to_vec()).evals(); assert_eq!(chis.len(), self.Z.len()); - DotProductProofLog::compute_dotproduct(&self.Z, &chis) + crate::dot_product(&self.Z, &chis) } fn vec(&self) -> &Vec { @@ -462,7 +458,7 @@ impl Index for DensePolynomial { impl TranscriptWriter for PolyCommitment { fn write_to_transcript(&self, transcript: &mut impl Transcript) { for i in 0..self.C.len() { - transcript.append_point(&self.C[i]); + transcript.append(b"", &self.C[i]); } } } @@ -494,7 +490,8 @@ where // assert vectors are of the right size assert_eq!(poly.get_num_vars(), r.len()); - let (left_num_vars, right_num_vars) = EqPolynomial::compute_factored_lens(r.len()); + let (left_num_vars, right_num_vars) = + EqPolynomial::::compute_factored_lens(r.len()); let L_size = left_num_vars.pow2(); let R_size = right_num_vars.pow2(); @@ -603,7 +600,7 @@ mod tests { .collect::>(); // compute dot product between LZ and R - DotProductProofLog::compute_dotproduct(&LZ, &R) + crate::dot_product(&LZ, &R) } #[test] @@ -746,11 +743,11 @@ mod tests { assert_eq!(eval, F::from(28)); let gens = PolyCommitmentGens::new(poly.get_num_vars(), b"test-two"); - let (poly_commitment, blinds) = poly.commit(&gens, None); + let (poly_commitment, blinds) = poly.commit(&gens); let params = poseidon_params(); let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (proof, C_Zr) = PolyEvalProof::prove( + let (proof, C_Zr) = PolyEvalProof::::prove( &poly, Some(&blinds), &r, diff --git a/src/lib.rs b/src/lib.rs index 0904622e..9f98214e 100644 --- 
a/src/lib.rs +++ b/src/lib.rs @@ -47,8 +47,9 @@ use ark_serialize::*; use ark_std::Zero; use core::cmp::max; use errors::{ProofVerifyError, R1CSError}; +use transcript::TranscriptWriter; -use poseidon_transcript::{PoseidonTranscript}; +use poseidon_transcript::PoseidonTranscript; use r1csinstance::{ R1CSCommitment, R1CSCommitmentGens, R1CSDecommitment, R1CSEvalProof, R1CSInstance, }; @@ -369,7 +370,7 @@ impl SNARK { let timer_prove = Timer::new("SNARK::prove"); // transcript.append_protocol_name(SNARK::protocol_name()); - comm.comm.append_to_poseidon(transcript); + comm.comm.write_to_transcript(transcript); let (r1cs_sat_proof, rx, ry) = { let (proof, rx, ry) = { @@ -414,9 +415,9 @@ impl SNARK { let timer_eval = Timer::new("eval_sparse_polys"); let inst_evals = { let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry); - transcript.append_scalar(&Ar); - transcript.append_scalar(&Br); - transcript.append_scalar(&Cr); + transcript.appedn(&Ar); + transcript.append(&Br); + transcript.append(&Cr); (Ar, Br, Cr) }; timer_eval.stop(); @@ -592,7 +593,7 @@ impl NIZK { ) -> Result { let timer_verify = Timer::new("NIZK::verify"); - transcript.append_bytes(&inst.digest); + transcript.append(&inst.digest); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check @@ -664,6 +665,14 @@ impl NIZK { } } +pub(crate) fn dot_product(a: &[F], b: &[F]) -> F { + let mut res = F::zero(); + for i in 0..a.len() { + res += &a[i] * &b[i]; + } + res +} + #[cfg(test)] mod tests { use crate::parameters::poseidon_params; diff --git a/src/nizk/bullet.rs b/src/nizk/bullet.rs index 29742b60..47570e3b 100644 --- a/src/nizk/bullet.rs +++ b/src/nizk/bullet.rs @@ -3,10 +3,10 @@ #![allow(non_snake_case)] #![allow(clippy::type_complexity)] #![allow(clippy::too_many_arguments)] +use super::super::errors::ProofVerifyError; use crate::math::Math; use crate::poseidon_transcript::PoseidonTranscript; - -use super::super::errors::ProofVerifyError; +use 
crate::transcript::Transcript; use ark_ec::CurveGroup; use ark_ff::Field; use ark_serialize::*; @@ -81,14 +81,15 @@ impl BulletReductionProof { let c_R = inner_product(a_R, b_L); let (blind_L, blind_R) = blinds_iter.next().unwrap(); + let gright_vec = G_R + .iter() + .chain(iter::once(Q)) + .chain(iter::once(H)) + .cloned() + .collect::>(); - let L = G::msm( - G_R - .iter() - .chain(iter::once(Q)) - .chain(iter::once(H)) - .map(G::Affine::from) - .collect::>(), + let L = G::msm_unchecked( + &G::normalize_batch(&gright_vec), a_L .iter() .chain(iter::once(&c_L)) @@ -97,14 +98,14 @@ impl BulletReductionProof { .collect::>() .as_slice(), ); - - let R = G::msm( - G_L - .iter() - .chain(iter::once(Q)) - .chain(iter::once(H)) - .map(G::Affine::from) - .collect::>(), + let gl_vec = G_L + .iter() + .chain(iter::once(Q)) + .chain(iter::once(H)) + .cloned() + .collect::>(); + let R = G::msm_unchecked( + &G::normalize_batch(&gl_vec), a_R .iter() .chain(iter::once(&c_R)) @@ -114,10 +115,10 @@ impl BulletReductionProof { .as_slice(), ); - transcript.append_point(&L.compress()); - transcript.append_point(&R.compress()); + transcript.append(b"", &L); + transcript.append(b"", &R); - let u = transcript.challenge_scalar(); + let u = transcript.challenge_scalar(b""); let u_inv = u.inverse().unwrap(); for i in 0..n { diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index c0559f1f..e753ea11 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -1,9 +1,10 @@ #![allow(clippy::too_many_arguments)] +use super::commitments::{MultiCommitGens, PedersenCommit}; +use super::errors::ProofVerifyError; +use crate::ark_std::UniformRand; use crate::math::Math; use crate::poseidon_transcript::PoseidonTranscript; - -use super::commitments::{Commitments, MultiCommitGens}; -use super::errors::ProofVerifyError; +use crate::transcript::Transcript; use ark_ec::CurveGroup; use ark_serialize::*; @@ -40,11 +41,6 @@ impl DotProductProofLog { b"dot product proof (log)" } - pub fn compute_dotproduct(a: &[G], b: 
&[G]) -> G { - assert_eq!(a.len(), b.len()); - (0..a.len()).map(|i| a[i] * b[i]).sum() - } - pub fn prove( gens: &DotProductProofGens, transcript: &mut PoseidonTranscript, @@ -75,16 +71,16 @@ impl DotProductProofLog { .collect::>(); }; - let Cx = x_vec.commit(blind_x, &gens.gens_n).compress(); - transcript.append_point(&Cx); + let Cx = PedersenCommit::commit_slice(x_vec, blind_x, &gens.gens_n); + transcript.append(b"", &Cx); - let Cy = y.commit(blind_y, &gens.gens_1).compress(); - transcript.append_point(&Cy); - transcript.append_scalar_vector(a_vec); + let Cy = PedersenCommit::commit_scalar(y, blind_y, &gens.gens_1); + transcript.append(b"", &Cy); + transcript.append(b"", &a_vec); let blind_Gamma = (*blind_x) + blind_y; let (bullet_reduction_proof, _Gamma_hat, x_hat, a_hat, g_hat, rhat_Gamma) = - BulletReductionProof::prove( + BulletReductionProof::::prove( transcript, &gens.gens_1.G[0], &gens.gens_n.G, @@ -170,8 +166,7 @@ impl DotProductProofLog { let z2_s = &self.z2; let lhs = (Gamma_hat.mul(c_s) + beta_s).mul(a_hat_s) + delta_s; - let rhs = - (g_hat + gens.gens_1.G[0].mul(a_hat_s)).mul(z1_s) + gens.gens_1.h.mul(z2_s); + let rhs = (g_hat + gens.gens_1.G[0].mul(a_hat_s)).mul(z1_s) + gens.gens_1.h.mul(z2_s); assert_eq!(lhs, rhs); @@ -191,7 +186,6 @@ mod tests { use super::*; use ark_std::UniformRand; type F = ark_bls12_377::Fr; - #[test] fn check_dotproductproof_log() { @@ -210,15 +204,8 @@ mod tests { let params = poseidon_params(); let mut prover_transcript = PoseidonTranscript::new(¶ms); - let (proof, Cx, Cy) = DotProductProofLog::prove( - &gens, - &mut prover_transcript, - &x, - &r_x, - &a, - &y, - &r_y, - ); + let (proof, Cx, Cy) = + DotProductProofLog::prove(&gens, &mut prover_transcript, &x, &r_x, &a, &y, &r_y); let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof From 79feba13167a6436f0f1ef208b023619378e7f98 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Wed, 8 Feb 2023 12:28:23 +0000 Subject: [PATCH 26/64] finalise comments --- 
src/mipp.rs | 71 +++++++++++++++++++++++-------------------------- src/sqrt_pst.rs | 39 ++++++++++++++++----------- 2 files changed, 58 insertions(+), 52 deletions(-) diff --git a/src/mipp.rs b/src/mipp.rs index c37b2e6c..0a61f9ad 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -72,20 +72,19 @@ impl MippProof { let (_rh_l, _rh_r) = (&h_l, &h_r); let (ra_l, ra_r) = (&a_l, &a_r); let (ry_l, ry_r) = (&y_l, &y_r); - // See section 3.3 for paper version with equivalent names - try_par! { - // MIPP part - // Compute cross commitments - // u_l = a[n':] ^ y[:n'] - // TODO to replace by bitsf_multiexp - let comm_u_l = multiexponentiation(ra_l, &ry_r), - // u_r = a[:n'] ^ y[n':] - let comm_u_r = multiexponentiation(ra_r, &ry_l) - // Compute the cross pairing products over the distinct halfs of A - }; + try_par! { + // MIPP part + // Compute cross commitments + // u_l = a[n':] ^ y[:n'] + // TODO to replace by bitsf_multiexp + let comm_u_l = multiexponentiation(ra_l, &ry_r), + // u_r = a[:n'] ^ y[n':] + let comm_u_r = multiexponentiation(ra_r, &ry_l) + }; par! 
{ + // Compute the cross pairing products over the distinct halfs of A // t_l = a[n':] * h[:n'] let comm_t_l = pairings_product::(&a_l, h_r), // t_r = a[:n'] * h[n':] @@ -105,7 +104,7 @@ impl MippProof { // can't control bit size of c_inv let c = c_inv.inverse().unwrap(); - // Set up values for next step of recursion + // Set up values for next step of recursion by compressing as follows // a[n':] + a[:n']^x compress(&mut m_a, split, &c); // y[n':] + y[:n']^x_inv @@ -124,8 +123,8 @@ impl MippProof { let final_a = m_a[0]; let final_h = m_h[0]; - // get the structured polynomial f_h for which final_h = h^f_h(vec{t}) - // is the PST commitment given generator h and toxic waste t + // get the structured polynomial p_h for which final_h = h^p_h(vec{t}) + // is the PST commitment given generator h and toxic waste \vec{t} let poly = DenseMultilinearExtension::::from_evaluations_vec( xs_inv.len(), Self::polynomial_evaluations_from_transcript::(&xs_inv), @@ -133,7 +132,8 @@ impl MippProof { let c = MultilinearPC::::commit_g2(ck, &poly); debug_assert!(c.h_product == final_h); - // generate a proof of opening final_h at a random point + // generate a proof of opening final_h at the random point rs + // from the transcript let rs: Vec = (0..poly.num_vars) .into_iter() .map(|_| transcript.challenge_scalar::(b"random_point")) @@ -150,7 +150,7 @@ impl MippProof { }) } - // builds the polynomial f_h in Lagrange basis which uses the + // builds the polynomial p_h in Lagrange basis which uses the // inverses of transcript challenges this is the following // structured polynomial $\prod_i(1 - z_i + cs_inv[m - i - 1] * z_i)$ // where m is the length of cs_inv and z_i is the unknown @@ -158,14 +158,15 @@ impl MippProof { let m = cs_inv.len(); let pow_m = 2_usize.pow(m as u32); - // Constructs the list of evaluations over the boolean hypercube + // constructs the list of evaluations over the boolean hypercube \{0,1\}^m let evals = (0..pow_m) .into_par_iter() .map(|i| { let mut res = 
F::one(); for j in 0..m { - // We iterate (m - 1)th bit to 0th bit and, in case the bit is 1 - // we multiply by the corresponding challenge. + // we iterate from lsb to msb and, in case the bit is 1, + // we multiply by the corresponding challenge i.e whose + // index corresponds to the bit's position if (i >> j) & 1 == 1 { res *= cs_inv[m - j - 1]; } @@ -215,8 +216,8 @@ impl MippProof { xs.push(c); xs_inv.push(c_inv); - // the verifier computes the final_y by themselves given - // it's field operations so quite fast and parallelisation + // the verifier computes the final_y by themselves because + // this is field operations so it's quite fast and parallelisation // doesn't bring much improvement final_y *= E::ScalarField::one() + c_inv.mul(point[i]) - point[i]; } @@ -267,26 +268,31 @@ impl MippProof { let ref_final_res = &mut final_res; ref_final_res.merge(&res); - let mut point: Vec = Vec::new(); + // get the point rs from the transcript, used by the prover to generate + // the PST proof + let mut rs: Vec = Vec::new(); let m = xs_inv.len(); for _i in 0..m { let r = transcript.challenge_scalar::(b"random_point"); - point.push(r); + rs.push(r); } - // Given f_h is structured, the verifier can compute it's evaluation at - // the random point point in O(m) time by themselves and use a PST - // verification to ensure final_h is well formed. 
+ // Given p_h is structured as defined above, the verifier can compute + // p_h(rs) by themselves in O(m) time let v = (0..m) .into_par_iter() - .map(|i| E::ScalarField::one() + point[i].mul(xs_inv[m - i - 1]) - point[i]) + .map(|i| E::ScalarField::one() + rs[i].mul(xs_inv[m - i - 1]) - rs[i]) .product(); let comm_h = CommitmentG2 { nv: m, h_product: proof.final_h, }; - let check_h = MultilinearPC::::check_2(vk, &comm_h, &point, v, &proof.pst_proof_h); + + // final_h is the commitment of p_h so the verifier can perform + // a PST verification at the random point rs, given the pst proof + // received from the prover prover + let check_h = MultilinearPC::::check_2(vk, &comm_h, &rs, v, &proof.pst_proof_h); let final_u = proof.final_a.mul(final_y); let final_t: ::TargetField = E::pairing(proof.final_a, proof.final_h).0; @@ -374,15 +380,6 @@ pub fn multiexponentiation( } pub fn pairings_product(gs: &[E::G1Affine], hs: &[E::G2Affine]) -> E::TargetField { - //let pairings: Vec<_> = gs - // .into_par_iter() - // .map(|g| ::G1Prepared::from(*g)) - // .zip( - // hs.into_par_iter() - // .map(|h| ::G2Prepared::from(*h)), - // ) - // .collect(); - E::multi_pairing(gs, hs).0 } diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index e99cbd55..6b8f698e 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -24,9 +24,9 @@ pub struct Polynomial { impl Polynomial { // Given the evaluations over the boolean hypercube of a polynomial p of size // 2*m compute the sqrt-sized polynomials p_i as - // p_i(Y) = \sum_{j \in \{0,1\}^m} p(j, i) * chi_j(Y) - // where p(X,Y) = \sum_{i \in \{0,\1}^m} chi_i(X) * p_i(Y) - // + // p_i(X) = \sum_{j \in \{0,1\}^m} p(j, i) * chi_j(X) + // where p(X,Y) = \sum_{i \in \{0,\1}^m} + // (\sum_{j \in \{0, 1\}^{m}} p(j, i) * \chi_j(X)) * \chi_i(Y) // TODO: add case when the length of the list is not an even power of 2 pub fn from_evaluations(Z: &[Scalar]) -> Self { let pl_timer = Timer::new("poly_list_build"); @@ -40,7 +40,7 @@ impl Polynomial { let z: Vec = 
(0..pow_m) .into_par_iter() // viewing the list of evaluation as a square matrix - // we select by row i and column j + // we select by row j and column i .map(|j| Z[(j << m) | i]) .collect(); DensePolynomial::new(z) @@ -57,9 +57,10 @@ impl Polynomial { } // Given point = (\vec{a}, \vec{b}), compute the polynomial q as - // q(Y) = - // \sum_{j \in \{0,1\}^m}(\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(b)) * chi_j(Y) - // and p(a,b) = q(b) where p is the initial polynomial + // q(X) = + // \sum_{j \in \{0,1\}^m}chi_j(X) * + // (\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(\vec{b})) + // and p(\vec{a},\vec{b}) = q(\vec{b}) where p is the initial polynomial fn get_q(&mut self, point: &[Scalar]) { let q_timer = Timer::new("build_q"); debug_assert!(point.len() == 2 * self.m); @@ -83,7 +84,7 @@ impl Polynomial { } // Given point = (\vec{a}, \vec{b}) used to construct q - // compute q(b) = p(a,b). + // compute q(a) = p(a,b). pub fn eval(&mut self, point: &[Scalar]) -> Scalar { let a = &point[0..point.len() / 2]; let _b = &point[point.len() / 2..point.len()]; @@ -140,7 +141,8 @@ impl Polynomial { let mut prod = Scalar::one(); for j in 0..m { let b_j = b[j]; - // iterate from msb to lsb of i to build chi_i as defined above + // iterate from first (msb) to last (lsb) bit of i + // to build chi_i using the formula above if i >> (m - j - 1) & 1 == 1 { prod = prod * b_j; } else { @@ -169,8 +171,8 @@ impl Polynomial { let timer_open = Timer::new("sqrt_open"); // Compute the PST commitment to q obtained as the inner products of the - // commitments to the polynomials p_i and chi_i(a) for i ranging over the - // boolean hypercube of size m. + // commitments to the polynomials p_i and chi_i(\vec{b}) for i ranging over + // the boolean hypercube of size m. 
let _m = a.len(); let timer_msm = Timer::new("msm"); if self.chis_b.is_none() { @@ -180,7 +182,7 @@ impl Polynomial { let chis = self.chis_b.clone().unwrap(); assert!(chis.len() == comm_list.len()); - let a_vec: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); + let comms: Vec<_> = comm_list.par_iter().map(|c| c.g_product).collect(); let c_u = ::msm_unchecked(a_vec.as_slice(), chis.as_slice()).into_affine(); @@ -200,7 +202,7 @@ impl Polynomial { let mipp_proof = MippProof::::prove::( transcript, ck, - a_vec, + comms, chis.to_vec(), h_vec, &c_u, @@ -210,6 +212,10 @@ impl Polynomial { timer_mipp_proof.stop(); let timer_proof = Timer::new("pst_open"); + + // reversing a is necessary because the sumcheck code in spartan generates + // the point in reverse order compared to how the polynomial commitment + // expects let mut a_rev = a.to_vec().clone(); a_rev.reverse(); @@ -248,11 +254,14 @@ impl Polynomial { assert!(res_mipp == true); timer_mipp_verify.stop(); + // reversing a is necessary because the sumcheck code in spartan generates + // the point in reverse order compared to how the polynomial commitment + // expects let mut a_rev = a.to_vec().clone(); a_rev.reverse(); - let timer_pst_verify = Timer::new("pst_verify"); - // verify that q(a) is indeed v + let timer_pst_verify = Timer::new("pst_verify"); + // PST proof that q(a) is indeed equal to value claimed by the prover let res = MultilinearPC::::check(vk, U, &a_rev, v, pst_proof); timer_pst_verify.stop(); res From 0e071e0427742434c996b2b3daa608e40c7451e1 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 15:59:11 +0100 Subject: [PATCH 27/64] wip --- src/commitments.rs | 2 +- src/dense_mlpoly.rs | 10 +++--- src/lib.rs | 40 +++++++++++------------ src/nizk/bullet.rs | 66 +++++++++++++++++++------------------- src/nizk/mod.rs | 37 ++++++++++----------- src/poseidon_transcript.rs | 4 +-- src/product_tree.rs | 45 +++++++++++++------------- src/r1csinstance.rs | 13 ++++---- src/r1csproof.rs | 
30 +++++++++-------- 9 files changed, 125 insertions(+), 122 deletions(-) diff --git a/src/commitments.rs b/src/commitments.rs index 1de774d9..4acc20e4 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -85,4 +85,4 @@ impl PedersenCommit { assert_eq!(scalars.len(), gens_n.n); ::msm_unchecked(&gens_n.G, scalars) + gens_n.h.mul(blind) } -} +} \ No newline at end of file diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index abeec38f..3989224c 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -3,7 +3,7 @@ use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::{Transcript, TranscriptWriter}; -use super::commitments::{Commitments, MultiCommitGens}; +use super::commitments::{MultiCommitGens, PedersenCommit}; use super::errors::ProofVerifyError; use super::math::Math; use super::nizk::{DotProductProofGens, DotProductProofLog}; @@ -322,7 +322,9 @@ impl DensePolynomial { assert_eq!(L_size * R_size, self.Z.len()); let C = (0..L_size) .into_par_iter() - .map(|i| self.Z[R_size * i..R_size * (i + 1)].commit(&blinds[i], gens)) + .map(|i| { + PedersenCommit::commit_slice(&self.Z[R_size * i..R_size * (i + 1)], &blinds[i], gens) + }) .collect(); PolyCommitment { C } } @@ -563,9 +565,7 @@ where comm: &PolyCommitment, ) -> Result<(), ProofVerifyError> { // compute a commitment to Zr with a blind of zero - let C_Zr = Zr - .commit(&E::ScalarField::zero(), &gens.gens.gens_1) - .compress(); + let C_Zr = PedersenCommit::commit_scalar(Zr, &E::ScalarField::zero(), &gens.gens.gens_1); self.verify(gens, transcript, r, &C_Zr, comm) } diff --git a/src/lib.rs b/src/lib.rs index 9f98214e..6936d082 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -41,12 +41,10 @@ pub mod parameters; mod constraints; pub mod poseidon_transcript; -use ark_ff::Field; - use ark_serialize::*; -use ark_std::Zero; use core::cmp::max; use errors::{ProofVerifyError, R1CSError}; +use transcript::Transcript; use transcript::TranscriptWriter; use 
poseidon_transcript::PoseidonTranscript; @@ -415,9 +413,9 @@ impl SNARK { let timer_eval = Timer::new("eval_sparse_polys"); let inst_evals = { let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry); - transcript.appedn(&Ar); - transcript.append(&Br); - transcript.append(&Cr); + transcript.append(b"", &Ar); + transcript.append(b"", &Br); + transcript.append(b"", &Cr); (Ar, Br, Cr) }; timer_eval.stop(); @@ -462,7 +460,7 @@ impl SNARK { // transcript.append_protocol_name(SNARK::protocol_name()); // append a commitment to the computation to the transcript - comm.comm.append_to_poseidon(transcript); + comm.comm.write_to_transcript(transcript); let timer_sat_proof = Timer::new("verify_sat_proof"); assert_eq!(input.assignment.len(), comm.comm.get_num_inputs()); @@ -485,9 +483,9 @@ impl SNARK { // transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); let (Ar, Br, Cr) = &self.inst_evals; - transcript.append_scalar(&Ar); - transcript.append_scalar(&Br); - transcript.append_scalar(&Cr); + transcript.append(b"", Ar); + transcript.append(b"", Br); + transcript.append(b"", Cr); self.r1cs_eval_proof.verify( &comm.comm, @@ -547,7 +545,7 @@ impl NIZK { ) -> Self { let timer_prove = Timer::new("NIZK::prove"); // transcript.append_protocol_name(NIZK::protocol_name()); - transcript.append_bytes(&inst.digest); + transcript.append(b"", &inst.digest); let (r1cs_sat_proof, rx, ry) = { // we might need to pad variables @@ -593,7 +591,7 @@ impl NIZK { ) -> Result { let timer_verify = Timer::new("NIZK::verify"); - transcript.append(&inst.digest); + transcript.append(b"", &inst.digest); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check @@ -634,7 +632,7 @@ impl NIZK { let timer_verify = Timer::new("NIZK::verify"); // transcript.append_protocol_name(NIZK::protocol_name()); - transcript.append_bytes(&inst.digest); + transcript.append(b"", &inst.digest); // We send evaluations of A, B, C at r = (rx, ry) as claims // to 
enable the verifier complete the first sum-check @@ -665,10 +663,11 @@ impl NIZK { } } +#[inline] pub(crate) fn dot_product(a: &[F], b: &[F]) -> F { let mut res = F::zero(); for i in 0..a.len() { - res += &a[i] * &b[i]; + res += a[i] * &b[i]; } res } @@ -678,6 +677,7 @@ mod tests { use crate::parameters::poseidon_params; use super::*; + use crate::ark_std::Zero; use ark_ff::{BigInteger, One, PrimeField}; type F = ark_bls12_377::Fr; type E = ark_bls12_377::Bls12_377; @@ -689,7 +689,7 @@ mod tests { let num_inputs = 10; // produce public generators - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_cons); + let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_cons); // produce a synthetic R1CSInstance let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); @@ -733,7 +733,7 @@ mod tests { let B = vec![(100, 1, zero.to_vec())]; let C = vec![(1, 1, zero.to_vec())]; - let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C); + let inst = Instance::::new(num_cons, num_vars, num_inputs, &A, &B, &C); assert!(inst.is_err()); assert_eq!(inst.err(), Some(R1CSError::InvalidIndex)); } @@ -758,7 +758,7 @@ mod tests { let B = vec![(1, 1, larger_than_mod.to_vec())]; let C = vec![(1, 1, zero.to_vec())]; - let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C); + let inst = Instance::::new(num_cons, num_vars, num_inputs, &A, &B, &C); assert!(inst.is_err()); assert_eq!(inst.err(), Some(R1CSError::InvalidScalar)); } @@ -793,7 +793,7 @@ mod tests { inputs[1] = F::from(1u64).into_bigint().to_bytes_le(); inputs[2] = F::from(2u64).into_bigint().to_bytes_le(); - let assignment_inputs = InputsAssignment::new(&inputs).unwrap(); + let assignment_inputs = InputsAssignment::::new(&inputs).unwrap(); let assignment_vars = VarsAssignment::new(&vars).unwrap(); // Check if instance is satisfiable @@ -802,7 +802,7 @@ mod tests { assert!(res.unwrap(), "should be satisfied"); // SNARK public params - let gens = 
SNARKGens::new(num_cons, num_vars, num_inputs, num_non_zero_entries); + let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_non_zero_entries); // create a commitment to the R1CS instance let (comm, decomm) = SNARK::encode(&inst, &gens); @@ -828,7 +828,7 @@ mod tests { .is_ok()); // NIZK public params - let gens = NIZKGens::new(num_cons, num_vars, num_inputs); + let gens = NIZKGens::::new(num_cons, num_vars, num_inputs); let params = poseidon_params(); diff --git a/src/nizk/bullet.rs b/src/nizk/bullet.rs index 47570e3b..5c8857c1 100644 --- a/src/nizk/bullet.rs +++ b/src/nizk/bullet.rs @@ -4,6 +4,7 @@ #![allow(clippy::type_complexity)] #![allow(clippy::too_many_arguments)] use super::super::errors::ProofVerifyError; +use ark_ec::AffineRepr; use crate::math::Math; use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::Transcript; @@ -12,6 +13,7 @@ use ark_ff::Field; use ark_serialize::*; use ark_std::{One, Zero}; use core::iter; +use std::ops::Mul; use std::ops::MulAssign; #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] @@ -33,9 +35,9 @@ impl BulletReductionProof { /// either 0 or a power of 2. 
pub fn prove( transcript: &mut PoseidonTranscript, - Q: &G, - G_vec: &[G], - H: &G, + Q: &G::Affine, + G_vec: &[G::Affine], + H: &G::Affine, a_vec: &[G::ScalarField], b_vec: &[G::ScalarField], blind: &G::ScalarField, @@ -73,8 +75,8 @@ impl BulletReductionProof { while n != 1 { n /= 2; - let (a_L, a_R) = a.split_at_mut(n); - let (b_L, b_R) = b.split_at_mut(n); + let (mut a_L, mut a_R) = a.split_at_mut(n); + let (mut b_L, mut b_R) = b.split_at_mut(n); let (G_L, G_R) = G.split_at_mut(n); let c_L = inner_product(a_L, b_R); @@ -86,10 +88,10 @@ impl BulletReductionProof { .chain(iter::once(Q)) .chain(iter::once(H)) .cloned() - .collect::>(); + .collect::>(); let L = G::msm_unchecked( - &G::normalize_batch(&gright_vec), + &gright_vec, a_L .iter() .chain(iter::once(&c_L)) @@ -103,9 +105,9 @@ impl BulletReductionProof { .chain(iter::once(Q)) .chain(iter::once(H)) .cloned() - .collect::>(); + .collect::>(); let R = G::msm_unchecked( - &G::normalize_batch(&gl_vec), + &gl_vec, a_R .iter() .chain(iter::once(&c_R)) @@ -118,33 +120,33 @@ impl BulletReductionProof { transcript.append(b"", &L); transcript.append(b"", &R); - let u = transcript.challenge_scalar(b""); + let u: G::ScalarField = transcript.challenge_scalar(b""); let u_inv = u.inverse().unwrap(); for i in 0..n { a_L[i] = a_L[i] * u + u_inv * a_R[i]; b_L[i] = b_L[i] * u_inv + u * b_R[i]; - G_L[i] = G::msm(&[G_L[i], G_R[i]], &[u_inv, u]); + G_L[i] = (G_L[i].mul(u_inv) + G_R[i].mul(u)).into_affine(); } blind_fin = blind_fin + u * u * blind_L + u_inv * u_inv * blind_R; - L_vec.push(L.compress()); - R_vec.push(R.compress()); + L_vec.push(L); + R_vec.push(R); a = a_L; b = b_L; G = G_L; } - let Gamma_hat = G::msm(&[G[0], *Q, *H], &[a[0], a[0] * b[0], blind_fin]); + let Gamma_hat = G::msm_unchecked(&[G[0], *Q, *H], &[a[0], a[0] * b[0], blind_fin]); ( BulletReductionProof { L_vec, R_vec }, Gamma_hat, a[0], b[0], - G[0], + G[0].into_group(), blind_fin, ) } @@ -177,9 +179,9 @@ impl BulletReductionProof { // 1. 
Recompute x_k,...,x_1 based on the proof transcript let mut challenges = Vec::with_capacity(lg_n); for (L, R) in self.L_vec.iter().zip(self.R_vec.iter()) { - transcript.append_point(L); - transcript.append_point(R); - challenges.push(transcript.challenge_scalar()); + transcript.append(b"", L); + transcript.append(b"", R); + challenges.push(transcript.challenge_scalar(b"")); } // 2. Compute 1/(u_k...u_1) and 1/u_k, ..., 1/u_1 @@ -225,28 +227,25 @@ impl BulletReductionProof { a: &[G::ScalarField], transcript: &mut PoseidonTranscript, Gamma: &G, - G: &[G], + Gs: &[G::Affine], ) -> Result<(G, G, G::ScalarField), ProofVerifyError> { let (u_sq, u_inv_sq, s) = self.verification_scalars(n, transcript)?; - let Ls = self - .L_vec - .iter() - .map(|p| G::decompress(p).ok_or(ProofVerifyError::InternalError)) - .collect::, _>>()?; - + let Ls = self.L_vec; let Rs = self.R_vec; - let G_hat = G::msm(G, s.as_slice()); + let G_hat = G::msm(Gs, s.as_slice()).map_err(|_| ProofVerifyError::InternalError)?; let a_hat = inner_product(a, &s); let Gamma_hat = G::msm( - Ls.iter() - .chain(Rs.iter()) - .chain(iter::once(Gamma)) - .copied() - .collect::>() - .as_slice(), + &G::normalize_batch( + &Ls + .iter() + .chain(Rs.iter()) + .chain(iter::once(Gamma)) + .copied() + .collect::>(), + ), u_sq .iter() .chain(u_inv_sq.iter()) @@ -254,7 +253,8 @@ impl BulletReductionProof { .copied() .collect::>() .as_slice(), - ); + ) + .map_err(|_| ProofVerifyError::InternalError)?; Ok((G_hat, Gamma_hat, a_hat)) } diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index e753ea11..29b801ce 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -57,18 +57,18 @@ impl DotProductProofLog { assert_eq!(gens.n, n); // produce randomness for generating a proof - let d = G::ScalarField::rand(&mut rand::thread_rng()).into(); + let d = G::ScalarField::rand(&mut rand::thread_rng()); let r_delta = G::ScalarField::rand(&mut rand::thread_rng()).into(); let r_beta = G::ScalarField::rand(&mut rand::thread_rng()).into(); let 
blinds_vec = { - let v1 = (0..2 * n.log_2()) + (0..2 * n.log_2()) .map(|_| { ( G::ScalarField::rand(&mut rand::thread_rng()).into(), G::ScalarField::rand(&mut rand::thread_rng()).into(), ) }) - .collect::>(); + .collect::>() }; let Cx = PedersenCommit::commit_slice(x_vec, blind_x, &gens.gens_n); @@ -95,17 +95,17 @@ impl DotProductProofLog { let delta = { let gens_hat = MultiCommitGens { n: 1, - G: vec![g_hat], + G: vec![g_hat.into_affine()], h: gens.gens_1.h, }; - d.commit(&r_delta, &gens_hat).compress() + PedersenCommit::commit_scalar(&d, &r_delta, &gens_hat) }; - transcript.append_point(&delta); + transcript.append(b"", &delta); - let beta = d.commit(&r_beta, &gens.gens_1).compress(); - transcript.append_point(&beta); + let beta = PedersenCommit::commit_scalar(&d, &r_beta, &gens.gens_1); + transcript.append(b"", &beta); - let c = transcript.challenge_scalar(); + let c: G::ScalarField = transcript.challenge_scalar(b""); let z1 = d + c * y_hat; let z2 = a_hat * (c * rhat_Gamma + r_beta) + r_delta; @@ -140,11 +140,11 @@ impl DotProductProofLog { // Cy.append_to_poseidon( transcript); // a.append_to_poseidon( transcript); - transcript.append_point(Cx); - transcript.append_point(Cy); - transcript.append_scalar_vector(a); + transcript.append(b"", Cx); + transcript.append(b"", Cy); + transcript.append(b"", &a); - let Gamma = Cx + Cy; + let Gamma = Cx.add(Cy); let (g_hat, Gamma_hat, a_hat) = self @@ -153,10 +153,10 @@ impl DotProductProofLog { // self.delta.append_to_poseidon( transcript); // self.beta.append_to_poseidon( transcript); - transcript.append_point(&self.delta); - transcript.append_point(&self.beta); + transcript.append(b"", &self.delta); + transcript.append(b"", &self.beta); - let c = transcript.challenge_scalar(); + let c = transcript.challenge_scalar(b""); let c_s = &c; let beta_s = self.beta; @@ -186,6 +186,7 @@ mod tests { use super::*; use ark_std::UniformRand; type F = ark_bls12_377::Fr; + type G = ark_bls12_377::G1Projective; #[test] fn 
check_dotproductproof_log() { @@ -193,11 +194,11 @@ mod tests { let n = 1024; - let gens = DotProductProofGens::new(n, b"test-1024"); + let gens = DotProductProofGens::::new(n, b"test-1024"); let x: Vec = (0..n).map(|_i| F::rand(&mut rng)).collect(); let a: Vec = (0..n).map(|_i| F::rand(&mut rng)).collect(); - let y = DotProductProofLog::compute_dotproduct(&x, &a); + let y = crate::dot_product(&x, &a); let r_x = F::rand(&mut rng); let r_y = F::rand(&mut rng); diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index bccba0f7..ba7e0fd1 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -26,7 +26,7 @@ impl Transcript for PoseidonTranscript { self.sponge.absorb(&buf); } - fn challenge_scalar(&mut self, _label: &'static [u8]) -> F { + fn challenge_scalar(&mut self, _label: &'static [u8]) -> FF { self.sponge.squeeze_field_elements(1).remove(0) } } @@ -43,6 +43,6 @@ impl PoseidonTranscript { pub fn new_from_state(&mut self, challenge: &F) { self.sponge = PoseidonSponge::new(&self.params); - self.append_scalar(challenge); + self.append(b"",challenge); } } diff --git a/src/product_tree.rs b/src/product_tree.rs index 761c77a0..817c487d 100644 --- a/src/product_tree.rs +++ b/src/product_tree.rs @@ -1,13 +1,12 @@ #![allow(dead_code)] -use crate::poseidon_transcript::PoseidonTranscript; - use super::dense_mlpoly::DensePolynomial; use super::dense_mlpoly::EqPolynomial; use super::math::Math; use super::sumcheck::SumcheckInstanceProof; +use crate::poseidon_transcript::PoseidonTranscript; +use crate::transcript::Transcript; use ark_ff::PrimeField; use ark_serialize::*; -use ark_std::One; #[derive(Debug)] pub struct ProductCircuit { @@ -200,11 +199,11 @@ impl ProductCircuitEvalProof { transcript, ); - transcript.append_scalar(&claims_prod[0]); - transcript.append_scalar(&claims_prod[1]); + transcript.append(b"", &claims_prod[0]); + transcript.append(b"", &claims_prod[1]); // produce a random challenge - let r_layer = 
transcript.challenge_scalar(); + let r_layer = transcript.challenge_scalar(b""); claim = claims_prod[0] + r_layer * (claims_prod[1] - claims_prod[0]); let mut ext = vec![r_layer]; @@ -230,8 +229,8 @@ impl ProductCircuitEvalProof { let (claim_last, rand_prod) = self.proof[i].verify(claim, num_rounds, 3, transcript); let claims_prod = &self.proof[i].claims; - transcript.append_scalar(&claims_prod[0]); - transcript.append_scalar(&claims_prod[1]); + transcript.append(b"", &claims_prod[0]); + transcript.append(b"", &claims_prod[1]); assert_eq!(rand.len(), rand_prod.len()); let eq: F = (0..rand.len()) @@ -240,7 +239,7 @@ impl ProductCircuitEvalProof { assert_eq!(claims_prod[0] * claims_prod[1] * eq, claim_last); // produce a random challenge - let r_layer = transcript.challenge_scalar(); + let r_layer = transcript.challenge_scalar(b""); claim = (F::one() - r_layer) * claims_prod[0] + r_layer * claims_prod[1]; let mut ext = vec![r_layer]; ext.extend(rand_prod); @@ -318,7 +317,7 @@ impl ProductCircuitEvalProofBatched { ); // produce a fresh set of coeffs and a joint claim - let coeff_vec = transcript.challenge_vector(claims_to_verify.len()); + let coeff_vec = transcript.challenge_scalar_vec(b"", claims_to_verify.len()); let claim = (0..claims_to_verify.len()) .map(|i| claims_to_verify[i] * coeff_vec[i]) .sum(); @@ -335,22 +334,22 @@ impl ProductCircuitEvalProofBatched { let (claims_prod_left, claims_prod_right, _claims_eq) = claims_prod; for i in 0..prod_circuit_vec.len() { - transcript.append_scalar(&claims_prod_left[i]); - transcript.append_scalar(&claims_prod_right[i]); + transcript.append(b"", &claims_prod_left[i]); + transcript.append(b"", &claims_prod_right[i]); } if layer_id == 0 && !dotp_circuit_vec.is_empty() { let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = claims_dotp; for i in 0..dotp_circuit_vec.len() { - transcript.append_scalar(&claims_dotp_left[i]); - transcript.append_scalar(&claims_dotp_right[i]); - 
transcript.append_scalar(&claims_dotp_weight[i]); + transcript.append(b"", &claims_dotp_left[i]); + transcript.append(b"", &claims_dotp_right[i]); + transcript.append(b"", &claims_dotp_weight[i]); } claims_dotp_final = (claims_dotp_left, claims_dotp_right, claims_dotp_weight); } // produce a random challenge to condense two claims into a single claim - let r_layer = transcript.challenge_scalar(); + let r_layer = transcript.challenge_scalar(b""); claims_to_verify = (0..prod_circuit_vec.len()) .map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i])) @@ -396,7 +395,7 @@ impl ProductCircuitEvalProofBatched { } // produce random coefficients, one for each instance - let coeff_vec = transcript.challenge_vector(claims_to_verify.len()); + let coeff_vec: Vec = transcript.challenge_scalar_vec(b"", claims_to_verify.len()); // produce a joint claim let claim = (0..claims_to_verify.len()) @@ -411,8 +410,8 @@ impl ProductCircuitEvalProofBatched { assert_eq!(claims_prod_right.len(), claims_prod_vec.len()); for i in 0..claims_prod_vec.len() { - transcript.append_scalar(&claims_prod_left[i]); - transcript.append_scalar(&claims_prod_right[i]); + transcript.append(b"", &claims_prod_left[i]); + transcript.append(b"", &claims_prod_right[i]); } assert_eq!(rand.len(), rand_prod.len()); @@ -428,9 +427,9 @@ impl ProductCircuitEvalProofBatched { let num_prod_instances = claims_prod_vec.len(); let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = &self.claims_dotp; for i in 0..claims_dotp_left.len() { - transcript.append_scalar(&claims_dotp_left[i]); - transcript.append_scalar(&claims_dotp_right[i]); - transcript.append_scalar(&claims_dotp_weight[i]); + transcript.append(b"", &claims_dotp_left[i]); + transcript.append(b"", &claims_dotp_right[i]); + transcript.append(b"", &claims_dotp_weight[i]); claim_expected += coeff_vec[i + num_prod_instances] * claims_dotp_left[i] @@ -442,7 +441,7 @@ impl ProductCircuitEvalProofBatched { assert_eq!(claim_expected, 
claim_last); // produce a random challenge - let r_layer = transcript.challenge_scalar(); + let r_layer = transcript.challenge_scalar(b""); claims_to_verify = (0..claims_prod_left.len()) .map(|i| claims_prod_left[i] + r_layer * (claims_prod_right[i] - claims_prod_left[i])) diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index 05c55e3f..c5e32768 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -10,7 +10,6 @@ use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::{Transcript, TranscriptWriter}; use ark_ec::pairing::Pairing; use ark_ec::CurveGroup; -use ark_ff::Field; use ark_ff::PrimeField; use ark_serialize::*; use ark_std::{One, UniformRand, Zero}; @@ -58,9 +57,9 @@ pub struct R1CSCommitment { impl TranscriptWriter for R1CSCommitment { fn write_to_transcript(&self, transcript: &mut impl Transcript) { - transcript.append(self.num_cons as u64, ""); - transcript.append(self.num_vars as u64, ""); - transcript.append(self.num_inputs as u64, ""); + transcript.append(b"", &self.num_cons as u64); + transcript.append(b"", &self.num_vars as u64); + transcript.append(b"", &self.num_inputs as u64); self.comm.write_to_transcript(transcript); } } @@ -188,9 +187,9 @@ impl R1CSInstance { }; // three sparse matrices - let mut A: Vec = Vec::new(); - let mut B: Vec = Vec::new(); - let mut C: Vec = Vec::new(); + let mut A: Vec> = Vec::new(); + let mut B: Vec> = Vec::new(); + let mut C: Vec> = Vec::new(); let one = F::one(); for i in 0..num_cons { let A_idx = i % size_z; diff --git a/src/r1csproof.rs b/src/r1csproof.rs index b62340fb..2c44a9cb 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -8,7 +8,9 @@ use crate::parameters::poseidon_params; use crate::poseidon_transcript::PoseidonTranscript; use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; +use crate::transcript::{Transcript, TranscriptWriter}; use ark_bls12_377::Bls12_377 as I; +use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; use 
ark_ec::pairing::Pairing; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; @@ -142,15 +144,15 @@ impl R1CSProof { let mut bytes = Vec::new(); t.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); - transcript.append_bytes(&bytes); + transcript.append(b"", &bytes); // comm.append_to_poseidon(transcript); timer_commit.stop(); - let c = transcript.challenge_scalar(); + let c = transcript.challenge_scalar(b""); transcript.new_from_state(&c); - transcript.append_scalar_vector(input); + transcript.append(b"", &input); let timer_sc_proof_phase1 = Timer::new("prove_sc_phase_one"); @@ -167,7 +169,7 @@ impl R1CSProof { // derive the verifier's challenge tau let (num_rounds_x, num_rounds_y) = (inst.get_num_cons().log_2(), z.len().log_2()); - let tau = transcript.challenge_vector(num_rounds_x); + let tau = transcript.challenge_scalar_vec(b"", num_rounds_x); // compute the initial evaluation table for R(\tau, x) let mut poly_tau = DensePolynomial::new(EqPolynomial::new(tau).evals()); let (mut poly_Az, mut poly_Bz, mut poly_Cz) = @@ -197,9 +199,9 @@ impl R1CSProof { let timer_sc_proof_phase2 = Timer::new("prove_sc_phase_two"); // combine the three claims into a single claim - let r_A = transcript.challenge_scalar(); - let r_B = transcript.challenge_scalar(); - let r_C = transcript.challenge_scalar(); + let r_A: E::ScalarField = transcript.challenge_scalar(b""); + let r_B: E::ScalarField = transcript.challenge_scalar(b""); + let r_C: E::ScalarField = transcript.challenge_scalar(b""); let claim_phase2 = r_A * Az_claim + r_B * Bz_claim + r_C * Cz_claim; let evals_ABC = { @@ -224,7 +226,7 @@ impl R1CSProof { transcript, ); timer_sc_proof_phase2.stop(); - let c = transcript.challenge_scalar(); + let c = transcript.challenge_scalar(b""); transcript.new_from_state(&c); // TODO: modify the polynomial evaluation in Spartan to be consistent @@ -272,6 +274,7 @@ impl R1CSProof { evals: &(E::ScalarField, E::ScalarField, E::ScalarField), transcript: &mut 
PoseidonTranscript, gens: &R1CSGens, + poseidon: PoseidonConfig, ) -> Result<(u128, u128, u128), ProofVerifyError> { // serialise and add the IPP commitment to the transcript let mut bytes = Vec::new(); @@ -279,16 +282,16 @@ impl R1CSProof { .t .serialize_with_mode(&mut bytes, Compress::Yes) .unwrap(); - transcript.append_bytes(&bytes); + transcript.append(b"", &bytes); - let c = transcript.challenge_scalar(); + let c = transcript.challenge_scalar(b""); let mut input_as_sparse_poly_entries = vec![SparsePolyEntry::new(0, E::ScalarField::one())]; //remaining inputs input_as_sparse_poly_entries.extend( (0..input.len()) .map(|i| SparsePolyEntry::new(i + 1, input[i])) - .collect::>(), + .collect::>>(), ); let n = num_vars; @@ -300,7 +303,7 @@ impl R1CSProof { num_cons, input: input.to_vec(), evals: *evals, - params: poseidon_params(), + params: poseidon, prev_challenge: c, claims_phase2: self.claims_phase2, polys_sc1: self.sc_proof_phase1.polys.clone(), @@ -315,8 +318,9 @@ impl R1CSProof { let circuit = R1CSVerificationCircuit::new(&config); // this is universal, we don't measure it + // TODO put this _outside_ the verification let start = Instant::now(); - let (pk, vk) = Groth16::::setup(circuit.clone(), &mut rand::thread_rng()).unwrap(); + let (pk, vk) = Groth16::::setup(circuit.clone(), &mut rand::thread_rng()).unwrap(); let ds = start.elapsed().as_millis(); let prove_outer = Timer::new("provecircuit"); From 627ecfbb6c0ebb6e14d7e2c16584551181a49243 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 18:16:33 +0100 Subject: [PATCH 28/64] compiling but test failing --- examples/cubic.rs | 41 +++++++++++++++++---------- profiler/nizk.rs | 10 +++++-- profiler/snark.rs | 9 ++++-- src/commitments.rs | 4 +-- src/constraints.rs | 2 +- src/dense_mlpoly.rs | 12 +++----- src/lib.rs | 37 +++++++++++++++++++------ src/macros.rs | 16 ----------- src/mipp.rs | 3 +- src/nizk/bullet.rs | 10 +++---- src/nizk/mod.rs | 3 -- src/poseidon_transcript.rs | 2 -- 
src/r1csinstance.rs | 1 - src/r1csproof.rs | 8 ++---- src/sparse_mlpoly.rs | 57 ++++++++++++++------------------------ src/sqrt_pst.rs | 6 ++-- src/sumcheck.rs | 14 +++++----- src/unipoly.rs | 26 ++++++++--------- 18 files changed, 125 insertions(+), 136 deletions(-) diff --git a/examples/cubic.rs b/examples/cubic.rs index 6d068d8a..12cfc4c3 100644 --- a/examples/cubic.rs +++ b/examples/cubic.rs @@ -8,7 +8,7 @@ //! `(Z3 + 5) * 1 - I0 = 0` //! //! [here]: https://medium.com/@VitalikButerin/quadratic-arithmetic-programs-from-zero-to-hero-f6d558cea649 -use ark_bls12_377::Fr as Scalar; +use ark_ec::pairing::Pairing; use ark_ff::{BigInteger, PrimeField}; use ark_std::{One, UniformRand, Zero}; use libspartan::{ @@ -17,14 +17,14 @@ use libspartan::{ }; #[allow(non_snake_case)] -fn produce_r1cs() -> ( +fn produce_r1cs() -> ( usize, usize, usize, usize, - Instance, - VarsAssignment, - InputsAssignment, + Instance, + VarsAssignment, + InputsAssignment, ) { // parameters of the R1CS instance let num_cons = 4; @@ -38,7 +38,7 @@ fn produce_r1cs() -> ( let mut B: Vec<(usize, usize, Vec)> = Vec::new(); let mut C: Vec<(usize, usize, Vec)> = Vec::new(); - let one = Scalar::one().into_bigint().to_bytes_le(); + let one = E::ScalarField::one().into_bigint().to_bytes_le(); // R1CS is a set of three sparse matrices A B C, where is a row for every // constraint and a column for every entry in z = (vars, 1, inputs) @@ -67,22 +67,26 @@ fn produce_r1cs() -> ( // constraint 3 entries in (A,B,C) // constraint 3 is (Z3 + 5) * 1 - I0 = 0. 
A.push((3, 3, one.clone())); - A.push((3, num_vars, Scalar::from(5u32).into_bigint().to_bytes_le())); + A.push(( + 3, + num_vars, + E::ScalarField::from(5u32).into_bigint().to_bytes_le(), + )); B.push((3, num_vars, one.clone())); C.push((3, num_vars + 1, one)); - let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap(); + let inst = Instance::::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap(); // compute a satisfying assignment let mut rng = ark_std::rand::thread_rng(); - let z0 = Scalar::rand(&mut rng); + let z0 = E::ScalarField::rand(&mut rng); let z1 = z0 * z0; // constraint 0 let z2 = z1 * z0; // constraint 1 let z3 = z2 + z0; // constraint 2 - let i0 = z3 + Scalar::from(5u32); // constraint 3 + let i0 = z3 + E::ScalarField::from(5u32); // constraint 3 // create a VarsAssignment - let mut vars = vec![Scalar::zero().into_bigint().to_bytes_le(); num_vars]; + let mut vars = vec![E::ScalarField::zero().into_bigint().to_bytes_le(); num_vars]; vars[0] = z0.into_bigint().to_bytes_le(); vars[1] = z1.into_bigint().to_bytes_le(); vars[2] = z2.into_bigint().to_bytes_le(); @@ -90,7 +94,7 @@ fn produce_r1cs() -> ( let assignment_vars = VarsAssignment::new(&vars).unwrap(); // create an InputsAssignment - let mut inputs = vec![Scalar::zero().into_bigint().to_bytes_le(); num_inputs]; + let mut inputs = vec![E::ScalarField::zero().into_bigint().to_bytes_le(); num_inputs]; inputs[0] = i0.into_bigint().to_bytes_le(); let assignment_inputs = InputsAssignment::new(&inputs).unwrap(); @@ -109,6 +113,7 @@ fn produce_r1cs() -> ( ) } +type E = ark_bls12_377::Bls12_377; fn main() { // produce an R1CS instance let ( @@ -119,12 +124,12 @@ fn main() { inst, assignment_vars, assignment_inputs, - ) = produce_r1cs(); + ) = produce_r1cs::(); let params = poseidon_params(); // produce public parameters - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_non_zero_entries); + let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_non_zero_entries); 
// create a commitment to the R1CS instance let (comm, decomm) = SNARK::encode(&inst, &gens); @@ -144,7 +149,13 @@ fn main() { // verify the proof of satisfiability let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof - .verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens) + .verify( + &comm, + &assignment_inputs, + &mut verifier_transcript, + &gens, + params + ) .is_ok()); println!("proof verification successful!"); } diff --git a/profiler/nizk.rs b/profiler/nizk.rs index 7fc47d21..794b4783 100644 --- a/profiler/nizk.rs +++ b/profiler/nizk.rs @@ -10,6 +10,9 @@ use libspartan::parameters::poseidon_params; use libspartan::poseidon_transcript::PoseidonTranscript; use libspartan::{Instance, NIZKGens, NIZK}; +type F = ark_bls12_377::Fr; +type E = ark_bls12_377::Bls12_377; + fn print(msg: &str) { let star = "* "; println!("{:indent$}{}{}", "", star, msg, indent = 2); @@ -26,10 +29,11 @@ pub fn main() { let num_inputs = 10; // produce a synthetic R1CSInstance - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + let (inst, vars, inputs) = + Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); // produce public generators - let gens = NIZKGens::new(num_cons, num_vars, num_inputs); + let gens = NIZKGens::::new(num_cons, num_vars, num_inputs); let params = poseidon_params(); // produce a proof of satisfiability @@ -46,7 +50,7 @@ pub fn main() { // verify the proof of satisfiability let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof - .verify(&inst, &inputs, &mut verifier_transcript, &gens) + .verify(&inst, &inputs, &mut verifier_transcript, &gens, params) .is_ok()); println!(); diff --git a/profiler/snark.rs b/profiler/snark.rs index 6ab22980..a913fdfc 100644 --- a/profiler/snark.rs +++ b/profiler/snark.rs @@ -8,6 +8,8 @@ use ark_serialize::*; use libspartan::parameters::poseidon_params; use libspartan::poseidon_transcript::PoseidonTranscript; use 
libspartan::{Instance, SNARKGens, SNARK}; +type F = ark_bls12_377::Fr; +type E = ark_bls12_377::Bls12_377; fn print(msg: &str) { let star = "* "; @@ -25,10 +27,11 @@ pub fn main() { let num_inputs = 10; // produce a synthetic R1CSInstance - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + let (inst, vars, inputs) = + Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); // produce public generators - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_cons); + let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_cons); // create a commitment to R1CSInstance let (comm, decomm) = SNARK::encode(&inst, &gens); @@ -57,7 +60,7 @@ pub fn main() { // verify the proof of satisfiability let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof - .verify(&comm, &inputs, &mut verifier_transcript, &gens) + .verify(&comm, &inputs, &mut verifier_transcript, &gens, params) .is_ok()); println!(); diff --git a/src/commitments.rs b/src/commitments.rs index 4acc20e4..e67fac9a 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -22,7 +22,7 @@ impl MultiCommitGens { sponge.absorb(&b); let gens = (0..=n) - .map(|i| { + .map(|_| { let mut el_aff: Option = None; while el_aff.is_none() { let uniform_bytes = sponge.squeeze_bytes(64); @@ -85,4 +85,4 @@ impl PedersenCommit { assert_eq!(scalars.len(), gens_n.n); ::msm_unchecked(&gens_n.G, scalars) + gens_n.h.mul(blind) } -} \ No newline at end of file +} diff --git a/src/constraints.rs b/src/constraints.rs index 4e3f897b..9ebf1704 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -7,7 +7,7 @@ use crate::{ unipoly::UniPoly, }; -use ark_ff::{PrimeField, Zero}; +use ark_ff::PrimeField; use ark_crypto_primitives::sponge::{ constraints::CryptographicSpongeVar, diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 3989224c..f9a820fe 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -2,20 +2,20 @@ use 
crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::{Transcript, TranscriptWriter}; - use super::commitments::{MultiCommitGens, PedersenCommit}; use super::errors::ProofVerifyError; use super::math::Math; use super::nizk::{DotProductProofGens, DotProductProofLog}; use ark_ec::scalar_mul::variable_base::VariableBaseMSM; use ark_ec::{pairing::Pairing, CurveGroup}; -use ark_ff::{One, PrimeField, Zero}; +use ark_ff::{PrimeField, Zero}; use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{CommitterKey, VerifierKey}; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::*; use core::ops::Index; use std::ops::{Add, AddAssign, Neg, Sub, SubAssign}; +use ark_std::One; #[cfg(feature = "multicore")] use rayon::prelude::*; @@ -474,10 +474,6 @@ impl PolyEvalProof where E: Pairing, { - fn protocol_name() -> &'static [u8] { - b"polynomial evaluation proof" - } - pub fn prove( poly: &DensePolynomial, blinds_opt: Option<&PolyCommitmentBlinds>, @@ -547,9 +543,9 @@ where let (L, R) = eq.compute_factored_evals(); // compute a weighted sum of commitments and L - let C_decompressed = comm.C; + let C_decompressed = &comm.C; - let C_LZ = ::msm(&C_decompressed, &L).compress(); + let C_LZ = ::msm(&::normalize_batch(C_decompressed), &L).unwrap(); self .proof diff --git a/src/lib.rs b/src/lib.rs index 6936d082..920904d6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -41,6 +41,7 @@ pub mod parameters; mod constraints; pub mod poseidon_transcript; +use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; use ark_serialize::*; use core::cmp::max; use errors::{ProofVerifyError, R1CSError}; @@ -334,9 +335,6 @@ pub struct SNARK { } impl SNARK { - fn protocol_name() -> &'static [u8] { - b"Spartan SNARK proof" - } /// A public computation to create a commitment to an R1CS instance pub fn encode( @@ -455,6 +453,7 @@ impl SNARK { input: &InputsAssignment, transcript: &mut PoseidonTranscript, gens: &SNARKGens, + poseidon: 
PoseidonConfig, ) -> Result<(u128, u128, u128), ProofVerifyError> { let timer_verify = Timer::new("SNARK::verify"); // transcript.append_protocol_name(SNARK::protocol_name()); @@ -472,6 +471,7 @@ impl SNARK { &self.inst_evals, transcript, &gens.gens_r1cs_sat, + poseidon, )?; timer_sat_proof.stop(); @@ -531,9 +531,6 @@ pub struct NIZK { } impl NIZK { - fn protocol_name() -> &'static [u8] { - b"Spartan NIZK proof" - } /// A method to produce a NIZK proof of the satisfiability of an R1CS instance pub fn prove( @@ -588,6 +585,7 @@ impl NIZK { input: &InputsAssignment, transcript: &mut PoseidonTranscript, gens: &NIZKGens, + poseidon: PoseidonConfig, ) -> Result { let timer_verify = Timer::new("NIZK::verify"); @@ -610,6 +608,7 @@ impl NIZK { &inst_evals, transcript, &gens.gens_r1cs_sat, + poseidon, )?; // verify if claimed rx and ry are correct @@ -628,6 +627,7 @@ impl NIZK { input: &InputsAssignment, transcript: &mut PoseidonTranscript, gens: &NIZKGens, + poseidon: PoseidonConfig, ) -> Result<(u128, u128, u128), ProofVerifyError> { let timer_verify = Timer::new("NIZK::verify"); @@ -651,6 +651,7 @@ impl NIZK { &inst_evals, transcript, &gens.gens_r1cs_sat, + poseidon, )?; // verify if claimed rx and ry are correct @@ -714,7 +715,13 @@ mod tests { // verify the proof let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof - .verify(&comm, &inputs, &mut verifier_transcript, &gens) + .verify( + &comm, + &inputs, + &mut verifier_transcript, + &gens, + poseidon_params() + ) .is_ok()); } @@ -824,7 +831,13 @@ mod tests { // verify the SNARK let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof - .verify(&comm, &assignment_inputs, &mut verifier_transcript, &gens) + .verify( + &comm, + &assignment_inputs, + &mut verifier_transcript, + &gens, + poseidon_params() + ) .is_ok()); // NIZK public params @@ -845,7 +858,13 @@ mod tests { // verify the NIZK let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof - 
.verify_groth16(&inst, &assignment_inputs, &mut verifier_transcript, &gens) + .verify_groth16( + &inst, + &assignment_inputs, + &mut verifier_transcript, + &gens, + poseidon_params() + ) .is_ok()); } } diff --git a/src/macros.rs b/src/macros.rs index a3469c0b..f3aac906 100644 --- a/src/macros.rs +++ b/src/macros.rs @@ -54,19 +54,3 @@ macro_rules! par { )+ } } - -macro_rules! mul { - ($a:expr, $b:expr) => {{ - let mut a = $a; - a.mul_assign($b); - a - }}; -} - -macro_rules! sub { - ($a:expr, $b:expr) => {{ - let mut a = $a; - a.sub_assign($b); - a - }}; -} diff --git a/src/mipp.rs b/src/mipp.rs index cfcbcff5..161b2265 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -1,4 +1,3 @@ -use crate::constraints::PoseidonTranscripVar; use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::Transcript; use ark_ec::scalar_mul::variable_base::VariableBaseMSM; @@ -10,7 +9,7 @@ use ark_poly_commit::multilinear_pc::data_structures::{ CommitmentG2, CommitterKey, ProofG1, VerifierKey, }; use ark_poly_commit::multilinear_pc::MultilinearPC; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError, Write}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; use ark_std::One; use ark_std::Zero; use rayon::iter::ParallelIterator; diff --git a/src/nizk/bullet.rs b/src/nizk/bullet.rs index 5c8857c1..ab1d8ea0 100644 --- a/src/nizk/bullet.rs +++ b/src/nizk/bullet.rs @@ -4,10 +4,10 @@ #![allow(clippy::type_complexity)] #![allow(clippy::too_many_arguments)] use super::super::errors::ProofVerifyError; -use ark_ec::AffineRepr; use crate::math::Math; use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::Transcript; +use ark_ec::AffineRepr; use ark_ec::CurveGroup; use ark_ff::Field; use ark_serialize::*; @@ -75,8 +75,8 @@ impl BulletReductionProof { while n != 1 { n /= 2; - let (mut a_L, mut a_R) = a.split_at_mut(n); - let (mut b_L, mut b_R) = b.split_at_mut(n); + let (a_L, a_R) = a.split_at_mut(n); + let 
(b_L, b_R) = b.split_at_mut(n); let (G_L, G_R) = G.split_at_mut(n); let c_L = inner_product(a_L, b_R); @@ -231,8 +231,8 @@ impl BulletReductionProof { ) -> Result<(G, G, G::ScalarField), ProofVerifyError> { let (u_sq, u_inv_sq, s) = self.verification_scalars(n, transcript)?; - let Ls = self.L_vec; - let Rs = self.R_vec; + let Ls = &self.L_vec; + let Rs = &self.R_vec; let G_hat = G::msm(Gs, s.as_slice()).map_err(|_| ProofVerifyError::InternalError)?; let a_hat = inner_product(a, &s); diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index e2017e7b..9858b5c6 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -37,9 +37,6 @@ pub struct DotProductProofLog { } impl DotProductProofLog { - fn protocol_name() -> &'static [u8] { - b"dot product proof (log)" - } pub fn prove( gens: &DotProductProofGens, diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 389fc96d..ba7e0fd1 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -4,8 +4,6 @@ use ark_crypto_primitives::sponge::{ CryptographicSponge, }; use ark_ff::PrimeField; -use ark_ec::pairing::Pairing; -use ark_poly_commit::multilinear_pc::data_structures::Commitment; use ark_serialize::CanonicalSerialize; use ark_serialize::Compress; #[derive(Clone)] diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index afac675d..93157f58 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -12,7 +12,6 @@ use ark_ec::pairing::Pairing; use ark_ec::CurveGroup; use ark_ff::PrimeField; use ark_serialize::*; -use ark_std::{One, UniformRand, Zero}; use digest::{ExtendableOutput, Input}; use sha3::Shake256; diff --git a/src/r1csproof.rs b/src/r1csproof.rs index eabaa74d..48eac2f3 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -4,12 +4,10 @@ use super::errors::ProofVerifyError; use crate::constraints::{R1CSVerificationCircuit, VerifierConfig}; use crate::math::Math; use crate::mipp::MippProof; -use crate::parameters::poseidon_params; use 
crate::poseidon_transcript::PoseidonTranscript; use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; -use crate::transcript::{Transcript, TranscriptWriter}; -use ark_bls12_377::Bls12_377 as I; +use crate::transcript::{Transcript}; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; use ark_ec::pairing::Pairing; @@ -118,9 +116,6 @@ impl R1CSProof { (sc_proof_phase_two, r, claims) } - fn protocol_name() -> &'static [u8] { - b"R1CS proof" - } pub fn prove( inst: &R1CSInstance, @@ -332,6 +327,7 @@ impl R1CSProof { let timer_verification = Timer::new("verification"); let start = Instant::now(); + /// TODO : they are not necessary ? let (v_A, v_B, v_C, v_AB) = self.claims_phase2; let mut pubs = vec![]; diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index c5027730..e3f4b93f 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -82,9 +82,6 @@ pub struct DerefsEvalProof { } impl DerefsEvalProof { - fn protocol_name() -> &'static [u8] { - b"Derefs evaluation proof" - } fn prove_single( joint_poly: &DensePolynomial, @@ -686,9 +683,6 @@ struct HashLayerProof { } impl HashLayerProof { - fn protocol_name() -> &'static [u8] { - b"Sparse polynomial hash layer proof" - } fn prove_helper( rand: (&Vec, &Vec), @@ -747,9 +741,9 @@ impl HashLayerProof { // evaluate row_addr, row_read-ts, col_addr, col_read-ts, val at rand_ops // evaluate row_audit_ts and col_audit_ts at rand_mem let (eval_row_addr_vec, eval_row_read_ts_vec, eval_row_audit_ts) = - HashLayerProof::prove_helper((rand_mem, rand_ops), &dense.row); + HashLayerProof::::prove_helper((rand_mem, rand_ops), &dense.row); let (eval_col_addr_vec, eval_col_read_ts_vec, eval_col_audit_ts) = - HashLayerProof::prove_helper((rand_mem, rand_ops), &dense.col); + HashLayerProof::::prove_helper((rand_mem, rand_ops), &dense.col); let eval_val_vec = (0..dense.val.len()) .map(|i| dense.val[i].evaluate(rand_ops)) .collect::>(); @@ -788,7 +782,7 @@ impl HashLayerProof { // form a single 
decommitment using comb_comb_mem at rand_mem let evals_mem: Vec = vec![eval_row_audit_ts, eval_col_audit_ts]; // evals_mem.append_to_transcript(b"claim_evals_mem", transcript); - transcript.append(&evals_mem); + transcript.append(b"", &evals_mem); let challenges_mem = transcript.challenge_scalar_vec(b"", evals_mem.len().log_2()); let mut poly_evals_mem = DensePolynomial::new(evals_mem); @@ -985,7 +979,7 @@ impl HashLayerProof { let mut r_joint_mem = challenges_mem; r_joint_mem.extend(rand_mem); // joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript); - transcript.append(b"",&joint_claim_eval_mem); + transcript.append(b"", &joint_claim_eval_mem); self.proof_mem.verify_plain( &gens.gens_mem, transcript, @@ -996,7 +990,7 @@ impl HashLayerProof { // verify the claims from the product layer let (eval_ops_addr, eval_read_ts, eval_audit_ts) = &self.eval_row; - HashLayerProof::verify_helper( + HashLayerProof::::verify_helper( &(rand_mem, rand_ops), claims_row, eval_row_ops_val, @@ -1009,7 +1003,7 @@ impl HashLayerProof { )?; let (eval_ops_addr, eval_read_ts, eval_audit_ts) = &self.eval_col; - HashLayerProof::verify_helper( + HashLayerProof::::verify_helper( &(rand_mem, rand_ops), claims_col, eval_col_ops_val, @@ -1036,9 +1030,6 @@ struct ProductLayerProof { } impl ProductLayerProof { - fn protocol_name() -> &'static [u8] { - b"Sparse polynomial product layer proof" - } pub fn prove( row_prod_layer: &mut ProductLayer, @@ -1066,9 +1057,9 @@ impl ProductLayerProof { let rs: F = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); assert_eq!(row_eval_init * ws, rs * row_eval_audit); - transcript.append(b"",&row_eval_init); + transcript.append(b"", &row_eval_init); transcript.append(b"", &row_eval_read); - transcript.append(b""; &row_eval_write); + transcript.append(b"", &row_eval_write); transcript.append(b"", &row_eval_audit); let col_eval_init = col_prod_layer.init.evaluate(); @@ -1087,10 +1078,10 @@ impl ProductLayerProof { let rs: F = 
(0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); assert_eq!(col_eval_init * ws, rs * col_eval_audit); - transcript.append(&col_eval_init); - transcript.append(&col_eval_read); - transcript.append(&col_eval_write); - transcript.append(&col_eval_audit); + transcript.append(b"", &col_eval_init); + transcript.append(b"", &col_eval_read); + transcript.append(b"", &col_eval_write); + transcript.append(b"", &col_eval_audit); // prepare dotproduct circuit for batching then with ops-related product circuits assert_eq!(eval.len(), derefs.row_ops_val.len()); @@ -1115,8 +1106,8 @@ impl ProductLayerProof { // eval_dotp_left.append_to_transcript(b"claim_eval_dotp_left", transcript); // eval_dotp_right.append_to_transcript(b"claim_eval_dotp_right", transcript); - transcript.append(&eval_dotp_left); - transcript.append(&eval_dotp_right); + transcript.append(b"", &eval_dotp_left); + transcript.append(b"", &eval_dotp_right); assert_eq!(eval_dotp_left + eval_dotp_right, eval[i]); eval_dotp_left_vec.push(eval_dotp_left); eval_dotp_right_vec.push(eval_dotp_right); @@ -1252,10 +1243,10 @@ impl ProductLayerProof { // row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript); // row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript); - transcript.append(row_eval_init); - transcript.append(row_eval_read); - transcript.append(row_eval_write); - transcript.append(row_eval_audit); + transcript.append(b"", row_eval_init); + transcript.append(b"", row_eval_read); + transcript.append(b"", row_eval_write); + transcript.append(b"", row_eval_audit); // subset check let (col_eval_init, col_eval_read, col_eval_write, col_eval_audit) = &self.eval_col; @@ -1331,9 +1322,6 @@ struct PolyEvalNetworkProof { } impl PolyEvalNetworkProof { - fn protocol_name() -> &'static [u8] { - b"Sparse polynomial evaluation proof" - } pub fn prove( network: &mut PolyEvalNetwork, @@ -1435,9 +1423,6 @@ pub struct SparseMatPolyEvalProof { } impl SparseMatPolyEvalProof { - fn 
protocol_name() -> &'static [u8] { - b"Sparse polynomial evaluation proof" - } fn equalize( rx: &[E::ScalarField], @@ -1475,7 +1460,7 @@ impl SparseMatPolyEvalProof { let (mem_rx, mem_ry) = { // equalize the lengths of rx and ry - let (rx_ext, ry_ext) = SparseMatPolyEvalProof::equalize(rx, ry); + let (rx_ext, ry_ext) = SparseMatPolyEvalProof::::equalize(rx, ry); let poly_rx = EqPolynomial::new(rx_ext).evals(); let poly_ry = EqPolynomial::new(ry_ext).evals(); (poly_rx, poly_ry) @@ -1533,7 +1518,7 @@ impl SparseMatPolyEvalProof { // transcript.append_protocol_name(SparseMatPolyEvalProof::protocol_name()); // equalize the lengths of rx and ry - let (rx_ext, ry_ext) = SparseMatPolyEvalProof::equalize(rx, ry); + let (rx_ext, ry_ext) = SparseMatPolyEvalProof::::equalize(rx, ry); let (nz, num_mem_cells) = (comm.num_ops, comm.num_mem_cells); assert_eq!(rx_ext.len().pow2(), num_mem_cells); @@ -1640,7 +1625,7 @@ mod tests { } let poly_M = SparseMatPolynomial::new(num_vars_x, num_vars_y, M); - let gens = SparseMatPolyCommitmentGens::new( + let gens = SparseMatPolyCommitmentGens::::new( b"gens_sparse_poly", num_vars_x, num_vars_y, diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 6d785624..4ae06d2e 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -5,8 +5,6 @@ use ark_poly_commit::multilinear_pc::{ data_structures::{Commitment, CommitterKey, Proof, VerifierKey}, MultilinearPC, }; -use std::ops::Mul; - use rayon::prelude::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; use crate::{ @@ -92,7 +90,7 @@ impl Polynomial { let q = self.q.clone().unwrap(); (0..q.Z.len()) .into_par_iter() - .map(|j| q.Z[j].mul(Polynomial::get_chi_i(&a, j))) + .map(|j| q.Z[j] * Polynomial::::get_chi_i(&a, j)) .sum() } @@ -273,7 +271,7 @@ mod tests { let p = DensePolynomial::new(Z.clone()); let res1 = p.evaluate(&r); - let mut pl = Polynomial::from_evaluations(&Z.clone()); + let mut pl = Polynomial::::from_evaluations(&Z.clone()); let res2 = pl.eval(&r); assert!(res1 == res2); 
diff --git a/src/sumcheck.rs b/src/sumcheck.rs index 18e209d0..a6c82e3e 100644 --- a/src/sumcheck.rs +++ b/src/sumcheck.rs @@ -3,12 +3,12 @@ use super::dense_mlpoly::DensePolynomial; use super::errors::ProofVerifyError; use crate::poseidon_transcript::PoseidonTranscript; +use crate::transcript::Transcript; use crate::transcript::TranscriptWriter; use super::unipoly::UniPoly; - use ark_ff::PrimeField; -use ark_ff::Zero; + use ark_serialize::*; use itertools::izip; @@ -48,7 +48,7 @@ impl SumcheckInstanceProof { poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round - let r_i = transcript.challenge_scalar(); + let r_i = transcript.challenge_scalar(b""); r.push(r_i); @@ -120,7 +120,7 @@ impl SumcheckInstanceProof { // append the prover's message to the transcript poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round - let r_j = transcript.challenge_scalar(); + let r_j = transcript.challenge_scalar(b""); r.push(r_j); // bound all tables to the verifier's challenege @@ -192,7 +192,7 @@ impl SumcheckInstanceProof { poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round - let r_j = transcript.challenge_scalar(); + let r_j = transcript.challenge_scalar(b""); r.push(r_j); // bound all tables to the verifier's challenege poly_A.bound_poly_var_top(&r_j); @@ -325,7 +325,7 @@ impl SumcheckInstanceProof { poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round - let r_j = transcript.challenge_scalar(); + let r_j = transcript.challenge_scalar(b""); r.push(r_j); // bound all tables to the verifier's challenege @@ -413,7 +413,7 @@ impl SumcheckInstanceProof { poly.write_to_transcript(transcript); //derive the verifier's challenge for the next round - let r_j = transcript.challenge_scalar(); + let r_j = transcript.challenge_scalar(b""); r.push(r_j); // bound all tables to the verifier's challenege diff --git a/src/unipoly.rs b/src/unipoly.rs 
index ae80c160..864db386 100644 --- a/src/unipoly.rs +++ b/src/unipoly.rs @@ -22,7 +22,7 @@ impl UniPoly { assert!(evals.len() == 3 || evals.len() == 4); let coeffs = if evals.len() == 3 { // ax^2 + bx + c - let two_inv = F::from(2).inverse().unwrap(); + let two_inv = F::from(2 as u8).inverse().unwrap(); let c = evals[0]; let a = two_inv * (evals[2] - evals[1] - evals[1] + c); @@ -30,8 +30,8 @@ impl UniPoly { vec![c, b, a] } else { // ax^3 + bx^2 + cx + d - let two_inv = F::from(2).inverse().unwrap(); - let six_inv = F::from(6).inverse().unwrap(); + let two_inv = F::from(2 as u8).inverse().unwrap(); + let six_inv = F::from(6 as u8).inverse().unwrap(); let d = evals[0]; let a = six_inv @@ -71,8 +71,7 @@ impl UniPoly { } eval } - - pub fn compress(&self) -> CompressedUniPoly{ + pub fn compress(&self) -> CompressedUniPoly { let coeffs_except_linear_term = [&self.coeffs[..1], &self.coeffs[2..]].concat(); assert_eq!(coeffs_except_linear_term.len() + 1, self.coeffs.len()); CompressedUniPoly { @@ -81,6 +80,7 @@ impl UniPoly { } } + impl CompressedUniPoly { // we require eval(0) + eval(1) = hint, so we can solve for the linear term as: // linear_term = hint - 2 * constant_term - deg2 term - deg3 term @@ -102,7 +102,7 @@ impl TranscriptWriter for UniPoly { fn write_to_transcript(&self, transcript: &mut impl Transcript) { // transcript.append_message(label, b"UniPoly_begin"); for i in 0..self.coeffs.len() { - transcript.append(&self.coeffs[i], "coeffs"); + transcript.append(b"", &self.coeffs[i]); } // transcript.append_message(label, b"UniPoly_end"); } @@ -118,11 +118,11 @@ mod tests { type F = ark_bls12_377::Fr; #[test] - fn test_from_evals_quad() { + fn test_from_evals_quad() { // polynomial is 2x^2 + 3x + 1 let e0 = F::one(); - let e1 = F::from(6); - let e2 = F::from(15); + let e1 = F::from(6 as u8); + let e2 = F::from(15 as u8); let evals = vec![e0, e1, e2]; let poly = UniPoly::from_evals(&evals); @@ -130,8 +130,8 @@ mod tests { assert_eq!(poly.eval_at_one(), e1); 
assert_eq!(poly.coeffs.len(), 3); assert_eq!(poly.coeffs[0], F::one()); - assert_eq!(poly.coeffs[1], F::from(3)); - assert_eq!(poly.coeffs[2], F::from(2)); + assert_eq!(poly.coeffs[1], F::from(3 as u8)); + assert_eq!(poly.coeffs[2], F::from(2 as u8)); let hint = e0 + e1; let compressed_poly = poly.compress(); @@ -140,8 +140,8 @@ mod tests { assert_eq!(decompressed_poly.coeffs[i], poly.coeffs[i]); } - let e3 = F::from(28); - assert_eq!(poly.evaluate(&F::from(3)), e3); + let e3 = F::from(28 as u8); + assert_eq!(poly.evaluate(&F::from(3 as u8)), e3); } #[test] From 3106e83f84349959dfac13f9b061331078037db0 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 8 Feb 2023 22:17:17 +0100 Subject: [PATCH 29/64] putting back non random blinds --- src/dense_mlpoly.rs | 23 +++++++++++++++-------- src/sparse_mlpoly.rs | 11 +++-------- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index f9a820fe..67e7b551 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -1,11 +1,11 @@ #![allow(clippy::too_many_arguments)] -use crate::poseidon_transcript::PoseidonTranscript; -use crate::transcript::{Transcript, TranscriptWriter}; use super::commitments::{MultiCommitGens, PedersenCommit}; use super::errors::ProofVerifyError; use super::math::Math; use super::nizk::{DotProductProofGens, DotProductProofLog}; +use crate::poseidon_transcript::PoseidonTranscript; +use crate::transcript::{Transcript, TranscriptWriter}; use ark_ec::scalar_mul::variable_base::VariableBaseMSM; use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ff::{PrimeField, Zero}; @@ -13,9 +13,9 @@ use ark_poly::MultilinearExtension; use ark_poly_commit::multilinear_pc::data_structures::{CommitterKey, VerifierKey}; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::*; +use ark_std::One; use core::ops::Index; use std::ops::{Add, AddAssign, Neg, Sub, SubAssign}; -use ark_std::One; #[cfg(feature = "multicore")] use rayon::prelude::*; @@ -350,6 
+350,7 @@ impl DensePolynomial { pub fn commit( &self, gens: &PolyCommitmentGens, + random_blinds: bool, ) -> (PolyCommitment, PolyCommitmentBlinds) where E: Pairing, @@ -364,9 +365,13 @@ impl DensePolynomial { assert_eq!(L_size * R_size, n); let blinds = PolyCommitmentBlinds { - blinds: (0..L_size) - .map(|_| F::rand(&mut rand::thread_rng())) - .collect::>(), + blinds: if random_blinds { + (0..L_size) + .map(|_| F::rand(&mut rand::thread_rng())) + .collect::>() + } else { + (0..L_size).map(|_| F::zero()).collect::>() + }, }; (self.commit_inner(&blinds.blinds, &gens.gens.gens_n), blinds) @@ -545,7 +550,9 @@ where // compute a weighted sum of commitments and L let C_decompressed = &comm.C; - let C_LZ = ::msm(&::normalize_batch(C_decompressed), &L).unwrap(); + let C_LZ = + ::msm(&::normalize_batch(C_decompressed), &L) + .unwrap(); self .proof @@ -739,7 +746,7 @@ mod tests { assert_eq!(eval, F::from(28)); let gens = PolyCommitmentGens::new(poly.get_num_vars(), b"test-two"); - let (poly_commitment, blinds) = poly.commit(&gens); + let (poly_commitment, blinds) = poly.commit(&gens, false); let params = poseidon_params(); let mut prover_transcript = PoseidonTranscript::new(¶ms); diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index e3f4b93f..8503bafa 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -71,7 +71,7 @@ impl Derefs { where E: Pairing, { - let (comm_ops_val, _blinds) = self.comb.commit(gens); + let (comm_ops_val, _blinds) = self.comb.commit(gens, false); DerefsCommitment { comm_ops_val } } } @@ -82,7 +82,6 @@ pub struct DerefsEvalProof { } impl DerefsEvalProof { - fn prove_single( joint_poly: &DensePolynomial, r: &[E::ScalarField], @@ -483,8 +482,8 @@ impl SparseMatPolynomial { let batch_size = sparse_polys.len(); let dense = SparseMatPolynomial::multi_sparse_to_dense_rep(sparse_polys); - let (comm_comb_ops, _blinds_comb_ops) = dense.comb_ops.commit(&gens.gens_ops); - let (comm_comb_mem, _blinds_comb_mem) = 
dense.comb_mem.commit(&gens.gens_mem); + let (comm_comb_ops, _blinds_comb_ops) = dense.comb_ops.commit(&gens.gens_ops, false); + let (comm_comb_mem, _blinds_comb_mem) = dense.comb_mem.commit(&gens.gens_mem, false); ( SparseMatPolyCommitment { @@ -683,7 +682,6 @@ struct HashLayerProof { } impl HashLayerProof { - fn prove_helper( rand: (&Vec, &Vec), addr_timestamps: &AddrTimestamps, @@ -1030,7 +1028,6 @@ struct ProductLayerProof { } impl ProductLayerProof { - pub fn prove( row_prod_layer: &mut ProductLayer, col_prod_layer: &mut ProductLayer, @@ -1322,7 +1319,6 @@ struct PolyEvalNetworkProof { } impl PolyEvalNetworkProof { - pub fn prove( network: &mut PolyEvalNetwork, dense: &MultiSparseMatPolynomialAsDense, @@ -1423,7 +1419,6 @@ pub struct SparseMatPolyEvalProof { } impl SparseMatPolyEvalProof { - fn equalize( rx: &[E::ScalarField], ry: &[E::ScalarField], From f94a450deced650d5d554bc693da66bd01b23768 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 9 Feb 2023 00:05:58 +0100 Subject: [PATCH 30/64] using absorb when we can --- src/dense_mlpoly.rs | 11 ++-- src/lib.rs | 33 ++++++----- src/nizk/bullet.rs | 8 +-- src/nizk/mod.rs | 27 +++++---- src/poseidon_transcript.rs | 75 ++++++++++++++++++++++++- src/product_tree.rs | 37 +++++++------ src/r1csinstance.rs | 21 ++++--- src/r1csproof.rs | 30 ++++------ src/sparse_mlpoly.rs | 109 +++++++++++++++++++++---------------- src/sumcheck.rs | 8 +-- src/transcript.rs | 4 +- src/unipoly.rs | 14 +++-- 12 files changed, 238 insertions(+), 139 deletions(-) diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 67e7b551..63e1f4ae 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -4,8 +4,8 @@ use super::commitments::{MultiCommitGens, PedersenCommit}; use super::errors::ProofVerifyError; use super::math::Math; use super::nizk::{DotProductProofGens, DotProductProofLog}; -use crate::poseidon_transcript::PoseidonTranscript; -use crate::transcript::{Transcript, TranscriptWriter}; +use 
crate::poseidon_transcript::{PoseidonTranscript, TranscriptWriter}; +use ark_crypto_primitives::sponge::Absorb; use ark_ec::scalar_mul::variable_base::VariableBaseMSM; use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ff::{PrimeField, Zero}; @@ -462,10 +462,10 @@ impl Index for DensePolynomial { } } -impl TranscriptWriter for PolyCommitment { - fn write_to_transcript(&self, transcript: &mut impl Transcript) { +impl TranscriptWriter for PolyCommitment { + fn write_to_transcript(&self, transcript: &mut PoseidonTranscript) { for i in 0..self.C.len() { - transcript.append(b"", &self.C[i]); + transcript.append_point(b"", &self.C[i]); } } } @@ -478,6 +478,7 @@ pub struct PolyEvalProof { impl PolyEvalProof where E: Pairing, + E::ScalarField: Absorb, { pub fn prove( poly: &DensePolynomial, diff --git a/src/lib.rs b/src/lib.rs index 920904d6..e25a2ad5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -42,6 +42,7 @@ mod constraints; pub mod poseidon_transcript; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; +use ark_crypto_primitives::sponge::Absorb; use ark_serialize::*; use core::cmp::max; use errors::{ProofVerifyError, R1CSError}; @@ -334,8 +335,11 @@ pub struct SNARK { ry: Vec, } -impl SNARK { - +impl SNARK +where + E: Pairing, + E::ScalarField: Absorb, +{ /// A public computation to create a commitment to an R1CS instance pub fn encode( inst: &Instance, @@ -411,9 +415,9 @@ impl SNARK { let timer_eval = Timer::new("eval_sparse_polys"); let inst_evals = { let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry); - transcript.append(b"", &Ar); - transcript.append(b"", &Br); - transcript.append(b"", &Cr); + transcript.append_scalar(b"", &Ar); + transcript.append_scalar(b"", &Br); + transcript.append_scalar(b"", &Cr); (Ar, Br, Cr) }; timer_eval.stop(); @@ -483,9 +487,9 @@ impl SNARK { // transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); let (Ar, Br, Cr) = &self.inst_evals; - transcript.append(b"", Ar); - transcript.append(b"", Br); - 
transcript.append(b"", Cr); + transcript.append_scalar(b"", Ar); + transcript.append_scalar(b"", Br); + transcript.append_scalar(b"", Cr); self.r1cs_eval_proof.verify( &comm.comm, @@ -530,8 +534,11 @@ pub struct NIZK { r: (Vec, Vec), } -impl NIZK { - +impl NIZK +where + E: Pairing, + E::ScalarField: Absorb, +{ /// A method to produce a NIZK proof of the satisfiability of an R1CS instance pub fn prove( inst: &Instance, @@ -542,7 +549,7 @@ impl NIZK { ) -> Self { let timer_prove = Timer::new("NIZK::prove"); // transcript.append_protocol_name(NIZK::protocol_name()); - transcript.append(b"", &inst.digest); + transcript.append_bytes(b"", &inst.digest); let (r1cs_sat_proof, rx, ry) = { // we might need to pad variables @@ -589,7 +596,7 @@ impl NIZK { ) -> Result { let timer_verify = Timer::new("NIZK::verify"); - transcript.append(b"", &inst.digest); + transcript.append_bytes(b"", &inst.digest); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check @@ -632,7 +639,7 @@ impl NIZK { let timer_verify = Timer::new("NIZK::verify"); // transcript.append_protocol_name(NIZK::protocol_name()); - transcript.append(b"", &inst.digest); + transcript.append_bytes(b"", &inst.digest); // We send evaluations of A, B, C at r = (rx, ry) as claims // to enable the verifier complete the first sum-check diff --git a/src/nizk/bullet.rs b/src/nizk/bullet.rs index ab1d8ea0..5d84b679 100644 --- a/src/nizk/bullet.rs +++ b/src/nizk/bullet.rs @@ -117,8 +117,8 @@ impl BulletReductionProof { .as_slice(), ); - transcript.append(b"", &L); - transcript.append(b"", &R); + transcript.append_point(b"", &L); + transcript.append_point(b"", &R); let u: G::ScalarField = transcript.challenge_scalar(b""); let u_inv = u.inverse().unwrap(); @@ -179,8 +179,8 @@ impl BulletReductionProof { // 1. 
Recompute x_k,...,x_1 based on the proof transcript let mut challenges = Vec::with_capacity(lg_n); for (L, R) in self.L_vec.iter().zip(self.R_vec.iter()) { - transcript.append(b"", L); - transcript.append(b"", R); + transcript.append_point(b"", L); + transcript.append_point(b"", R); challenges.push(transcript.challenge_scalar(b"")); } diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index 9858b5c6..7f416fa7 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -5,6 +5,7 @@ use crate::ark_std::UniformRand; use crate::math::Math; use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::Transcript; +use ark_crypto_primitives::sponge::Absorb; use ark_ec::CurveGroup; use ark_serialize::*; @@ -36,8 +37,10 @@ pub struct DotProductProofLog { z2: G::ScalarField, } -impl DotProductProofLog { - +impl DotProductProofLog +where +G: CurveGroup, +G::ScalarField: Absorb { pub fn prove( gens: &DotProductProofGens, transcript: &mut PoseidonTranscript, @@ -69,11 +72,11 @@ impl DotProductProofLog { }; let Cx = PedersenCommit::commit_slice(x_vec, blind_x, &gens.gens_n); - transcript.append(b"", &Cx); + transcript.append_point(b"", &Cx); let Cy = PedersenCommit::commit_scalar(y, blind_y, &gens.gens_1); - transcript.append(b"", &Cy); - transcript.append(b"", &a_vec); + transcript.append_point(b"", &Cy); + transcript.append_scalar_vector(b"", &a_vec); let blind_Gamma = (*blind_x) + blind_y; let (bullet_reduction_proof, _Gamma_hat, x_hat, a_hat, g_hat, rhat_Gamma) = @@ -97,10 +100,10 @@ impl DotProductProofLog { }; PedersenCommit::commit_scalar(&d, &r_delta, &gens_hat) }; - transcript.append(b"", &delta); + transcript.append_point(b"", &delta); let beta = PedersenCommit::commit_scalar(&d, &r_beta, &gens.gens_1); - transcript.append(b"", &beta); + transcript.append_point(b"", &beta); let c: G::ScalarField = transcript.challenge_scalar(b""); @@ -137,9 +140,9 @@ impl DotProductProofLog { // Cy.write_to_transcript( transcript); // a.write_to_transcript( transcript); - 
transcript.append(b"", Cx); - transcript.append(b"", Cy); - transcript.append(b"", &a); + transcript.append_point(b"", Cx); + transcript.append_point(b"", Cy); + transcript.append_scalar_vector(b"", &a); let Gamma = Cx.add(Cy); @@ -150,8 +153,8 @@ impl DotProductProofLog { // self.delta.write_to_transcript( transcript); // self.beta.write_to_transcript( transcript); - transcript.append(b"", &self.delta); - transcript.append(b"", &self.beta); + transcript.append_point(b"", &self.delta); + transcript.append_point(b"", &self.beta); let c = transcript.challenge_scalar(b""); diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index ba7e0fd1..15cc1f26 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -1,8 +1,9 @@ use crate::transcript::Transcript; use ark_crypto_primitives::sponge::{ poseidon::{PoseidonConfig, PoseidonSponge}, - CryptographicSponge, + Absorb, CryptographicSponge, }; +use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ff::PrimeField; use ark_serialize::CanonicalSerialize; use ark_serialize::Compress; @@ -43,6 +44,76 @@ impl PoseidonTranscript { pub fn new_from_state(&mut self, challenge: &F) { self.sponge = PoseidonSponge::new(&self.params); - self.append(b"",challenge); + self.append(b"", challenge); } + pub fn append_u64(&mut self, _label: &'static [u8], x: u64) { + self.sponge.absorb(&x); + } + + pub fn append_bytes(&mut self, _label: &'static [u8], x: &Vec) { + self.sponge.absorb(x); + } + + pub fn append_scalar(&mut self, _label: &'static [u8], scalar: &T) { + self.sponge.absorb(&scalar); + } + + pub fn append_point(&mut self, _label: &'static [u8], point: &G) + where + G: CurveGroup, + { + let mut point_encoding = Vec::new(); + point + .serialize_with_mode(&mut point_encoding, Compress::Yes) + .unwrap(); + self.sponge.absorb(&point_encoding); + } + + pub fn append_scalar_vector( + &mut self, + _label: &'static [u8], + scalars: &[T], + ) { + for scalar in scalars.iter() { + self.append_scalar(b"", scalar); 
+ } + } + + pub fn append_gt(&mut self, _label: &'static [u8], g_t: &E::TargetField) + where + E: Pairing, + { + let mut bytes = Vec::new(); + g_t.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); + self.append_bytes(b"", &bytes); + } +} + +pub trait TranscriptWriter { + fn write_to_transcript(&self, transcript: &mut PoseidonTranscript); } +//pub trait AppendToPoseidon { +// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript); +//} +// +//impl AppendToPoseidon for CompressedGroup { +// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { +// transcript.append_point(self); +// } +//} +// +//impl AppendToPoseidon for Commitment { +// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { +// let mut bytes = Vec::new(); +// self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); +// transcript.append_bytes(&bytes); +// } +//} +// +//impl AppendToPoseidon for G1Affine { +// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { +// let mut bytes = Vec::new(); +// self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); +// transcript.append_bytes(&bytes); +//} +// diff --git a/src/product_tree.rs b/src/product_tree.rs index 817c487d..32af26f0 100644 --- a/src/product_tree.rs +++ b/src/product_tree.rs @@ -5,6 +5,7 @@ use super::math::Math; use super::sumcheck::SumcheckInstanceProof; use crate::poseidon_transcript::PoseidonTranscript; use crate::transcript::Transcript; +use ark_crypto_primitives::sponge::Absorb; use ark_ff::PrimeField; use ark_serialize::*; @@ -118,7 +119,7 @@ pub struct LayerProof { } #[allow(dead_code)] -impl LayerProof { +impl LayerProof { pub fn verify( &self, claim: F, @@ -142,7 +143,7 @@ pub struct LayerProofBatched { } #[allow(dead_code)] -impl LayerProofBatched { +impl LayerProofBatched { pub fn verify( &self, claim: F, @@ -168,7 +169,7 @@ pub struct ProductCircuitEvalProofBatched { claims_dotp: (Vec, Vec, Vec), } -impl ProductCircuitEvalProof { +impl ProductCircuitEvalProof 
{ #![allow(dead_code)] pub fn prove( circuit: &mut ProductCircuit, @@ -199,8 +200,8 @@ impl ProductCircuitEvalProof { transcript, ); - transcript.append(b"", &claims_prod[0]); - transcript.append(b"", &claims_prod[1]); + transcript.append_scalar(b"", &claims_prod[0]); + transcript.append_scalar(b"", &claims_prod[1]); // produce a random challenge let r_layer = transcript.challenge_scalar(b""); @@ -229,8 +230,8 @@ impl ProductCircuitEvalProof { let (claim_last, rand_prod) = self.proof[i].verify(claim, num_rounds, 3, transcript); let claims_prod = &self.proof[i].claims; - transcript.append(b"", &claims_prod[0]); - transcript.append(b"", &claims_prod[1]); + transcript.append_scalar(b"", &claims_prod[0]); + transcript.append_scalar(b"", &claims_prod[1]); assert_eq!(rand.len(), rand_prod.len()); let eq: F = (0..rand.len()) @@ -250,7 +251,7 @@ impl ProductCircuitEvalProof { } } -impl ProductCircuitEvalProofBatched { +impl ProductCircuitEvalProofBatched { pub fn prove( prod_circuit_vec: &mut Vec<&mut ProductCircuit>, dotp_circuit_vec: &mut Vec<&mut DotProductCircuit>, @@ -334,16 +335,16 @@ impl ProductCircuitEvalProofBatched { let (claims_prod_left, claims_prod_right, _claims_eq) = claims_prod; for i in 0..prod_circuit_vec.len() { - transcript.append(b"", &claims_prod_left[i]); - transcript.append(b"", &claims_prod_right[i]); + transcript.append_scalar(b"", &claims_prod_left[i]); + transcript.append_scalar(b"", &claims_prod_right[i]); } if layer_id == 0 && !dotp_circuit_vec.is_empty() { let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = claims_dotp; for i in 0..dotp_circuit_vec.len() { - transcript.append(b"", &claims_dotp_left[i]); - transcript.append(b"", &claims_dotp_right[i]); - transcript.append(b"", &claims_dotp_weight[i]); + transcript.append_scalar(b"", &claims_dotp_left[i]); + transcript.append_scalar(b"", &claims_dotp_right[i]); + transcript.append_scalar(b"", &claims_dotp_weight[i]); } claims_dotp_final = (claims_dotp_left, claims_dotp_right, 
claims_dotp_weight); } @@ -410,8 +411,8 @@ impl ProductCircuitEvalProofBatched { assert_eq!(claims_prod_right.len(), claims_prod_vec.len()); for i in 0..claims_prod_vec.len() { - transcript.append(b"", &claims_prod_left[i]); - transcript.append(b"", &claims_prod_right[i]); + transcript.append_scalar(b"", &claims_prod_left[i]); + transcript.append_scalar(b"", &claims_prod_right[i]); } assert_eq!(rand.len(), rand_prod.len()); @@ -427,9 +428,9 @@ impl ProductCircuitEvalProofBatched { let num_prod_instances = claims_prod_vec.len(); let (claims_dotp_left, claims_dotp_right, claims_dotp_weight) = &self.claims_dotp; for i in 0..claims_dotp_left.len() { - transcript.append(b"", &claims_dotp_left[i]); - transcript.append(b"", &claims_dotp_right[i]); - transcript.append(b"", &claims_dotp_weight[i]); + transcript.append_scalar(b"", &claims_dotp_left[i]); + transcript.append_scalar(b"", &claims_dotp_right[i]); + transcript.append_scalar(b"", &claims_dotp_weight[i]); claim_expected += coeff_vec[i + num_prod_instances] * claims_dotp_left[i] diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index 93157f58..c83bc5d9 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -6,8 +6,9 @@ use super::sparse_mlpoly::{ SparseMatPolyCommitmentGens, SparseMatPolyEvalProof, SparseMatPolynomial, }; use super::timer::Timer; -use crate::poseidon_transcript::PoseidonTranscript; -use crate::transcript::{Transcript, TranscriptWriter}; +use crate::poseidon_transcript::{PoseidonTranscript, TranscriptWriter}; +use crate::transcript::Transcript; +use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_ec::CurveGroup; use ark_ff::PrimeField; @@ -54,11 +55,11 @@ pub struct R1CSCommitment { comm: SparseMatPolyCommitment, } -impl TranscriptWriter for R1CSCommitment { - fn write_to_transcript(&self, transcript: &mut impl Transcript) { - transcript.append(b"", &(self.num_cons as u64)); - transcript.append(b"", &(self.num_vars as u64)); - transcript.append(b"", 
&(self.num_inputs as u64)); +impl TranscriptWriter for R1CSCommitment { + fn write_to_transcript(&self, transcript: &mut PoseidonTranscript) { + transcript.append_u64(b"", self.num_cons as u64); + transcript.append_u64(b"", self.num_vars as u64); + transcript.append_u64(b"", self.num_inputs as u64); self.comm.write_to_transcript(transcript); } } @@ -327,7 +328,11 @@ pub struct R1CSEvalProof { proof: SparseMatPolyEvalProof, } -impl R1CSEvalProof { +impl R1CSEvalProof +where + E: Pairing, + E::ScalarField: Absorb, +{ pub fn prove( decomm: &R1CSDecommitment, rx: &[E::ScalarField], // point at which the polynomial is evaluated diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 48eac2f3..fc747f52 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -7,8 +7,9 @@ use crate::mipp::MippProof; use crate::poseidon_transcript::PoseidonTranscript; use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; -use crate::transcript::{Transcript}; +use crate::transcript::Transcript; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; +use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; @@ -61,7 +62,11 @@ impl R1CSGens { } } -impl R1CSProof { +impl R1CSProof +where + E: Pairing, + E::ScalarField: Absorb, +{ fn prove_phase_one( num_rounds: usize, evals_tau: &mut DensePolynomial, @@ -116,7 +121,6 @@ impl R1CSProof { (sc_proof_phase_two, r, claims) } - pub fn prove( inst: &R1CSInstance, vars: Vec, @@ -137,9 +141,7 @@ impl R1CSProof { // commitment list to the satisfying witness polynomial list let (comm_list, t) = pl.commit(&gens.gens_pc.ck); - let mut bytes = Vec::new(); - t.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); - transcript.append(b"", &bytes); + transcript.append_gt::(b"", &t); // comm.write_to_transcript(transcript); timer_commit.stop(); @@ -147,7 +149,7 @@ impl R1CSProof { let c = transcript.challenge_scalar(b""); 
transcript.new_from_state(&c); - transcript.append(b"", &input); + transcript.append_scalar_vector(b"", &input); let timer_sc_proof_phase1 = Timer::new("prove_sc_phase_one"); @@ -272,12 +274,7 @@ impl R1CSProof { poseidon: PoseidonConfig, ) -> Result<(u128, u128, u128), ProofVerifyError> { // serialise and add the IPP commitment to the transcript - let mut bytes = Vec::new(); - self - .t - .serialize_with_mode(&mut bytes, Compress::Yes) - .unwrap(); - transcript.append(b"", &bytes); + transcript.append_gt::(b"", &self.t); let c = transcript.challenge_scalar(b""); @@ -373,12 +370,7 @@ impl R1CSProof { poseidon: PoseidonConfig, ) -> Result { // serialise and add the IPP commitment to the transcript - let mut bytes = Vec::new(); - self - .t - .serialize_with_mode(&mut bytes, Compress::Yes) - .unwrap(); - transcript.append(b"", &bytes); + transcript.append_gt::(b"", &self.t); let c: E::ScalarField = transcript.challenge_scalar(b""); diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index 8503bafa..1a06a752 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -9,8 +9,9 @@ use super::errors::ProofVerifyError; use super::math::Math; use super::product_tree::{DotProductCircuit, ProductCircuit, ProductCircuitEvalProofBatched}; use super::timer::Timer; -use crate::poseidon_transcript::PoseidonTranscript; -use crate::transcript::{Transcript, TranscriptWriter}; +use crate::poseidon_transcript::{PoseidonTranscript, TranscriptWriter}; +use crate::transcript::Transcript; +use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_ec::CurveGroup; use ark_ff::PrimeField; @@ -81,7 +82,11 @@ pub struct DerefsEvalProof { proof_derefs: PolyEvalProof, } -impl DerefsEvalProof { +impl DerefsEvalProof +where + E: Pairing, + E::ScalarField: Absorb, +{ fn prove_single( joint_poly: &DensePolynomial, r: &[E::ScalarField], @@ -93,7 +98,7 @@ impl DerefsEvalProof { // append the claimed evaluations to transcript // 
evals.append_to_transcript(b"evals_ops_val", transcript); - transcript.append(b"", &evals); + transcript.append_scalar_vector(b"", &evals); // n-to-1 reduction let (r_joint, eval_joint) = { @@ -111,7 +116,7 @@ impl DerefsEvalProof { (r_joint, joint_claim_eval) }; // decommit the joint polynomial at r_joint - transcript.append(b"", &eval_joint); + transcript.append_scalar(b"", &eval_joint); let (proof_derefs, _comm_derefs_eval) = PolyEvalProof::prove( joint_poly, None, @@ -157,7 +162,7 @@ impl DerefsEvalProof { ) -> Result<(), ProofVerifyError> { // append the claimed evaluations to transcript // evals.append_to_transcript(b"evals_ops_val", transcript); - transcript.append(b"", &evals); + transcript.append_scalar_vector(b"", &evals); // n-to-1 reduction let challenges = transcript.challenge_scalar_vec(b"", evals.len().log_2()); @@ -172,7 +177,7 @@ impl DerefsEvalProof { // decommit the joint polynomial at r_joint // joint_claim_eval.append_to_transcript(b"joint_claim_eval", transcript); - transcript.append(b"", &joint_claim_eval); + transcript.append_scalar(b"", &joint_claim_eval); proof.verify_plain(gens, transcript, &r_joint, &joint_claim_eval, comm) } @@ -203,8 +208,8 @@ impl DerefsEvalProof { } } -impl TranscriptWriter for DerefsCommitment { - fn write_to_transcript(&self, transcript: &mut impl Transcript) { +impl TranscriptWriter for DerefsCommitment { + fn write_to_transcript(&self, transcript: &mut PoseidonTranscript) { self.comm_ops_val.write_to_transcript(transcript); } } @@ -322,11 +327,11 @@ pub struct SparseMatPolyCommitment { comm_comb_mem: PolyCommitment, } -impl TranscriptWriter for SparseMatPolyCommitment { - fn write_to_transcript(&self, transcript: &mut impl Transcript) { - transcript.append(b"", &(self.batch_size as u64)); - transcript.append(b"", &(self.num_ops as u64)); - transcript.append(b"", &(self.num_mem_cells as u64)); +impl TranscriptWriter for SparseMatPolyCommitment { + fn write_to_transcript(&self, transcript: &mut PoseidonTranscript) 
{ + transcript.append_u64(b"", self.batch_size as u64); + transcript.append_u64(b"", self.num_ops as u64); + transcript.append_u64(b"", self.num_mem_cells as u64); self.comm_comb_ops.write_to_transcript(transcript); self.comm_comb_mem.write_to_transcript(transcript); } @@ -681,7 +686,11 @@ struct HashLayerProof { proof_derefs: DerefsEvalProof, } -impl HashLayerProof { +impl HashLayerProof +where + E: Pairing, + E::ScalarField: Absorb, +{ fn prove_helper( rand: (&Vec, &Vec), addr_timestamps: &AddrTimestamps, @@ -754,7 +763,7 @@ impl HashLayerProof { evals_ops.extend(&eval_col_read_ts_vec); evals_ops.extend(&eval_val_vec); evals_ops.resize(evals_ops.len().next_power_of_two(), E::ScalarField::zero()); - transcript.append(b"", &evals_ops); + transcript.append_scalar_vector(b"", &evals_ops); let challenges_ops = transcript.challenge_scalar_vec(b"", evals_ops.len().log_2()); let mut poly_evals_ops = DensePolynomial::new(evals_ops); @@ -766,7 +775,7 @@ impl HashLayerProof { let mut r_joint_ops = challenges_ops; r_joint_ops.extend(rand_ops); debug_assert_eq!(dense.comb_ops.evaluate(&r_joint_ops), joint_claim_eval_ops); - transcript.append(b"", &joint_claim_eval_ops); + transcript.append_scalar(b"", &joint_claim_eval_ops); let (proof_ops, _comm_ops_eval) = PolyEvalProof::prove( &dense.comb_ops, None, @@ -780,7 +789,7 @@ impl HashLayerProof { // form a single decommitment using comb_comb_mem at rand_mem let evals_mem: Vec = vec![eval_row_audit_ts, eval_col_audit_ts]; // evals_mem.append_to_transcript(b"claim_evals_mem", transcript); - transcript.append(b"", &evals_mem); + transcript.append_scalar_vector(b"", &evals_mem); let challenges_mem = transcript.challenge_scalar_vec(b"", evals_mem.len().log_2()); let mut poly_evals_mem = DensePolynomial::new(evals_mem); @@ -792,7 +801,7 @@ impl HashLayerProof { let mut r_joint_mem = challenges_mem; r_joint_mem.extend(rand_mem); debug_assert_eq!(dense.comb_mem.evaluate(&r_joint_mem), joint_claim_eval_mem); - transcript.append(b"", 
&joint_claim_eval_mem); + transcript.append_scalar(b"", &joint_claim_eval_mem); let (proof_mem, _comm_mem_eval) = PolyEvalProof::prove( &dense.comb_mem, None, @@ -937,7 +946,7 @@ impl HashLayerProof { evals_ops.extend(eval_col_read_ts_vec); evals_ops.extend(eval_val_vec); evals_ops.resize(evals_ops.len().next_power_of_two(), E::ScalarField::zero()); - transcript.append(b"", &evals_ops); + transcript.append_scalar_vector(b"", &evals_ops); // evals_ops.append_to_transcript(b"claim_evals_ops", transcript); let challenges_ops = transcript.challenge_scalar_vec(b"", evals_ops.len().log_2()); @@ -949,7 +958,7 @@ impl HashLayerProof { let joint_claim_eval_ops = poly_evals_ops[0]; let mut r_joint_ops = challenges_ops; r_joint_ops.extend(rand_ops); - transcript.append(b"", &joint_claim_eval_ops); + transcript.append_scalar(b"", &joint_claim_eval_ops); assert!(self .proof_ops .verify_plain( @@ -965,7 +974,7 @@ impl HashLayerProof { // form a single decommitment using comb_comb_mem at rand_mem let evals_mem: Vec = vec![*eval_row_audit_ts, *eval_col_audit_ts]; // evals_mem.append_to_transcript(b"claim_evals_mem", transcript); - transcript.append(b"", &evals_mem); + transcript.append_scalar_vector(b"", &evals_mem); let challenges_mem = transcript.challenge_scalar_vec(b"", evals_mem.len().log_2()); let mut poly_evals_mem = DensePolynomial::new(evals_mem); @@ -977,7 +986,7 @@ impl HashLayerProof { let mut r_joint_mem = challenges_mem; r_joint_mem.extend(rand_mem); // joint_claim_eval_mem.append_to_transcript(b"joint_claim_eval_mem", transcript); - transcript.append(b"", &joint_claim_eval_mem); + transcript.append_scalar(b"", &joint_claim_eval_mem); self.proof_mem.verify_plain( &gens.gens_mem, transcript, @@ -1027,7 +1036,7 @@ struct ProductLayerProof { proof_ops: ProductCircuitEvalProofBatched, } -impl ProductLayerProof { +impl ProductLayerProof { pub fn prove( row_prod_layer: &mut ProductLayer, col_prod_layer: &mut ProductLayer, @@ -1054,10 +1063,10 @@ impl ProductLayerProof { 
let rs: F = (0..row_eval_read.len()).map(|i| row_eval_read[i]).product(); assert_eq!(row_eval_init * ws, rs * row_eval_audit); - transcript.append(b"", &row_eval_init); - transcript.append(b"", &row_eval_read); - transcript.append(b"", &row_eval_write); - transcript.append(b"", &row_eval_audit); + transcript.append_scalar(b"", &row_eval_init); + transcript.append_scalar_vector(b"", &row_eval_read); + transcript.append_scalar_vector(b"", &row_eval_write); + transcript.append_scalar(b"", &row_eval_audit); let col_eval_init = col_prod_layer.init.evaluate(); let col_eval_audit = col_prod_layer.audit.evaluate(); @@ -1075,10 +1084,10 @@ impl ProductLayerProof { let rs: F = (0..col_eval_read.len()).map(|i| col_eval_read[i]).product(); assert_eq!(col_eval_init * ws, rs * col_eval_audit); - transcript.append(b"", &col_eval_init); - transcript.append(b"", &col_eval_read); - transcript.append(b"", &col_eval_write); - transcript.append(b"", &col_eval_audit); + transcript.append_scalar(b"", &col_eval_init); + transcript.append_scalar_vector(b"", &col_eval_read); + transcript.append_scalar_vector(b"", &col_eval_write); + transcript.append_scalar(b"", &col_eval_audit); // prepare dotproduct circuit for batching then with ops-related product circuits assert_eq!(eval.len(), derefs.row_ops_val.len()); @@ -1103,8 +1112,8 @@ impl ProductLayerProof { // eval_dotp_left.append_to_transcript(b"claim_eval_dotp_left", transcript); // eval_dotp_right.append_to_transcript(b"claim_eval_dotp_right", transcript); - transcript.append(b"", &eval_dotp_left); - transcript.append(b"", &eval_dotp_right); + transcript.append_scalar(b"", &eval_dotp_left); + transcript.append_scalar(b"", &eval_dotp_right); assert_eq!(eval_dotp_left + eval_dotp_right, eval[i]); eval_dotp_left_vec.push(eval_dotp_left); eval_dotp_right_vec.push(eval_dotp_right); @@ -1240,10 +1249,10 @@ impl ProductLayerProof { // row_eval_write.append_to_transcript(b"claim_row_eval_write", transcript); // 
row_eval_audit.append_to_transcript(b"claim_row_eval_audit", transcript); - transcript.append(b"", row_eval_init); - transcript.append(b"", row_eval_read); - transcript.append(b"", row_eval_write); - transcript.append(b"", row_eval_audit); + transcript.append_scalar(b"", row_eval_init); + transcript.append_scalar_vector(b"", row_eval_read); + transcript.append_scalar_vector(b"", row_eval_write); + transcript.append_scalar(b"", row_eval_audit); // subset check let (col_eval_init, col_eval_read, col_eval_write, col_eval_audit) = &self.eval_col; @@ -1260,10 +1269,10 @@ impl ProductLayerProof { // col_eval_write.append_to_transcript(b"claim_col_eval_write", transcript); // col_eval_audit.append_to_transcript(b"claim_col_eval_audit", transcript); - transcript.append(b"", col_eval_init); - transcript.append(b"", col_eval_read); - transcript.append(b"", col_eval_write); - transcript.append(b"", col_eval_audit); + transcript.append_scalar(b"", col_eval_init); + transcript.append_scalar_vector(b"", col_eval_read); + transcript.append_scalar_vector(b"", col_eval_write); + transcript.append_scalar(b"", col_eval_audit); // verify the evaluation of the sparse polynomial let (eval_dotp_left, eval_dotp_right) = &self.eval_val; @@ -1274,8 +1283,8 @@ impl ProductLayerProof { assert_eq!(eval_dotp_left[i] + eval_dotp_right[i], eval[i]); // eval_dotp_left[i].append_to_transcript(b"claim_eval_dotp_left", transcript); // eval_dotp_right[i].append_to_transcript(b"claim_eval_dotp_right", transcript) - transcript.append(b"", &eval_dotp_left[i]); - transcript.append(b"", &eval_dotp_right[i]); + transcript.append_scalar(b"", &eval_dotp_left[i]); + transcript.append_scalar(b"", &eval_dotp_right[i]); claims_dotp_circuit.push(eval_dotp_left[i]); claims_dotp_circuit.push(eval_dotp_right[i]); @@ -1318,7 +1327,11 @@ struct PolyEvalNetworkProof { proof_hash_layer: HashLayerProof, } -impl PolyEvalNetworkProof { +impl PolyEvalNetworkProof +where + E: Pairing, + E::ScalarField: Absorb, +{ pub fn 
prove( network: &mut PolyEvalNetwork, dense: &MultiSparseMatPolynomialAsDense, @@ -1418,7 +1431,11 @@ pub struct SparseMatPolyEvalProof { poly_eval_network_proof: PolyEvalNetworkProof, } -impl SparseMatPolyEvalProof { +impl SparseMatPolyEvalProof +where +E: Pairing, +E::ScalarField: Absorb +{ fn equalize( rx: &[E::ScalarField], ry: &[E::ScalarField], diff --git a/src/sumcheck.rs b/src/sumcheck.rs index a6c82e3e..8c728e09 100644 --- a/src/sumcheck.rs +++ b/src/sumcheck.rs @@ -2,11 +2,11 @@ #![allow(clippy::type_complexity)] use super::dense_mlpoly::DensePolynomial; use super::errors::ProofVerifyError; -use crate::poseidon_transcript::PoseidonTranscript; +use crate::poseidon_transcript::{PoseidonTranscript, TranscriptWriter}; use crate::transcript::Transcript; -use crate::transcript::TranscriptWriter; use super::unipoly::UniPoly; +use ark_crypto_primitives::sponge::Absorb; use ark_ff::PrimeField; use ark_serialize::*; @@ -18,7 +18,7 @@ pub struct SumcheckInstanceProof { pub polys: Vec>, } -impl SumcheckInstanceProof { +impl SumcheckInstanceProof { pub fn new(polys: Vec>) -> Self { SumcheckInstanceProof { polys } } @@ -60,7 +60,7 @@ impl SumcheckInstanceProof { } } -impl SumcheckInstanceProof { +impl SumcheckInstanceProof { pub fn prove_cubic_with_additive_term( claim: &F, num_rounds: usize, diff --git a/src/transcript.rs b/src/transcript.rs index cf61b8f8..d0f119ac 100644 --- a/src/transcript.rs +++ b/src/transcript.rs @@ -13,6 +13,4 @@ pub trait Transcript { } } -pub trait TranscriptWriter { - fn write_to_transcript(&self, transcript: &mut impl Transcript); -} +pub use crate::poseidon_transcript::TranscriptWriter; \ No newline at end of file diff --git a/src/unipoly.rs b/src/unipoly.rs index 864db386..40cfac4c 100644 --- a/src/unipoly.rs +++ b/src/unipoly.rs @@ -1,4 +1,9 @@ -use crate::transcript::{Transcript, TranscriptWriter}; +use crate::{ + constraints::PoseidonTranscripVar, + poseidon_transcript::{PoseidonTranscript, TranscriptWriter}, + transcript::Transcript, 
+}; +use ark_crypto_primitives::sponge::{poseidon::PoseidonConfig, Absorb}; use ark_ff::{Field, PrimeField}; use ark_serialize::*; // ax^2 + bx + c stored as vec![c,b,a] @@ -80,7 +85,6 @@ impl UniPoly { } } - impl CompressedUniPoly { // we require eval(0) + eval(1) = hint, so we can solve for the linear term as: // linear_term = hint - 2 * constant_term - deg2 term - deg3 term @@ -98,11 +102,11 @@ impl CompressedUniPoly { } } -impl TranscriptWriter for UniPoly { - fn write_to_transcript(&self, transcript: &mut impl Transcript) { +impl TranscriptWriter for UniPoly { + fn write_to_transcript(&self, transcript: &mut PoseidonTranscript) { // transcript.append_message(label, b"UniPoly_begin"); for i in 0..self.coeffs.len() { - transcript.append(b"", &self.coeffs[i]); + transcript.append_scalar(b"", &self.coeffs[i]); } // transcript.append_message(label, b"UniPoly_end"); } From f2c2ae95d0b05d286e84fcb4e14672bdae835610 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 9 Feb 2023 00:16:10 +0100 Subject: [PATCH 31/64] absorbing scalar --- src/poseidon_transcript.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 15cc1f26..b449fcc8 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -41,11 +41,16 @@ impl PoseidonTranscript { params: params.clone(), } } +} +impl PoseidonTranscript { pub fn new_from_state(&mut self, challenge: &F) { self.sponge = PoseidonSponge::new(&self.params); - self.append(b"", challenge); + self.append_scalar(b"", challenge); } +} + +impl PoseidonTranscript { pub fn append_u64(&mut self, _label: &'static [u8], x: u64) { self.sponge.absorb(&x); } From a4c0793a5d40a3fca13b4d33bc97bbf13335b508 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 9 Feb 2023 10:39:04 +0100 Subject: [PATCH 32/64] with bls12-381 --- Cargo.toml | 3 + src/parameters.rs | 2248 +++++++++++++++++++++++++++++++++++- src/poseidon_transcript.rs | 44 +- src/r1csproof.rs | 24 +- 
4 files changed, 2288 insertions(+), 31 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index afee32e8..4ab34322 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,6 +22,7 @@ ark-ff = { version = "^0.4.0", default-features = false } ark-ec = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0"} ark-bls12-377 = { version = "^0.4.0", features = ["r1cs","curve"] } +ark-bls12-381 = { version = "^0.4.0", features = ["curve"] } ark-serialize = { version = "^0.4.0", features = ["derive"] } ark-crypto-primitives = {version = "^0.4.0", features = ["sponge","r1cs","snark"] } ark-r1cs-std = { version = "^0.4.0", default-features = false } @@ -32,6 +33,8 @@ ark-bw6-761 = { version = "^0.4.0" } ark-poly-commit = { version = "^0.4.0" } ark-poly = {version = "^0.4.0"} +poseidon-paramgen = { git = "https://github.com/nikkolasg/poseidon377", branch = "feat/v0.4" } +poseidon-parameters = { git = "https://github.com/nikkolasg/poseidon377", branch = "feat/v0.4" } lazy_static = "1.4.0" rand = { version = "0.8", features = [ "std", "std_rng" ] } diff --git a/src/parameters.rs b/src/parameters.rs index b5416c41..ce094d86 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -1,6 +1,6 @@ -use std::str::FromStr; use ark_bls12_377::Fr; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; +use std::str::FromStr; // Copyright: https://github.com/nikkolasg/ark-dkg/blob/main/src/parameters.rs use json::JsonValue; use lazy_static::lazy_static; @@ -175,6 +175,2252 @@ pub fn poseidon_params() -> PoseidonConfig { ) } +pub fn poseidon_params_bls12381() -> PoseidonConfig { + use ark_ff::PrimeField; + + use poseidon_parameters::{ + Alpha, ArcMatrix, Matrix, MatrixOperations, MdsMatrix, OptimizedArcMatrix, + OptimizedMdsMatrices, PoseidonParameters, RoundNumbers, SquareMatrix, + }; + + /// Parameters for the rate-1 instance of Poseidon. 
+ pub fn rate_1() -> PoseidonParameters { + PoseidonParameters { + M: 128, + t: 2, + arc: ArcMatrix::new( + 64, + 2, + vec![ + F::from_str( + "35174739893055911104493616029378130908017657834702731071195911003169112450229", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40230473166484073181383530626136429631051240172158259323832118663695222064618", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44839971797550275719608927493602345619671796219131839128512847210863471539893", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48611564661854252146943435349048894917868766542696555144699929229862960415826", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40685922923883326639435460208454299274336241847181916712959630054792010757353", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "15664326207051802832937062272185115402684860397390155009854115660846132411502", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "20394588293088308460232807257084465383971903994422965392216628021702933756783", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38072899122370719412288433912125900148799524273752876684473712699262778897654", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "49322204581160802920728528441668898478821361914594574984125707955891751339030", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "50347305345520436535964839331484028706021264874956603553067798037836173651466", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40078972002964969619442410831168416589272403782380052533737439660325916494912", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16924481893633890556036089675091788745304335123156116162143840931183063196275", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "15950945481350597658451757244282476321967934435937826665895833437077276040754", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45461282993637819573288749652453263783235945959670912694991729940283965985733", + ) + .map_err(|_| ()) + 
.unwrap(), + F::from_str( + "28038644046234298370628400737789907143817173874662134250914598498795544431933", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "11068658063110546729562922745621277134925014371838931507832785600289614789661", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "52268179647931159024439236070026950641875512180000134500458099574638256905396", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "20086107503624605748636970696926057193402564249603453473832198318657465264844", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "43167135986193729211374724389523185950396183466709666189982348666853925219274", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "17447884039561508187156464597183128324504945258768782636135974167343387840370", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38143921110340159188228723493420793611575037275078276538202087394517162729554", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16552274712964434889263219772539968802934308159216951923060313539515542031786", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "21533690197859269486445042768281999749203728740955226853531960445800814390872", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "21880324978302337713295482389160476412101864070449056420604485964938425631874", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "21087213863282472584378891060158648179244240590829455065271622458534461188836", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45441431536584624993024537766051601076632677727170866925607963222280486713049", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "30366589203056029534723552859270912699187398046791858542022387243548834105416", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16733270300941292512227942400499003353179417300568446496228472021958136869720", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47689344912249729511524931558254404476237521452234110431416696572010445584377", + ) + 
.map_err(|_| ()) + .unwrap(), + F::from_str( + "51082398733030351856006960204664584892075227924560364715540908659573989781750", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "43167631539924493632724725819354890041217481730115306669306358893967667398874", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26279180116818040201102703456273647693759458841914982758720296510206405428021", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "46216367353381802394196440660508655919466089439065744128194487821032228754652", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7697386669681950056820540614309128742475760814776187777185453179049760288818", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "21817782321868614563882198267973607652180292878221625474094565055913249527776", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "32913105459022632076500291209667002113801444807962815598956143828295422498435", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "8502502120080628515364514243099640525167810254369523636419786457595198827016", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "30648576830725883184207011877575066156796295678736109722327320104219004516016", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45671339585608301452963125193632555070872004563546575584248952737167410364255", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "2451562695720526255410159479987351458212942773001104345088347268337017907959", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7231969043003068614870036828648884326376597612869950504826116686399812612615", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47273307399297813289148635743141600935869642174900722409535446469272321692250", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27329929942264081140373255677952719498722443205980316837000158653641048193545", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34885886867965298185213628215610826501969641592969627478599392659366672373049", 
+ ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25721535552417582691507498115295027536875255063383254204046680789623453672738", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "33423368799112636093951957186824418831319616314165017049161108153912788138739", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25189833401427047672367010746537756277335149535290712725530069868987176912248", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47958321445457642291354706357838648199395839243428939094705320859005837779683", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31774563161791760397918769926476984438505538732495587514473389056037994152917", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47850858374935538132163048932011117808042772861034650362901884832239133015769", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "12199128582337530870446335645513552144450426981759317374083089598580379977396", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47383053307574057321551022954108360871160286114060097754143644961632203800586", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "13803873123545025453120373087635256640332504728831881021333511396025254579362", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "23837178543685639949465643421241187863068824855019641916163779619876342616994", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31189587782086980076115943766865860302487147542493494232203123657276644861038", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "46499582074722273407549664254855207560785631035023694347608062453482015876732", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "26662253988013080389152043229142922449414668235746968994211290964651963062577", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "7640762545216115926527336572554187073301504782874830986070234716899751469743", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"34498963117721665288552413822781095260286143856801028464976598055529802364121", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "36341838182182173408654982018067474537982027919615372559077851240339996099909", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48300815548868869899847665404889851174998730453507260888165473018017728774364", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "32747300835280625438818285868260843662581362309780466961516941514877133787742", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45303055704904829718227319208556302736092241137651995389242696979297340573498", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "4026222424264187292280393735543795340594832329968061359731987906507864963350", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31228142597140675203545792153829318135052985461159014806691885654670181871080", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16750970802520682106296025351756788120283612961882144708385294780405339907653", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "20590763839978944194181943371062517541785283010773701162056751611994399836929", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "37594126822410828138420253267022641722217448081718524151329800547846230255554", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "2394917091132363277251637490654694317375647018617394697153359156851524846663", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "19737567843628339977867209886872321409023634418218069475717765230353858108711", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42204066697221303743688912704913307336835691602781490921152392223146514657878", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38876322214460807353380405022935203128264986058834077654885314090533204762902", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40247991658021514679970964566577204929744204347802761928836742882323686245178", + ) + .map_err(|_| ()) + .unwrap(), + 
F::from_str( + "39561906161264753200012194261000443020947453081833838411624410505039816555019", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "11166460550589161376470931941636300531100498757195643048019370497750388675069", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "24790565769125580275808568537357949817511203246184311136698467928472412457388", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "11554811916664964398531930051533768691042015283409338870777253388332997487366", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29075039934313065791873216420662295250443756892492849722546053940305352208592", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "39006987007077517708714677464659567763889315959219170058379373515871687446558", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10061028159773233365380812282944217591122833800994251428395880422048305719205", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "25031744215108526550081827540392247511207429766528399786265447570055547629537", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10547501786444119997366494351393198125463151669258024373934193854191592243660", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "41670235909630407351693051359364867863450981844765151303749913478896054145311", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1550363730816174450383908114106000535080436631518311072819950124158318191772", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "45637974422753200101239746614094187617113326582060147125507840703682309645118", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "6370060789642013553710100441744454816236009933679880713369392969145674474801", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35620803994636505121581975698616513950631274399330479421894994077745439358719", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16067112068611935345405777398473590697557771847332060708439576094222338198471", + ) + .map_err(|_| ()) + 
.unwrap(), + F::from_str( + "30951410852793823879089954146089097398960021512400590702302072057244085482851", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "12276812530571706718883327429408937606433993990196477479137283768845651925632", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1005720997907928078270582364714493795153874104882338052955430808063076949858", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "9432539242737555038548151091729061683667954775317685872580932218706555557193", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48144091886915459055979584468520005030090312584666359911350135788203438616374", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "22450925011805294935671299553107487783565275020715623196582033415826118494250", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "28149684753511832845415363706857286714650553416094912809755106801999479476946", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "18984416380626314263255929984317834766173910658882331424593963038930830788043", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48109900230700383470215980955557162221434150244334962594952526792484549956555", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40552374456316069085238865117122876015343906448762540220517521948829052005624", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "11526656751459710456216830286109707102065783384712439837497260759522383867216", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "49248089956591852389575948260598336493822205882486787578128339029114849462975", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "39372448145844604168126556278211607335704246721513006643620494230495045549807", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "47966243235447948688248399775419397863458889989786324048551282844713998917659", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "10668975765352037241237726978611677229760273562923261307501184329169652545894", + ) + 
.map_err(|_| ()) + .unwrap(), + F::from_str( + "39507142063581063677057438880300974612179697912640172055190488555355094497813", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "15638468735129173771932513149618051284064233358516491068231561062569214055646", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "17353431924571233181583986362517131255570540171642397316569828436646526591477", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1113151614126806805734456303419241558692549524321573289125509096780876830226", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "9984420714488068655842637440690086256585448839019918893900233218326556751996", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35272971102390074745676114574216945810721613586667868556864593470832260078394", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "41799703040810741398331928206395151226132289978178889483708245720764196041488", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "23792232833587856422363054609833448590404370086820676679702830143363402049151", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "23400519399894650768950735628474893615670522380725820954092838104223338024750", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "27249609507238299376315178191811683563055616067419555246827701486258799476504", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "31460888976162844111664970745263960397533358231300142520392677905396078538854", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "50541111392701415266654707208995846306449617779331389560072929586596066181460", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16849402175596403120072436766532798469469816167994479896578935023255475833387", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42001541432800637111202252596131403836308927391584450209899896684682298556999", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"7555678132801484327842862804202549162661136010229184979905673538160644182471", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16973896084242861113146860522670036493260039473958217656803484585799718732593", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "42770539705351951841624093062193706601998280964803890045059967451602239435405", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "32967596628774896802370445267419978233370846153035552840999780942126744328522", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29885680756072762644563275495535410002799284565481770605563275267677242393534", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "6767565503770123230340610856538459014642791435134695578527084133222911388089", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38034555907959016273994483610140537209819290169171432036495630086575144181148", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35085508904879414050180652811899618801153816228166027426392174635054369363148", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "36657441819968029240636827111992013950645228690585008802597958876784513528202", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1322579115549556048043817488502303168290944210869541203532619083870097554885", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16882844793625225132625646093399829180269843611668936510323341639265456546672", + ) + .map_err(|_| ()) + .unwrap(), + ], + ), + mds: MdsMatrix::from_elements(vec![ + F::from_str( + "26217937587563095239723870254092982918845276250263818911301829349969290592257", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123009", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123009", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"39326906381344642859585805381139474378267914375395728366952744024953935888385", + ) + .map_err(|_| ()) + .unwrap(), + ]), + alpha: Alpha::Exponent(5), + rounds: RoundNumbers { r_P: 56, r_F: 8 }, + optimized_mds: OptimizedMdsMatrices { + M_hat: SquareMatrix::new( + 1, + 1, + vec![F::from_str( + "39326906381344642859585805381139474378267914375395728366952744024953935888385", + ) + .map_err(|_| ()) + .unwrap()], + ), + v: Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123009", + ) + .map_err(|_| ()) + .unwrap()], + ), + w: Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123009", + ) + .map_err(|_| ()) + .unwrap()], + ), + M_prime: SquareMatrix::new( + 2, + 2, + vec![ + F::from_str("1").map_err(|_| ()).unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "39326906381344642859585805381139474378267914375395728366952744024953935888385", + ) + .map_err(|_| ()) + .unwrap(), + ], + ), + M_doubleprime: SquareMatrix::new( + 2, + 2, + vec![ + F::from_str( + "26217937587563095239723870254092982918845276250263818911301829349969290592257", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123009", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123010", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("1").map_err(|_| ()).unwrap(), + ], + ), + M_inverse: SquareMatrix::new( + 2, + 2, + vec![ + F::from_str("18").map_err(|_| ()).unwrap(), + F::from_str( + "52435875175126190479447740508185965837690552500527637822603658699938581184489", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "52435875175126190479447740508185965837690552500527637822603658699938581184489", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("36").map_err(|_| 
()).unwrap(), + ], + ), + M_hat_inverse: SquareMatrix::new(1, 1, vec![F::from_str("4").map_err(|_| ()).unwrap()]), + M_00: F::from_str( + "26217937587563095239723870254092982918845276250263818911301829349969290592257", + ) + .map_err(|_| ()) + .unwrap(), + M_i: Matrix::new( + 2, + 2, + vec![ + F::from_str("1").map_err(|_| ()).unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "18844142621030738849315867221324380626272463491487726140277535101490822753350", + ) + .map_err(|_| ()) + .unwrap(), + ], + ), + v_collection: vec![ + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123009", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "48066218910532341272827095465837135351216339792150334670720020474943699419137", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "51343461108977728177792579247598758216071999323433312034632749143689860743169", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52162771658589074904033950193039163932285914206254056375610931310876401074177", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52367599295991911585594292929399265361339392926959242460855476852673036156929", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52418806205342620755984378613489290718602762607135538982166613238122194927617", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52431607932680298048581900034511797057918605027179613112494397334484484620289", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52434808364514717371731280389767423642747565632190631645076343358575057043457", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( 
+ "52435608472473322202518625478581330288954805783443386278221829864597700149249", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435808499462973410215461750784806950506615821256574936508201491103360925697", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435858506210386212139670818835676115894568330709872101079794397729776119809", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435871007897239412620723085848393407241556458073196392222692624386379918337", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435874133318952712740986152601572730078303489914027465008417181050530867969", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435874914674381037771051919289867560787490247874235233204848320216568605377", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435875110013238119028568360961941268464786937364287175253956105008078039729", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435875158847952389342947471379959695384111109736800160766233051205955398317", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435875171056630956921542248984464302113942152829928407144302287755424737964", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "13108968792764157739230385562246116075528485538207482101786075571938856184491", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "16386210991972587054669531517608020478304759509683779981097433567969359342251", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17205521541774694383529318006448496578998828002552854450925273066976985131691", + ) + .map_err(|_| ()) + .unwrap()], + ), + 
Matrix::new( + 1, + 1, + vec![F::from_str( + "17410349179225221215744264628658615604172345125770123068382232941728891579051", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17461556088587852923798001284211145360465724406574440222746472910416868190891", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17474357815928510850811435448099277799539069226775519511337532902588862343851", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17477558247763675332564793989071310909307405431825789333485297900631860882091", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478358355722466453003133624314319186749489483088356789022239150142610516651", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478558382712164233112718533125071256110010495903998652906474462520297925291", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478608389459588678140114760327759273450140749107909118877533290614719777451", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478620891146444789396963817128431277785173312408886735370297997638325240491", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478624016568158817211176081328599278868931453234131139493489174394226606251", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478624797923587324164729147378641279139870988440442240524286968583201947691", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478624993262444450903117413891151779207605872242020015781986417130445783051", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478625042097158732587714480519279404224539593192414459596411279267256741891", + ) 
+ .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "17478625054305837303008863747176311310478773023430013070550017494801459481601", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "43696562644921102185338021317933552205887607631253231634590248398654300758785", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "50251047042574918405920310710622862429739816283209036275600306124617511078081", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "51889668141988372461065883058795189985702868446197987435852820556108313657905", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52299323416841735974852276145838271874693631486945225225915949163981014302861", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52401737235555076853298874417599042346941322247132034673431731315949189464100", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "13100434308888769213324718604399760586735330561783008668357932828987297366025", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "42602014958566835162916985032239414524951747015841480534042227232200760229891", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "23759472533423256410591181385106345090660574879092279589161471483034835353601", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "45266774514700456962233600727416060650933058095168798264243111895712644726785", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "50643600010019757100144205562993489541001178899187927933013521998882097070081", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + 
"51987806383849582134621856771887846763518209100192710350206124524674460155905", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52323857977307038393241269574111436069147466650443905954504275156122550927361", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52407870875671402457896122774667333395554781038006704855578812813984573620225", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52428874100262493474059836074806307727156609634897404580847447228450079293441", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52434124906410266228100764399841051310057066784120079512164605832066455711745", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435437607947209416610996481099737205782181071425748244993895482970549816321", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435765783331445213738554501414408679713459643252165428201217895696573342465", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435847827177504163020444006493076548196279286208769724003048498878079224001", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435868338139018900340916382762743515316984196947920797953506149673455694385", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435873465879397584671034476830160257097160424632708566441120562372299811981", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "52435874747814492255753564000347014442542204481553905508563024165547010841380", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "13108968686953623063938391000086753610635551120388476377140756041386752710345", + ) + .map_err(|_| ()) + .unwrap()], + ), + 
Matrix::new( + 1, + 1, + vec![F::from_str( + "42604148553083048625570403131161162780926802155492847461237933035300624065971", + ) + .map_err(|_| ()) + .unwrap()], + ), + ], + w_hat_collection: vec![ + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123010", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123014", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123030", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123094", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054123350", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054124374", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054128470", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054144854", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054210390", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959054472534", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + 
"34957250116750793652965160338790643891793701667018425215069105799959055521110", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959059715414", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959076492630", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959143601494", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799959412036950", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799960485778774", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799964780746070", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105799981960615254", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105800050680091990", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105800325557998934", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105801425069626710", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105805823116137814", + ) + .map_err(|_| ()) + .unwrap()], + ), + 
Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105823415302182230", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069105893784046359894", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069106175259023070550", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069107301158929913174", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069111804758557283670", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069129819157066765654", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069201876751104693590", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215069490107127256405334", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215070643028631863252310", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215075254714650290640214", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215093701458724000191830", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215167488435018838398294", + ) 
+ .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425215462636340198191224150", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425216643227960915602527574", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425221365594443785247741270", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425240255060375263828596054", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425315812924101178152015190", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018425618044379004835445691734", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018426826970198619464620397910", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018431662673477077981319222614", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018451005486590912048114521430", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018528376739046248315295716694", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667018837861748867593384020497750", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + 
"34957250116750793652965160338790643891793701667020075801788152973658919621974", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667025027561945294494758516118870", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667044834602573860579156902106454", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667124062765088124916750446056790", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701667440975415145182267124621858134", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701668708626015373411668621325063510", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701673779228416286329274608137885014", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701694061638019937999698555389171030", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793701775191276434544681394344394315094", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793702099709830092971408177500414891350", + ) + .map_err(|_| ()) + .unwrap()], + ), + Matrix::new( + 1, + 1, + vec![F::from_str( + "34957250116750793652965160338790643891793703397784044726678315310124497196374", + ) + .map_err(|_| ()) + .unwrap()], + ), + ], + }, + optimized_arc: OptimizedArcMatrix::new( + 64, + 2, + vec![ + F::from_str( + "35174739893055911104493616029378130908017657834702731071195911003169112450229", + ) + 
.map_err(|_| ()) + .unwrap(), + F::from_str( + "40230473166484073181383530626136429631051240172158259323832118663695222064618", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "44839971797550275719608927493602345619671796219131839128512847210863471539893", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "48611564661854252146943435349048894917868766542696555144699929229862960415826", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "40685922923883326639435460208454299274336241847181916712959630054792010757353", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "15664326207051802832937062272185115402684860397390155009854115660846132411502", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "20394588293088308460232807257084465383971903994422965392216628021702933756783", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38072899122370719412288433912125900148799524273752876684473712699262778897654", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "49322204581160802920728528441668898478821361914594574984125707955891751339030", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "11546720471100324356589604343765555296510298649382063718189403564461171492863", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "36110500355375093385871324309901194334631381714439614527255597369650392849542", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "3756733109852649396584228778466872196362658360207834645541184763961143083412", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "20251607517129761800297717300935980834606026680555472569470067927197814159058", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "5239353656737021436485173734268526002414965521861452100759532275936501964328", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + 
"21300582910621453970075769970860650148943961329345636625818996955442242699131", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "21774765045116391148518722696232516163747969484119487964534360379873683242474", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "32722020057382082921329627018209709241475223852823289284428837414575534351309", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "33794442154315934695685025730187059000350644284482738453096808368436585150449", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "33356374759281451907993729503609407203078972387590420010346940719085856704600", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "16536766136174515568748693372704332161755684369233290734723311101392999671135", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "8155200347229826422648196061952312662995020845960673032039978897344425954944", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "41985238868657599524996779043300115443521336708491497391680518882421278207729", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "2063843216042971340057236348680603623021991141192327802421586125773152594966", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "19356199834277755257103978861422834427804178278770847736951931659601323041542", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "29407672119923084920129945390192575072371921764906862646182735700097611522286", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + 
"23210564968750306632117638368081722417964237341952338866982652674742521848757", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "8919995046928353282196218782858660366384850716988440303338558248538040137799", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "19043131128131902196810002314394958855517905924691850071099334468401919991484", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "25372870640639931691894472507697938527971989245889491446494524936601040512930", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "28282246530559666639248292418150612779488812032571757345639609704112262688983", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "4783363587969676537071285052282175798266183157943408226768189618569177474295", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "8076640637611150676334686135767012090042738255090463961889429353861598391372", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "24983375671346681151260101878347553784741288686089866276697910000786751968362", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "31553063832521567697657525429909509843798042381162189406532027953053159867207", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "19397104375854813327815726265263423471142957336141256772200649485967743774458", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "16725724823339561221215536793048178667667680016815450011449579154320911341709", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + 
"29033205376761356766961278958992230640915887129936846622211162138860547139883", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "38925727287210137779737577678032857716007633840925015412872642147337070197265", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "13054654952792030513819618373016885831657266413665857293819158146045635067324", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "11180520252272331206279504788346032423267894347729733535225909704781507388571", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "7829946038227950055107016972108651685558876858831437098551135271673396051934", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "12701392201570649781537742561621512715498088373227761999475504609950261026304", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "23110137476929231098068588344230895315869860683132386592399445163537587938902", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "34695385667367102489869345055605545198093190593541852158466006956870675273759", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "12473323874126447804974523548032043130473262290187841938491511255009575125315", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "33244171987325668610688838482180693579479913109349074619790699084518395114481", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "27800393704277881477423349646564407645504532574407056486674972536299508573197", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + 
"38745941841134632283836831790084585919738726247813020052320436485069697160134", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "4222833134514534672312302121120063621259334942025065036721108691056782902710", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "2485253499287676466887039925919700883876238304552596149921625457687783981441", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "25376906928048350822232005681485535724988673469685171030423527493301156576940", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "50342721676243810030803571465468547773052568618511402750629061381943937849683", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "50049567928418632800795449103016632592486776140307034477479632433833610837266", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "23363897037620979716239578483167524838213385084153054827737272694281707133655", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "42704349801891305094989489174556449021777358796153454937762648266288696875551", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "15237078269491179837022602841512824373125754318519659312626032716476523314968", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "7834537534669842390771199937679522681049234016864851395164092776167283317372", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "40845753633682491334484157504358463583021595599904903653237711863475293554700", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + 
"30415461155839336041159980430244598228679743472933918944484463593999625263250", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "45446374854682768122749806214133032507584129094447282144188920433140420665338", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "44511327229393971496561348546051674374277199034903648290060869960535543887325", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "31765384944890114175945271689093192004082637054270389724882306488698748316643", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "11724725910654336698843657420057198898582989110234170277283715189016057454856", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "35333898667082217702239272910553250687762082480432049530115838213528273162930", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "13139429039691322549933623030015220158389702383341486257474725505081327320136", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str("0").map_err(|_| ()).unwrap(), + F::from_str( + "32967596628774896802370445267419978233370846153035552840999780942126744328522", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "29885680756072762644563275495535410002799284565481770605563275267677242393534", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "6767565503770123230340610856538459014642791435134695578527084133222911388089", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "38034555907959016273994483610140537209819290169171432036495630086575144181148", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "35085508904879414050180652811899618801153816228166027426392174635054369363148", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + 
"36657441819968029240636827111992013950645228690585008802597958876784513528202", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "1322579115549556048043817488502303168290944210869541203532619083870097554885", + ) + .map_err(|_| ()) + .unwrap(), + F::from_str( + "16882844793625225132625646093399829180269843611668936510323341639265456546672", + ) + .map_err(|_| ()) + .unwrap(), + ], + ), + } + } + let rate1_config = rate_1::(); + let arkwork_mds = (0..rate1_config.mds.0 .0.n_rows) + .map(|i| rate1_config.mds.0 .0.row_vector(i).elements) + .collect::>>(); + let arkwork_ark = (0..rate1_config.arc.0.n_rows) + .map(|i| rate1_config.arc.0.row_vector(i).elements).collect::>>(); + PoseidonConfig { + full_rounds: rate1_config.rounds.r_F, + partial_rounds: rate1_config.rounds.r_P, + alpha: match rate1_config.alpha { + poseidon_parameters::Alpha::Exponent(alpha) => alpha as u64, + _ => panic!("Alpha is not exponent") + }, + ark: arkwork_ark, + mds: arkwork_mds, + rate: 1, // only hash one at a time + capacity: 1, // ?? + } +} + lazy_static! 
{ pub static ref POSEIDON_PARAMETERS_FR_377: PoseidonConfig = poseidon_params(); } diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index b449fcc8..4ac33768 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -97,28 +97,22 @@ impl PoseidonTranscript { pub trait TranscriptWriter { fn write_to_transcript(&self, transcript: &mut PoseidonTranscript); } -//pub trait AppendToPoseidon { -// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript); -//} -// -//impl AppendToPoseidon for CompressedGroup { -// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { -// transcript.append_point(self); -// } -//} -// -//impl AppendToPoseidon for Commitment { -// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { -// let mut bytes = Vec::new(); -// self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); -// transcript.append_bytes(&bytes); -// } -//} -// -//impl AppendToPoseidon for G1Affine { -// fn append_to_poseidon(&self, transcript: &mut PoseidonTranscript) { -// let mut bytes = Vec::new(); -// self.serialize_with_mode(&mut bytes, Compress::Yes).unwrap(); -// transcript.append_bytes(&bytes); -//} -// + +#[cfg(test)] +mod test { + use ark_bls12_381::Fr; + use ark_ff::PrimeField; + use poseidon_paramgen; + #[test] + fn poseidon_parameters_generation() { + print_modulus::(); + println!( + "{}", + poseidon_paramgen::poseidon_build::compile::(128, vec![2], Fr::MODULUS, true) + ); + } + + fn print_modulus() { + println!("modulus: {:?}", F::MODULUS); + } +} diff --git a/src/r1csproof.rs b/src/r1csproof.rs index fc747f52..0d85e407 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -512,16 +512,30 @@ mod tests { } #[test] - pub fn check_r1cs_proof() { + fn check_r1cs_proof_bls12_377() { + let params = poseidon_params(); + check_r1cs_proof::(params); + } + + #[test] + fn check_r1cs_proof_bls12_381() { + let params = crate::parameters::poseidon_params_bls12381(); + check_r1cs_proof::(params); + 
} + fn check_r1cs_proof

(params: PoseidonConfig) + where + P: Pairing, + P::ScalarField: Absorb, + { let num_vars = 16; let num_cons = num_vars; let num_inputs = 3; let (inst, vars, input) = - R1CSInstance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + R1CSInstance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let gens = R1CSGens::::new(b"test-m", num_cons, num_vars); + let gens = R1CSGens::

::new(b"test-m", num_cons, num_vars); - let params = poseidon_params(); + //let params = poseidon_params(); // let mut random_tape = RandomTape::new(b"proof"); let mut prover_transcript = PoseidonTranscript::new(¶ms); @@ -542,7 +556,7 @@ mod tests { &inst_evals, &mut verifier_transcript, &gens, - poseidon_params() + params, ) .is_ok()); } From bd4c32a84ee1523007db5d8eda2ab3a22f0af229 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Thu, 9 Feb 2023 13:59:18 +0000 Subject: [PATCH 33/64] stuff --- benches/snark.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/benches/snark.rs b/benches/snark.rs index 3fc6f34f..c083fa6c 100644 --- a/benches/snark.rs +++ b/benches/snark.rs @@ -114,6 +114,7 @@ fn snark_verify_benchmark(c: &mut Criterion) { black_box(&inputs), black_box(&mut verifier_transcript), black_box(&gens) + black_box(poseidon_params()) ) .is_ok()); }); From c64004fcb29fcc733a7bb42c6c096f43b6f66e18 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 9 Feb 2023 13:07:46 +0100 Subject: [PATCH 34/64] trying to bring ark-blst to testudo --- Cargo.toml | 3 +++ src/parameters.rs | 1 + src/r1csproof.rs | 7 ++++++- 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 4ab34322..da35a3c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,6 +23,7 @@ ark-ec = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0"} ark-bls12-377 = { version = "^0.4.0", features = ["r1cs","curve"] } ark-bls12-381 = { version = "^0.4.0", features = ["curve"] } +ark-blst = { git = "https://github.com/nikkolasg/ark-blst" } ark-serialize = { version = "^0.4.0", features = ["derive"] } ark-crypto-primitives = {version = "^0.4.0", features = ["sponge","r1cs","snark"] } ark-r1cs-std = { version = "^0.4.0", default-features = false } @@ -85,3 +86,5 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/pst_on_g2"} 
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", features = ["r1cs"] } +# [patch."https://github.com/nikkolasg/ark-blst"] +# ark-blst = { path = "../ark-blst" } \ No newline at end of file diff --git a/src/parameters.rs b/src/parameters.rs index ce094d86..428ea371 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -175,6 +175,7 @@ pub fn poseidon_params() -> PoseidonConfig { ) } +// Generated from poseidon_transcript::test::poseidon_parameters_generation pub fn poseidon_params_bls12381() -> PoseidonConfig { use ark_ff::PrimeField; diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 0d85e407..be1743b9 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -432,7 +432,7 @@ where #[cfg(test)] mod tests { - use crate::parameters::poseidon_params; + use crate::parameters::{poseidon_params, poseidon_params_bls12381}; use super::*; type F = ark_bls12_377::Fr; @@ -511,6 +511,11 @@ mod tests { assert!(is_sat); } + #[test] + fn check_r1cs_proof_ark_blst() { + let params = poseidon_params_bls12381(); + check_r1cs_proof::(params); + } #[test] fn check_r1cs_proof_bls12_377() { let params = poseidon_params(); From d2e313626a6aeed54fd26591b453554712f602fb Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 9 Feb 2023 14:12:11 +0100 Subject: [PATCH 35/64] correcting random implementation --- Cargo.toml | 10 ++++++-- src/parameters.rs | 59 +++++++++++++++++++++++++++++++++++++++++++---- src/r1csproof.rs | 7 +++--- 3 files changed, 66 insertions(+), 10 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index da35a3c5..d570533e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,7 +23,7 @@ ark-ec = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0"} ark-bls12-377 = { version = "^0.4.0", features = ["r1cs","curve"] } ark-bls12-381 = { version = "^0.4.0", features = ["curve"] } -ark-blst = { git = "https://github.com/nikkolasg/ark-blst" } +ark-blst = { git = "https://github.com/nikkolasg/ark-blst" , branch = "feat/absorb" } 
ark-serialize = { version = "^0.4.0", features = ["derive"] } ark-crypto-primitives = {version = "^0.4.0", features = ["sponge","r1cs","snark"] } ark-r1cs-std = { version = "^0.4.0", default-features = false } @@ -36,6 +36,8 @@ ark-poly = {version = "^0.4.0"} poseidon-paramgen = { git = "https://github.com/nikkolasg/poseidon377", branch = "feat/v0.4" } poseidon-parameters = { git = "https://github.com/nikkolasg/poseidon377", branch = "feat/v0.4" } +# Needed for ark-blst +blstrs = { version = "^0.6.1", features = ["__private_bench"] } lazy_static = "1.4.0" rand = { version = "0.8", features = [ "std", "std_rng" ] } @@ -85,6 +87,10 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri [patch.crates-io] ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/pst_on_g2"} ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", features = ["r1cs"] } +blstrs = { git = "https://github.com/nikkolasg/blstrs", branch = "feat/arkwork" } +ark-ec = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } +ark-ff = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } +ark-serialize = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } -# [patch."https://github.com/nikkolasg/ark-blst"] +# [patch."https://github.com/nikkolasg/ark-blst"] # ark-blst = { path = "../ark-blst" } \ No newline at end of file diff --git a/src/parameters.rs b/src/parameters.rs index 428ea371..ce5b5a66 100644 --- a/src/parameters.rs +++ b/src/parameters.rs @@ -2407,21 +2407,70 @@ pub fn poseidon_params_bls12381() -> PoseidonConfig { .map(|i| rate1_config.mds.0 .0.row_vector(i).elements) .collect::>>(); let arkwork_ark = (0..rate1_config.arc.0.n_rows) - .map(|i| rate1_config.arc.0.row_vector(i).elements).collect::>>(); + .map(|i| rate1_config.arc.0.row_vector(i).elements) + .collect::>>(); PoseidonConfig { full_rounds: rate1_config.rounds.r_F, partial_rounds: 
rate1_config.rounds.r_P, - alpha: match rate1_config.alpha { - poseidon_parameters::Alpha::Exponent(alpha) => alpha as u64, - _ => panic!("Alpha is not exponent") + alpha: match rate1_config.alpha { + poseidon_parameters::Alpha::Exponent(alpha) => alpha as u64, + _ => panic!("Alpha is not exponent"), }, ark: arkwork_ark, mds: arkwork_mds, - rate: 1, // only hash one at a time + rate: 1, // only hash one at a time capacity: 1, // ?? } } +pub trait PoseidonConfiguration: ark_ff::PrimeField { + fn poseidon_params() -> PoseidonConfig; +} + +impl PoseidonConfiguration for ark_bls12_381::Fr { + fn poseidon_params() -> PoseidonConfig { + poseidon_params_bls12381() + } +} + +impl PoseidonConfiguration for ark_bls12_377::Fr { + fn poseidon_params() -> PoseidonConfig { + poseidon_params() + } +} + +impl PoseidonConfiguration for ark_blst::Scalar { + fn poseidon_params() -> PoseidonConfig { + let config = poseidon_params_bls12381(); + let arks = config + .ark + .iter() + .map(|v| { + v.iter() + .map(|&e| ark_blst::Scalar::from(e)) + .collect::>() + }) + .collect(); + let mdss = config + .mds + .iter() + .map(|v| { + v.iter() + .map(|&e| ark_blst::Scalar::from(e)) + .collect::>() + }) + .collect(); + PoseidonConfig { + full_rounds: config.full_rounds, + partial_rounds: config.partial_rounds, + alpha: config.alpha, + ark: arks, + mds: mdss, + rate: config.rate, + capacity: config.capacity, + } + } +} lazy_static! 
{ pub static ref POSEIDON_PARAMETERS_FR_377: PoseidonConfig = poseidon_params(); } diff --git a/src/r1csproof.rs b/src/r1csproof.rs index be1743b9..25617549 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -511,20 +511,21 @@ mod tests { assert!(is_sat); } + use crate::parameters::PoseidonConfiguration; #[test] fn check_r1cs_proof_ark_blst() { - let params = poseidon_params_bls12381(); + let params = ark_blst::Scalar::poseidon_params(); check_r1cs_proof::(params); } #[test] fn check_r1cs_proof_bls12_377() { - let params = poseidon_params(); + let params = ark_bls12_377::Fr::poseidon_params(); check_r1cs_proof::(params); } #[test] fn check_r1cs_proof_bls12_381() { - let params = crate::parameters::poseidon_params_bls12381(); + let params = ark_bls12_381::Fr::poseidon_params(); check_r1cs_proof::(params); } fn check_r1cs_proof

(params: PoseidonConfig) From 853799e80178f08dd4b9cd95566a8b0bc8b0e5c1 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 9 Feb 2023 15:22:28 +0100 Subject: [PATCH 36/64] with square in place --- Cargo.toml | 4 ++-- src/commitments.rs | 15 +++++++-------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d570533e..8ed8e8b5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -92,5 +92,5 @@ ark-ec = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owne ark-ff = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } ark-serialize = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } -# [patch."https://github.com/nikkolasg/ark-blst"] -# ark-blst = { path = "../ark-blst" } \ No newline at end of file +[patch."https://github.com/nikkolasg/ark-blst"] +ark-blst = { path = "../ark-blst" } \ No newline at end of file diff --git a/src/commitments.rs b/src/commitments.rs index e67fac9a..458a8397 100644 --- a/src/commitments.rs +++ b/src/commitments.rs @@ -1,8 +1,9 @@ +use crate::ark_std::UniformRand; use crate::parameters::*; -use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; - use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_crypto_primitives::sponge::CryptographicSponge; +use ark_ec::{CurveGroup, VariableBaseMSM}; +use rand::SeedableRng; use std::ops::Mul; #[derive(Debug, Clone)] @@ -23,12 +24,10 @@ impl MultiCommitGens { let gens = (0..=n) .map(|_| { - let mut el_aff: Option = None; - while el_aff.is_none() { - let uniform_bytes = sponge.squeeze_bytes(64); - el_aff = G::Affine::from_random_bytes(&uniform_bytes); - } - el_aff.unwrap().clear_cofactor() + let mut uniform_bytes = [0u8; 32]; + uniform_bytes.copy_from_slice(&sponge.squeeze_bytes(32)[..]); + let mut prng = rand::rngs::StdRng::from_seed(uniform_bytes); + G::Affine::rand(&mut prng) }) .collect::>(); From d5cbe1ba18ed1131894b5e08b587cbe3ce25328b Mon Sep 17 00:00:00 2001 From: nikkolasg Date: 
Thu, 9 Feb 2023 17:20:03 +0100 Subject: [PATCH 37/64] works with blst --- Cargo.toml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 8ed8e8b5..61c3cea8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -90,7 +90,4 @@ ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", features = ["r1c blstrs = { git = "https://github.com/nikkolasg/blstrs", branch = "feat/arkwork" } ark-ec = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } ark-ff = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } -ark-serialize = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } - -[patch."https://github.com/nikkolasg/ark-blst"] -ark-blst = { path = "../ark-blst" } \ No newline at end of file +ark-serialize = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } \ No newline at end of file From 4a85c78375d817eb6c619cd4129c75ce76dbc400 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Thu, 9 Feb 2023 17:20:03 +0100 Subject: [PATCH 38/64] works with blst --- src/nizk/mod.rs | 7 ++++--- src/sparse_mlpoly.rs | 6 +++--- src/transcript.rs | 2 +- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/nizk/mod.rs b/src/nizk/mod.rs index 7f416fa7..ffafabbe 100644 --- a/src/nizk/mod.rs +++ b/src/nizk/mod.rs @@ -37,10 +37,11 @@ pub struct DotProductProofLog { z2: G::ScalarField, } -impl DotProductProofLog +impl DotProductProofLog where -G: CurveGroup, -G::ScalarField: Absorb { + G: CurveGroup, + G::ScalarField: Absorb, +{ pub fn prove( gens: &DotProductProofGens, transcript: &mut PoseidonTranscript, diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index 1a06a752..5df7b267 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -1431,10 +1431,10 @@ pub struct SparseMatPolyEvalProof { poly_eval_network_proof: PolyEvalNetworkProof, } -impl SparseMatPolyEvalProof +impl SparseMatPolyEvalProof where -E: Pairing, -E::ScalarField: Absorb 
+ E: Pairing, + E::ScalarField: Absorb, { fn equalize( rx: &[E::ScalarField], diff --git a/src/transcript.rs b/src/transcript.rs index d0f119ac..aaec0950 100644 --- a/src/transcript.rs +++ b/src/transcript.rs @@ -13,4 +13,4 @@ pub trait Transcript { } } -pub use crate::poseidon_transcript::TranscriptWriter; \ No newline at end of file +pub use crate::poseidon_transcript::TranscriptWriter; From ab9a413e5c238f49d730c38334f1d7988f4f61cb Mon Sep 17 00:00:00 2001 From: Volker Mische Date: Mon, 13 Feb 2023 10:26:28 +0100 Subject: [PATCH 39/64] fix: don't require nightly Rust With removing the `test` feature, it can also be built with a stable Rust release and don't require a nightly Rust version. --- src/lib.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index e25a2ad5..8ec3d1b7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,5 @@ #![allow(non_snake_case)] #![doc = include_str!("../README.md")] -#![feature(test)] #![allow(clippy::assertions_on_result_states)] extern crate ark_std; @@ -10,7 +9,6 @@ extern crate lazy_static; extern crate merlin; extern crate rand; extern crate sha3; -extern crate test; #[macro_use] extern crate json; From 01630832ac9c55625c879b1d45e0eecaf11dd8aa Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 14 Feb 2023 17:02:38 +0100 Subject: [PATCH 40/64] using ark-blst main branch --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 61c3cea8..f38dcf89 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,7 +23,7 @@ ark-ec = { version = "^0.4.0", default-features = false } ark-std = { version = "^0.4.0"} ark-bls12-377 = { version = "^0.4.0", features = ["r1cs","curve"] } ark-bls12-381 = { version = "^0.4.0", features = ["curve"] } -ark-blst = { git = "https://github.com/nikkolasg/ark-blst" , branch = "feat/absorb" } +ark-blst = { git = "https://github.com/nikkolasg/ark-blst" } ark-serialize = { version = "^0.4.0", features = ["derive"] } ark-crypto-primitives = 
{version = "^0.4.0", features = ["sponge","r1cs","snark"] } ark-r1cs-std = { version = "^0.4.0", default-features = false } From e7c843283a9b8a7aaebec35c45aefa58179a69b2 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Mon, 13 Feb 2023 18:07:09 +0000 Subject: [PATCH 41/64] started cleanup and added testudo benchmark --- Cargo.toml | 10 +- benches/nizk.rs | 148 ----------- benches/r1cs.rs | 64 ----- benches/snark.rs | 136 ---------- benches/testudo.rs | 127 +++++++++ examples/cubic.rs | 21 +- profiler/nizk.rs | 117 ++++----- profiler/snark.rs | 111 ++++---- src/constraints.rs | 19 +- src/lib.rs | 521 +------------------------------------ src/mipp.rs | 6 +- src/poseidon_transcript.rs | 2 +- src/r1csproof.rs | 344 +++++++++++++----------- src/testudo.rs | 349 +++++++++++++++++++++++++ 14 files changed, 821 insertions(+), 1154 deletions(-) delete mode 100644 benches/nizk.rs delete mode 100644 benches/r1cs.rs delete mode 100644 benches/snark.rs create mode 100644 benches/testudo.rs create mode 100644 src/testudo.rs diff --git a/Cargo.toml b/Cargo.toml index 61c3cea8..8a6fc3cd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -64,15 +64,7 @@ name = "nizk" path = "profiler/nizk.rs" [[bench]] -name = "snark" -harness = false - -[[bench]] -name = "nizk" -harness = false - -[[bench]] -name = "r1cs" +name = "testudo" harness = false debug = true diff --git a/benches/nizk.rs b/benches/nizk.rs deleted file mode 100644 index 71a67a19..00000000 --- a/benches/nizk.rs +++ /dev/null @@ -1,148 +0,0 @@ -extern crate core; -extern crate criterion; -extern crate digest; -extern crate libspartan; -extern crate merlin; -extern crate sha3; - -use std::time::{Duration, SystemTime}; - -use libspartan::{ - parameters::POSEIDON_PARAMETERS_FR_377, poseidon_transcript::PoseidonTranscript, Instance, - NIZKGens, NIZK, -}; - -use criterion::*; - -fn nizk_prove_benchmark(c: &mut Criterion) { - for &s in [24, 28, 30].iter() { - let mut group = c.benchmark_group("R1CS_prove_benchmark"); - - let num_vars = 
(2_usize).pow(s as u32); - let num_cons = num_vars; - let num_inputs = 10; - let start = SystemTime::now(); - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let end = SystemTime::now(); - let duration = end.duration_since(start).unwrap(); - println!( - "Generating r1cs instance with {} constraints took {} ms", - num_cons, - duration.as_millis() - ); - let gens = NIZKGens::new(num_cons, num_vars, num_inputs); - - let name = format!("R1CS_prove_{}", num_vars); - group - .measurement_time(Duration::from_secs(60)) - .bench_function(&name, move |b| { - b.iter(|| { - let mut prover_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - NIZK::prove( - black_box(&inst), - black_box(vars.clone()), - black_box(&inputs), - black_box(&gens), - black_box(&mut prover_transcript), - ); - }); - }); - group.finish(); - } -} - -fn nizk_verify_benchmark(c: &mut Criterion) { - for &s in [4, 6, 8, 10, 12, 16, 20, 24, 28, 30].iter() { - let mut group = c.benchmark_group("R1CS_verify_benchmark"); - - let num_vars = (2_usize).pow(s as u32); - let num_cons = num_vars; - // these are the public io - let num_inputs = 10; - let start = SystemTime::now(); - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let end = SystemTime::now(); - let duration = end.duration_since(start).unwrap(); - println!( - "Generating r1cs instance with {} constraints took {} ms", - num_cons, - duration.as_millis() - ); - let gens = NIZKGens::new(num_cons, num_vars, num_inputs); - // produce a proof of satisfiability - let mut prover_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript); - - let name = format!("R1CS_verify_{}", num_cons); - group - .measurement_time(Duration::from_secs(60)) - .bench_function(&name, move |b| { - b.iter(|| { - let mut verifier_transcript = 
PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - assert!(proof - .verify( - black_box(&inst), - black_box(&inputs), - black_box(&mut verifier_transcript), - black_box(&gens), - ) - .is_ok()); - }); - }); - group.finish(); - } -} - -fn nizk_verify_groth16_benchmark(c: &mut Criterion) { - for &s in [4, 6, 8, 10, 12, 16, 20, 24, 28, 30].iter() { - let mut group = c.benchmark_group("R1CS_verify_groth16_benchmark"); - - let num_vars = (2_usize).pow(s as u32); - let num_cons = num_vars; - // these are the public io - let num_inputs = 10; - let start = SystemTime::now(); - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let end = SystemTime::now(); - let duration = end.duration_since(start).unwrap(); - println!( - "Generating r1cs instance with {} constraints took {} ms", - num_cons, - duration.as_millis() - ); - // produce a proof of satisfiability - let mut prover_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - let gens = NIZKGens::new(num_cons, num_vars, num_inputs); - let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript); - - let name = format!("R1CS_verify_groth16_{}", num_cons); - group - .measurement_time(Duration::from_secs(60)) - .bench_function(&name, move |b| { - b.iter(|| { - let mut verifier_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - assert!(proof - .verify_groth16( - black_box(&inst), - black_box(&inputs), - black_box(&mut verifier_transcript), - black_box(&gens) - ) - .is_ok()); - }); - }); - group.finish(); - } -} - -fn set_duration() -> Criterion { - Criterion::default().sample_size(2) -} - -criterion_group! 
{ -name = benches_nizk; -config = set_duration(); -targets = nizk_prove_benchmark, nizk_verify_benchmark, nizk_verify_groth16_benchmark -} - -criterion_main!(benches_nizk); diff --git a/benches/r1cs.rs b/benches/r1cs.rs deleted file mode 100644 index bab38bf4..00000000 --- a/benches/r1cs.rs +++ /dev/null @@ -1,64 +0,0 @@ -use std::time::Instant; - -use libspartan::{ - parameters::POSEIDON_PARAMETERS_FR_377, poseidon_transcript::PoseidonTranscript, Instance, - NIZKGens, NIZK, -}; -use serde::Serialize; - -#[derive(Default, Clone, Serialize)] -struct BenchmarkResults { - power: usize, - input_constraints: usize, - spartan_verifier_circuit_constraints: usize, - spartan_proving_time: u128, - groth16_setup_time: u128, - groth16_proving_time: u128, - testudo_verification_time: u128, - testudo_proving_time: u128, -} - -fn main() { - let mut writer = csv::Writer::from_path("testudo.csv").expect("unable to open csv writer"); - for &s in [10, 12, 14, 16, 18, 20, 22, 24, 26].iter() { - println!("Running for {} inputs", s); - let mut br = BenchmarkResults::default(); - let num_vars = (2_usize).pow(s as u32); - let num_cons = num_vars; - br.power = s; - br.input_constraints = num_cons; - let num_inputs = 10; - - let start = Instant::now(); - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let _duration = start.elapsed().as_millis(); - let mut prover_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - - let gens = NIZKGens::new(num_cons, num_vars, num_inputs); - - let start = Instant::now(); - let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript); - let duration = start.elapsed().as_millis(); - br.spartan_proving_time = duration; - - let mut verifier_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - let res = proof.verify(&inst, &inputs, &mut verifier_transcript, &gens); - assert!(res.is_ok()); - br.spartan_verifier_circuit_constraints = res.unwrap(); - - let mut 
verifier_transcript = PoseidonTranscript::new(&POSEIDON_PARAMETERS_FR_377); - let res = proof.verify_groth16(&inst, &inputs, &mut verifier_transcript, &gens); - assert!(res.is_ok()); - - let (ds, dp, dv) = res.unwrap(); - br.groth16_setup_time = ds; - br.groth16_proving_time = dp; - - br.testudo_proving_time = br.spartan_proving_time + br.groth16_proving_time; - br.testudo_verification_time = dv; - writer - .serialize(br) - .expect("unable to write results to csv"); - writer.flush().expect("wasn't able to flush"); - } -} diff --git a/benches/snark.rs b/benches/snark.rs deleted file mode 100644 index c083fa6c..00000000 --- a/benches/snark.rs +++ /dev/null @@ -1,136 +0,0 @@ -extern crate libspartan; -extern crate merlin; - -use libspartan::{ - parameters::poseidon_params, poseidon_transcript::PoseidonTranscript, Instance, SNARKGens, SNARK, -}; - -use criterion::*; - -fn snark_encode_benchmark(c: &mut Criterion) { - for &s in [10, 12, 16].iter() { - let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Logarithmic); - let mut group = c.benchmark_group("SNARK_encode_benchmark"); - group.plot_config(plot_config); - - let num_vars = (2_usize).pow(s as u32); - let num_cons = num_vars; - let num_inputs = 10; - let (inst, _vars, _inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - - // produce public parameters - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_cons); - - // produce a commitment to R1CS instance - let name = format!("SNARK_encode_{}", num_cons); - group.bench_function(&name, move |b| { - b.iter(|| { - SNARK::encode(black_box(&inst), black_box(&gens)); - }); - }); - group.finish(); - } -} - -fn snark_prove_benchmark(c: &mut Criterion) { - for &s in [10, 12, 16].iter() { - let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Logarithmic); - let mut group = c.benchmark_group("SNARK_prove_benchmark"); - group.plot_config(plot_config); - - let num_vars = (2_usize).pow(s as u32); - let 
num_cons = num_vars; - let num_inputs = 10; - - let params = poseidon_params(); - - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - - // produce public parameters - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_cons); - - // produce a commitment to R1CS instance - let (comm, decomm) = SNARK::encode(&inst, &gens); - - // produce a proof - let name = format!("SNARK_prove_{}", num_cons); - group.bench_function(&name, move |b| { - b.iter(|| { - let mut prover_transcript = PoseidonTranscript::new(¶ms); - SNARK::prove( - black_box(&inst), - black_box(&comm), - black_box(&decomm), - black_box(vars.clone()), - black_box(&inputs), - black_box(&gens), - black_box(&mut prover_transcript), - ); - }); - }); - group.finish(); - } -} - -fn snark_verify_benchmark(c: &mut Criterion) { - for &s in [10, 12, 16].iter() { - let plot_config = PlotConfiguration::default().summary_scale(AxisScale::Logarithmic); - let mut group = c.benchmark_group("SNARK_verify_benchmark"); - group.plot_config(plot_config); - - let params = poseidon_params(); - - let num_vars = (2_usize).pow(s as u32); - let num_cons = num_vars; - let num_inputs = 10; - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - - // produce public parameters - let gens = SNARKGens::new(num_cons, num_vars, num_inputs, num_cons); - - // produce a commitment to R1CS instance - let (comm, decomm) = SNARK::encode(&inst, &gens); - - // produce a proof of satisfiability - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SNARK::prove( - &inst, - &comm, - &decomm, - vars, - &inputs, - &gens, - &mut prover_transcript, - ); - - // verify the proof - let name = format!("SNARK_verify_{}", num_cons); - group.bench_function(&name, move |b| { - b.iter(|| { - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify( - black_box(&comm), - black_box(&inputs), - black_box(&mut 
verifier_transcript), - black_box(&gens) - black_box(poseidon_params()) - ) - .is_ok()); - }); - }); - group.finish(); - } -} - -fn set_duration() -> Criterion { - Criterion::default().sample_size(10) -} - -criterion_group! { -name = benches_snark; -config = set_duration(); -targets = snark_verify_benchmark -} - -criterion_main!(benches_snark); diff --git a/benches/testudo.rs b/benches/testudo.rs new file mode 100644 index 00000000..a7c1851b --- /dev/null +++ b/benches/testudo.rs @@ -0,0 +1,127 @@ +use std::time::Instant; + +use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; +use ark_crypto_primitives::sponge::Absorb; +use ark_ec::pairing::Pairing; +use ark_ff::PrimeField; +use ark_serialize::*; +use libspartan::parameters::PoseidonConfiguration; +use libspartan::{ + parameters::poseidon_params, + poseidon_transcript::PoseidonTranscript, + testudo::{self, TestudoGens, TestudoSnark}, + Instance, +}; +use serde::Serialize; + +#[derive(Default, Clone, Serialize)] +struct BenchmarkResults { + power: usize, + input_constraints: usize, + testudo_proving_time: u128, + testudo_verification_time: u128, + sat_proof_size: usize, + eval_proof_size: usize, + total_proof_size: usize, +} + +fn main() { + bench_with_bls12_377(); + // bench_with_bls12_381(); + // bench_with_ark_blst(); +} + +fn bench_with_ark_blst() { + let params = ark_blst::Scalar::poseidon_params(); + testudo_snark_bench::(params, "testudo_blst"); +} + +fn bench_with_bls12_377() { + let params = ark_bls12_377::Fr::poseidon_params(); + testudo_snark_bench::(params, "testudo_bls12_377"); +} + +fn bench_with_bls12_381() { + let params = ark_bls12_381::Fr::poseidon_params(); + testudo_snark_bench::(params, "testudo_bls12_381"); +} + +fn testudo_snark_bench(params: PoseidonConfig, file_name: &str) +where + E: Pairing, + E::ScalarField: PrimeField, + E::ScalarField: Absorb, +{ + let mut writer = csv::Writer::from_path(file_name).expect("unable to open csv writer"); + for &s in [4, 10, 12, 14, 16, 18, 20, 22, 
24, 26].iter() { + println!("Running for {} inputs", s); + let mut br = BenchmarkResults::default(); + let num_vars = (2_usize).pow(s as u32); + let num_cons = num_vars; + br.power = s; + br.input_constraints = num_cons; + let num_inputs = 10; + + let (inst, vars, inputs) = + Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + let mut prover_transcript = PoseidonTranscript::new(¶ms.clone()); + + let gens = TestudoGens::::new(num_cons, num_vars, num_inputs, num_cons, params.clone()); + + let (comm, decomm) = TestudoSnark::::encode(&inst, &gens); + + let start = Instant::now(); + let proof = TestudoSnark::prove( + &inst, + &comm, + &decomm, + vars, + &inputs, + &gens, + &mut prover_transcript, + params.clone(), + ) + .unwrap(); + let duration = start.elapsed().as_millis(); + br.testudo_proving_time = duration; + + let mut sat_proof = Vec::::new(); + proof + .r1cs_verifier_proof + .serialize_with_mode(&mut sat_proof, Compress::Yes) + .unwrap(); + br.sat_proof_size = sat_proof.len(); + + let mut eval_proof = Vec::::new(); + proof + .r1cs_eval_proof + .serialize_with_mode(&mut eval_proof, Compress::Yes) + .unwrap(); + br.eval_proof_size = eval_proof.len(); + + let mut total_proof = Vec::::new(); + proof + .serialize_with_mode(&mut total_proof, Compress::Yes) + .unwrap(); + br.total_proof_size = total_proof.len(); + + let mut verifier_transcript = PoseidonTranscript::new(¶ms.clone()); + let start = Instant::now(); + + let res = proof.verify( + &gens, + &comm, + &inputs, + &mut verifier_transcript, + params.clone(), + ); + assert!(res.is_ok()); + let duration = start.elapsed().as_millis(); + br.testudo_verification_time = duration; + + writer + .serialize(br) + .expect("unable to write results to csv"); + writer.flush().expect("wasn't able to flush"); + } +} diff --git a/examples/cubic.rs b/examples/cubic.rs index 12cfc4c3..1a497661 100644 --- a/examples/cubic.rs +++ b/examples/cubic.rs @@ -11,9 +11,10 @@ use ark_ec::pairing::Pairing; use 
ark_ff::{BigInteger, PrimeField}; use ark_std::{One, UniformRand, Zero}; +use libspartan::testudo::{TestudoGens, TestudoSnark}; use libspartan::{ parameters::poseidon_params, poseidon_transcript::PoseidonTranscript, InputsAssignment, Instance, - SNARKGens, VarsAssignment, SNARK, + VarsAssignment, }; #[allow(non_snake_case)] @@ -129,14 +130,20 @@ fn main() { let params = poseidon_params(); // produce public parameters - let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_non_zero_entries); + let gens = TestudoGens::::new( + num_cons, + num_vars, + num_inputs, + num_non_zero_entries, + params.clone(), + ); // create a commitment to the R1CS instance - let (comm, decomm) = SNARK::encode(&inst, &gens); + let (comm, decomm) = TestudoSnark::encode(&inst, &gens); // produce a proof of satisfiability let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SNARK::prove( + let proof = TestudoSnark::prove( &inst, &comm, &decomm, @@ -144,16 +151,18 @@ fn main() { &assignment_inputs, &gens, &mut prover_transcript, - ); + params.clone(), + ) + .unwrap(); // verify the proof of satisfiability let mut verifier_transcript = PoseidonTranscript::new(¶ms); assert!(proof .verify( + &gens, &comm, &assignment_inputs, &mut verifier_transcript, - &gens, params ) .is_ok()); diff --git a/profiler/nizk.rs b/profiler/nizk.rs index 794b4783..0edfaaa2 100644 --- a/profiler/nizk.rs +++ b/profiler/nizk.rs @@ -1,58 +1,59 @@ -#![allow(non_snake_case)] -#![allow(clippy::assertions_on_result_states)] - -extern crate libspartan; -extern crate merlin; -extern crate rand; - -use ark_serialize::*; -use libspartan::parameters::poseidon_params; -use libspartan::poseidon_transcript::PoseidonTranscript; -use libspartan::{Instance, NIZKGens, NIZK}; - -type F = ark_bls12_377::Fr; -type E = ark_bls12_377::Bls12_377; - -fn print(msg: &str) { - let star = "* "; - println!("{:indent$}{}{}", "", star, msg, indent = 2); -} - -pub fn main() { - // the list of number of variables (and 
constraints) in an R1CS instance - let inst_sizes = vec![10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; - - println!("Profiler:: NIZK"); - for &s in inst_sizes.iter() { - let num_vars = (2_usize).pow(s as u32); - let num_cons = num_vars; - let num_inputs = 10; - - // produce a synthetic R1CSInstance - let (inst, vars, inputs) = - Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - - // produce public generators - let gens = NIZKGens::::new(num_cons, num_vars, num_inputs); - - let params = poseidon_params(); - // produce a proof of satisfiability - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript); - - let mut proof_encoded = Vec::new(); - proof - .serialize_with_mode(&mut proof_encoded, Compress::Yes) - .unwrap(); - let msg_proof_len = format!("NIZK::proof_compressed_len {:?}", proof_encoded.len()); - print(&msg_proof_len); - - // verify the proof of satisfiability - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&inst, &inputs, &mut verifier_transcript, &gens, params) - .is_ok()); - - println!(); - } -} +// #![allow(non_snake_case)] +// #![allow(clippy::assertions_on_result_states)] + +// extern crate libspartan; +// extern crate merlin; +// extern crate rand; + +// use ark_serialize::*; +// use libspartan::parameters::poseidon_params; +// use libspartan::poseidon_transcript::PoseidonTranscript; +// use libspartan::{Instance, NIZKGens, NIZK}; + +// type F = ark_bls12_377::Fr; +// type E = ark_bls12_377::Bls12_377; + +// fn print(msg: &str) { +// let star = "* "; +// println!("{:indent$}{}{}", "", star, msg, indent = 2); +// } + +// pub fn main() { +// // the list of number of variables (and constraints) in an R1CS instance +// let inst_sizes = vec![10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; + +// println!("Profiler:: NIZK"); +// for &s in inst_sizes.iter() { +// let num_vars = (2_usize).pow(s as u32); +// let num_cons = 
num_vars; +// let num_inputs = 10; + +// // produce a synthetic R1CSInstance +// let (inst, vars, inputs) = +// Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + +// // produce public generators +// let gens = NIZKGens::::new(num_cons, num_vars, num_inputs, poseidon_params()); + +// let params = poseidon_params(); +// // produce a proof of satisfiability +// let mut prover_transcript = PoseidonTranscript::new(¶ms); +// let proof = NIZK::prove(&inst, vars, &inputs, &gens, &mut prover_transcript); + +// let mut proof_encoded = Vec::new(); +// proof +// .serialize_with_mode(&mut proof_encoded, Compress::Yes) +// .unwrap(); +// let msg_proof_len = format!("NIZK::proof_compressed_len {:?}", proof_encoded.len()); +// print(&msg_proof_len); + +// // verify the proof of satisfiability +// let mut verifier_transcript = PoseidonTranscript::new(¶ms); +// assert!(proof +// .verify(&inst, &inputs, &mut verifier_transcript, &gens, params) +// .is_ok()); + +// println!(); +// } +// } +fn main() {} \ No newline at end of file diff --git a/profiler/snark.rs b/profiler/snark.rs index a913fdfc..6fb75bac 100644 --- a/profiler/snark.rs +++ b/profiler/snark.rs @@ -1,68 +1,69 @@ -#![allow(non_snake_case)] -#![allow(clippy::assertions_on_result_states)] +// #![allow(non_snake_case)] +// #![allow(clippy::assertions_on_result_states)] -extern crate libspartan; -extern crate merlin; +// extern crate libspartan; +// extern crate merlin; -use ark_serialize::*; -use libspartan::parameters::poseidon_params; -use libspartan::poseidon_transcript::PoseidonTranscript; -use libspartan::{Instance, SNARKGens, SNARK}; -type F = ark_bls12_377::Fr; -type E = ark_bls12_377::Bls12_377; +// use ark_serialize::*; +// use libspartan::parameters::poseidon_params; +// use libspartan::poseidon_transcript::PoseidonTranscript; +// use libspartan::{Instance, SNARKGens, SNARK}; +// type F = ark_bls12_377::Fr; +// type E = ark_bls12_377::Bls12_377; -fn print(msg: &str) { - let star = "* "; - 
println!("{:indent$}{}{}", "", star, msg, indent = 2); -} +// fn print(msg: &str) { +// let star = "* "; +// println!("{:indent$}{}{}", "", star, msg, indent = 2); +// } -pub fn main() { - // the list of number of variables (and constraints) in an R1CS instance - let inst_sizes = vec![10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; +// pub fn main() { +// // the list of number of variables (and constraints) in an R1CS instance +// let inst_sizes = vec![10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]; - println!("Profiler:: SNARK"); - for &s in inst_sizes.iter() { - let num_vars = (2_usize).pow(s as u32); - let num_cons = num_vars; - let num_inputs = 10; +// println!("Profiler:: SNARK"); +// for &s in inst_sizes.iter() { +// let num_vars = (2_usize).pow(s as u32); +// let num_cons = num_vars; +// let num_inputs = 10; - // produce a synthetic R1CSInstance - let (inst, vars, inputs) = - Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); +// // produce a synthetic R1CSInstance +// let (inst, vars, inputs) = +// Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - // produce public generators - let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_cons); +// // produce public generators +// let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_cons, poseidon_params()); - // create a commitment to R1CSInstance - let (comm, decomm) = SNARK::encode(&inst, &gens); +// // create a commitment to R1CSInstance +// let (comm, decomm) = SNARK::encode(&inst, &gens); - let params = poseidon_params(); +// let params = poseidon_params(); - // produce a proof of satisfiability - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SNARK::prove( - &inst, - &comm, - &decomm, - vars, - &inputs, - &gens, - &mut prover_transcript, - ); +// // produce a proof of satisfiability +// let mut prover_transcript = PoseidonTranscript::new(¶ms); +// let proof = SNARK::prove( +// &inst, +// &comm, +// &decomm, +// vars, +// &inputs, 
+// &gens, +// &mut prover_transcript, +// ); - let mut proof_encoded = Vec::new(); - proof - .serialize_with_mode(&mut proof_encoded, Compress::Yes) - .unwrap(); - let msg_proof_len = format!("SNARK::proof_compressed_len {:?}", proof_encoded.len()); - print(&msg_proof_len); +// let mut proof_encoded = Vec::new(); +// proof +// .serialize_with_mode(&mut proof_encoded, Compress::Yes) +// .unwrap(); +// let msg_proof_len = format!("SNARK::proof_compressed_len {:?}", proof_encoded.len()); +// print(&msg_proof_len); - // verify the proof of satisfiability - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify(&comm, &inputs, &mut verifier_transcript, &gens, params) - .is_ok()); +// // verify the proof of satisfiability +// let mut verifier_transcript = PoseidonTranscript::new(¶ms); +// assert!(proof +// .verify(&comm, &inputs, &mut verifier_transcript, &gens, params) +// .is_ok()); - println!(); - } -} +// println!(); +// } +// } +fn main() {} diff --git a/src/constraints.rs b/src/constraints.rs index 9ebf1704..a220630c 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -18,6 +18,7 @@ use ark_r1cs_std::{ alloc::{AllocVar, AllocationMode}, fields::fp::FpVar, prelude::{EqGadget, FieldVar}, + R1CSVar, }; use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystemRef, Namespace, SynthesisError}; @@ -34,13 +35,10 @@ impl PoseidonTranscripVar where F: PrimeField, { - fn new(cs: ConstraintSystemRef, params: &PoseidonConfig, challenge: Option) -> Self { + fn new(cs: ConstraintSystemRef, params: &PoseidonConfig, c_var: FpVar) -> Self { let mut sponge = PoseidonSpongeVar::new(cs.clone(), params); - if let Some(c) = challenge { - let c_var = FpVar::::new_witness(cs.clone(), || Ok(c)).unwrap(); - sponge.absorb(&c_var).unwrap(); - } + sponge.absorb(&c_var).unwrap(); Self { cs, sponge } } @@ -268,8 +266,9 @@ impl R1CSVerificationCircuit { /// This section implements the sumcheck verification part of Spartan impl ConstraintSynthesizer 
for R1CSVerificationCircuit { fn generate_constraints(self, cs: ConstraintSystemRef) -> ark_relations::r1cs::Result<()> { + let initial_challenge_var = FpVar::::new_input(cs.clone(), || Ok(self.prev_challenge))?; let mut transcript_var = - PoseidonTranscripVar::new(cs.clone(), &self.params, Some(self.prev_challenge)); + PoseidonTranscripVar::new(cs.clone(), &self.params, initial_challenge_var); let poly_sc1_vars = self .sc_phase1 @@ -288,7 +287,7 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let input_vars = self .input .iter() - .map(|i| FpVar::::new_variable(cs.clone(), || Ok(i), AllocationMode::Witness).unwrap()) + .map(|i| FpVar::::new_variable(cs.clone(), || Ok(i), AllocationMode::Input).unwrap()) .collect::>>(); let claimed_ry_vars = self @@ -370,9 +369,9 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { let (eval_A_r, eval_B_r, eval_C_r) = self.evals; - let eval_A_r_var = FpVar::::new_witness(cs.clone(), || Ok(eval_A_r))?; - let eval_B_r_var = FpVar::::new_witness(cs.clone(), || Ok(eval_B_r))?; - let eval_C_r_var = FpVar::::new_witness(cs.clone(), || Ok(eval_C_r))?; + let eval_A_r_var = FpVar::::new_input(cs.clone(), || Ok(eval_A_r))?; + let eval_B_r_var = FpVar::::new_input(cs.clone(), || Ok(eval_B_r))?; + let eval_C_r_var = FpVar::::new_input(cs.clone(), || Ok(eval_C_r))?; let scalar_var = &r_A_var * &eval_A_r_var + &r_B_var * &eval_B_r_var + &r_C_var * &eval_C_r_var; diff --git a/src/lib.rs b/src/lib.rs index e25a2ad5..555e788e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -32,6 +32,7 @@ mod r1csproof; mod sparse_mlpoly; mod sqrt_pst; mod sumcheck; +pub mod testudo; mod timer; pub(crate) mod transcript; mod unipoly; @@ -291,386 +292,6 @@ impl Instance { } } -/// `SNARKGens` holds public parameters for producing and verifying proofs with the Spartan SNARK -pub struct SNARKGens { - gens_r1cs_sat: R1CSGens, - gens_r1cs_eval: R1CSCommitmentGens, -} - -impl SNARKGens { - /// Constructs a new `SNARKGens` given the size of the R1CS 
statement - /// `num_nz_entries` specifies the maximum number of non-zero entries in any of the three R1CS matrices - pub fn new(num_cons: usize, num_vars: usize, num_inputs: usize, num_nz_entries: usize) -> Self { - let num_vars_padded = { - let mut num_vars_padded = max(num_vars, num_inputs + 1); - if num_vars_padded != num_vars_padded.next_power_of_two() { - num_vars_padded = num_vars_padded.next_power_of_two(); - } - num_vars_padded - }; - - let gens_r1cs_sat = R1CSGens::new(b"gens_r1cs_sat", num_cons, num_vars_padded); - let gens_r1cs_eval = R1CSCommitmentGens::new( - b"gens_r1cs_eval", - num_cons, - num_vars_padded, - num_inputs, - num_nz_entries, - ); - SNARKGens { - gens_r1cs_sat, - gens_r1cs_eval, - } - } -} - -use ark_ec::pairing::Pairing; -/// `SNARK` holds a proof produced by Spartan SNARK -#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct SNARK { - r1cs_sat_proof: R1CSProof, - inst_evals: (E::ScalarField, E::ScalarField, E::ScalarField), - r1cs_eval_proof: R1CSEvalProof, - rx: Vec, - ry: Vec, -} - -impl SNARK -where - E: Pairing, - E::ScalarField: Absorb, -{ - /// A public computation to create a commitment to an R1CS instance - pub fn encode( - inst: &Instance, - gens: &SNARKGens, - ) -> ( - ComputationCommitment, - ComputationDecommitment, - ) { - let timer_encode = Timer::new("SNARK::encode"); - let (comm, decomm) = inst.inst.commit(&gens.gens_r1cs_eval); - timer_encode.stop(); - ( - ComputationCommitment { comm }, - ComputationDecommitment { decomm }, - ) - } - - /// A method to produce a SNARK proof of the satisfiability of an R1CS instance - pub fn prove( - inst: &Instance, - comm: &ComputationCommitment, - decomm: &ComputationDecommitment, - vars: VarsAssignment, - inputs: &InputsAssignment, - gens: &SNARKGens, - transcript: &mut PoseidonTranscript, - ) -> Self { - let timer_prove = Timer::new("SNARK::prove"); - - // transcript.append_protocol_name(SNARK::protocol_name()); - comm.comm.write_to_transcript(transcript); - - let 
(r1cs_sat_proof, rx, ry) = { - let (proof, rx, ry) = { - // we might need to pad variables - let padded_vars = { - let num_padded_vars = inst.inst.get_num_vars(); - let num_vars = vars.assignment.len(); - if num_padded_vars > num_vars { - vars.pad(num_padded_vars) - } else { - vars - } - }; - - R1CSProof::prove( - &inst.inst, - padded_vars.assignment, - &inputs.assignment, - &gens.gens_r1cs_sat, - transcript, - ) - }; - - let mut proof_encoded: Vec = Vec::new(); - proof - .serialize_with_mode(&mut proof_encoded, Compress::Yes) - .unwrap(); - Timer::print(&format!("len_r1cs_sat_proof {:?}", proof_encoded.len())); - - (proof, rx, ry) - }; - - // We need to reset the transcript state before starting the evaluation - // proof and share this state with the verifier because, on the verifier's - // side all the previous updates are done on the transcript - // circuit variable and the transcript outside the circuit will be - // inconsistent wrt to the prover's. - // transcript.new_from_state(&r1cs_sat_proof.transcript_sat_state); - - // We send evaluations of A, B, C at r = (rx, ry) as claims - // to enable the verifier complete the first sum-check - let timer_eval = Timer::new("eval_sparse_polys"); - let inst_evals = { - let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry); - transcript.append_scalar(b"", &Ar); - transcript.append_scalar(b"", &Br); - transcript.append_scalar(b"", &Cr); - (Ar, Br, Cr) - }; - timer_eval.stop(); - - let r1cs_eval_proof = { - let proof = R1CSEvalProof::prove( - &decomm.decomm, - &rx, - &ry, - &inst_evals, - &gens.gens_r1cs_eval, - transcript, - ); - - let mut proof_encoded: Vec = Vec::new(); - proof - .serialize_with_mode(&mut proof_encoded, Compress::Yes) - .unwrap(); - Timer::print(&format!("len_r1cs_eval_proof {:?}", proof_encoded.len())); - proof - }; - - timer_prove.stop(); - SNARK { - r1cs_sat_proof, - inst_evals, - r1cs_eval_proof, - rx, - ry, - } - } - - /// A method to verify the SNARK proof of the satisfiability of an R1CS instance - pub 
fn verify( - &self, - comm: &ComputationCommitment, - input: &InputsAssignment, - transcript: &mut PoseidonTranscript, - gens: &SNARKGens, - poseidon: PoseidonConfig, - ) -> Result<(u128, u128, u128), ProofVerifyError> { - let timer_verify = Timer::new("SNARK::verify"); - // transcript.append_protocol_name(SNARK::protocol_name()); - - // append a commitment to the computation to the transcript - comm.comm.write_to_transcript(transcript); - - let timer_sat_proof = Timer::new("verify_sat_proof"); - assert_eq!(input.assignment.len(), comm.comm.get_num_inputs()); - // let (rx, ry) = - let res = self.r1cs_sat_proof.verify_groth16( - comm.comm.get_num_vars(), - comm.comm.get_num_cons(), - &input.assignment, - &self.inst_evals, - transcript, - &gens.gens_r1cs_sat, - poseidon, - )?; - timer_sat_proof.stop(); - - let timer_eval_proof = Timer::new("verify_eval_proof"); - // Reset the transcript using the state sent by the prover. - // TODO: find a way to retrieve this state from the circuit. Currently - // the API for generating constraints doesn't support returning values - // computed inside the circuit. 
- // transcript.new_from_state(&self.r1cs_sat_proof.transcript_sat_state); - - let (Ar, Br, Cr) = &self.inst_evals; - transcript.append_scalar(b"", Ar); - transcript.append_scalar(b"", Br); - transcript.append_scalar(b"", Cr); - - self.r1cs_eval_proof.verify( - &comm.comm, - &self.rx, - &self.ry, - &self.inst_evals, - &gens.gens_r1cs_eval, - transcript, - )?; - timer_eval_proof.stop(); - timer_verify.stop(); - Ok(res) - } -} - -#[derive(Clone)] -/// `NIZKGens` holds public parameters for producing and verifying proofs with the Spartan NIZK -pub struct NIZKGens { - gens_r1cs_sat: R1CSGens, -} - -impl NIZKGens { - /// Constructs a new `NIZKGens` given the size of the R1CS statement - pub fn new(num_cons: usize, num_vars: usize, num_inputs: usize) -> Self { - let num_vars_padded = { - let mut num_vars_padded = max(num_vars, num_inputs + 1); - if num_vars_padded != num_vars_padded.next_power_of_two() { - num_vars_padded = num_vars_padded.next_power_of_two(); - } - num_vars_padded - }; - - let gens_r1cs_sat = R1CSGens::new(b"gens_r1cs_sat", num_cons, num_vars_padded); - NIZKGens { gens_r1cs_sat } - } -} - -/// `NIZK` holds a proof produced by Spartan NIZK -#[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] -pub struct NIZK { - r1cs_sat_proof: R1CSProof, - r: (Vec, Vec), -} - -impl NIZK -where - E: Pairing, - E::ScalarField: Absorb, -{ - /// A method to produce a NIZK proof of the satisfiability of an R1CS instance - pub fn prove( - inst: &Instance, - vars: VarsAssignment, - input: &InputsAssignment, - gens: &NIZKGens, - transcript: &mut PoseidonTranscript, - ) -> Self { - let timer_prove = Timer::new("NIZK::prove"); - // transcript.append_protocol_name(NIZK::protocol_name()); - transcript.append_bytes(b"", &inst.digest); - - let (r1cs_sat_proof, rx, ry) = { - // we might need to pad variables - let padded_vars = { - let num_padded_vars = inst.inst.get_num_vars(); - let num_vars = vars.assignment.len(); - if num_padded_vars > num_vars { - 
vars.pad(num_padded_vars) - } else { - vars - } - }; - - let (proof, rx, ry) = R1CSProof::prove( - &inst.inst, - padded_vars.assignment, - &input.assignment, - &gens.gens_r1cs_sat, - transcript, - ); - let mut proof_encoded = Vec::new(); - proof - .serialize_with_mode(&mut proof_encoded, Compress::Yes) - .unwrap(); - Timer::print(&format!("len_r1cs_sat_proof {:?}", proof_encoded.len())); - (proof, rx, ry) - }; - - timer_prove.stop(); - NIZK { - r1cs_sat_proof, - r: (rx, ry), - } - } - - /// A method to verify a NIZK proof of the satisfiability of an R1CS instance - pub fn verify( - &self, - inst: &Instance, - input: &InputsAssignment, - transcript: &mut PoseidonTranscript, - gens: &NIZKGens, - poseidon: PoseidonConfig, - ) -> Result { - let timer_verify = Timer::new("NIZK::verify"); - - transcript.append_bytes(b"", &inst.digest); - - // We send evaluations of A, B, C at r = (rx, ry) as claims - // to enable the verifier complete the first sum-check - // let timer_eval = Timer::new("eval_sparse_polys"); - let (claimed_rx, claimed_ry) = &self.r; - let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); - // timer_eval.stop(); - - let timer_sat_proof = Timer::new("verify_sat_proof"); - assert_eq!(input.assignment.len(), inst.inst.get_num_inputs()); - // let (rx, ry) = - let nc = self.r1cs_sat_proof.circuit_size( - inst.inst.get_num_vars(), - inst.inst.get_num_cons(), - &input.assignment, - &inst_evals, - transcript, - &gens.gens_r1cs_sat, - poseidon, - )?; - - // verify if claimed rx and ry are correct - // assert_eq!(rx, *claimed_rx); - // assert_eq!(ry, *claimed_ry); - timer_sat_proof.stop(); - timer_verify.stop(); - - Ok(nc) - } - - /// A method to verify a NIZK proof of the satisfiability of an R1CS instance with Groth16 - pub fn verify_groth16( - &self, - inst: &Instance, - input: &InputsAssignment, - transcript: &mut PoseidonTranscript, - gens: &NIZKGens, - poseidon: PoseidonConfig, - ) -> Result<(u128, u128, u128), ProofVerifyError> { - let timer_verify = 
Timer::new("NIZK::verify"); - - // transcript.append_protocol_name(NIZK::protocol_name()); - transcript.append_bytes(b"", &inst.digest); - - // We send evaluations of A, B, C at r = (rx, ry) as claims - // to enable the verifier complete the first sum-check - let timer_eval = Timer::new("eval_sparse_polys"); - let (claimed_rx, claimed_ry) = &self.r; - let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); - timer_eval.stop(); - - let timer_sat_proof = Timer::new("verify_sat_proof"); - assert_eq!(input.assignment.len(), inst.inst.get_num_inputs()); - // let (rx, ry) = - let (ds, dp, dv) = self.r1cs_sat_proof.verify_groth16( - inst.inst.get_num_vars(), - inst.inst.get_num_cons(), - &input.assignment, - &inst_evals, - transcript, - &gens.gens_r1cs_sat, - poseidon, - )?; - - // verify if claimed rx and ry are correct - // assert_eq!(rx, *claimed_rx); - // assert_eq!(ry, *claimed_ry); - timer_sat_proof.stop(); - timer_verify.stop(); - - Ok((ds, dp, dv)) - } -} - #[inline] pub(crate) fn dot_product(a: &[F], b: &[F]) -> F { let mut res = F::zero(); @@ -690,48 +311,6 @@ mod tests { type F = ark_bls12_377::Fr; type E = ark_bls12_377::Bls12_377; - #[test] - pub fn check_snark() { - let num_vars = 256; - let num_cons = num_vars; - let num_inputs = 10; - - // produce public generators - let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, num_cons); - - // produce a synthetic R1CSInstance - let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - - // create a commitment to R1CSInstance - let (comm, decomm) = SNARK::encode(&inst, &gens); - - let params = poseidon_params(); - - // produce a proof - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SNARK::prove( - &inst, - &comm, - &decomm, - vars, - &inputs, - &gens, - &mut prover_transcript, - ); - - // verify the proof - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify( - &comm, - &inputs, - &mut verifier_transcript, 
- &gens, - poseidon_params() - ) - .is_ok()); - } - #[test] pub fn check_r1cs_invalid_index() { let num_cons = 4; @@ -776,102 +355,4 @@ mod tests { assert!(inst.is_err()); assert_eq!(inst.err(), Some(R1CSError::InvalidScalar)); } - - #[test] - fn test_padded_constraints() { - // parameters of the R1CS instance - let num_cons = 1; - let num_vars = 0; - let num_inputs = 3; - let num_non_zero_entries = 3; - - // We will encode the above constraints into three matrices, where - // the coefficients in the matrix are in the little-endian byte order - let mut A: Vec<(usize, usize, Vec)> = Vec::new(); - let mut B: Vec<(usize, usize, Vec)> = Vec::new(); - let mut C: Vec<(usize, usize, Vec)> = Vec::new(); - - // Create a^2 + b + 13 - A.push((0, num_vars + 2, (F::one().into_bigint().to_bytes_le()))); // 1*a - B.push((0, num_vars + 2, F::one().into_bigint().to_bytes_le())); // 1*a - C.push((0, num_vars + 1, F::one().into_bigint().to_bytes_le())); // 1*z - C.push((0, num_vars, (-F::from(13u64)).into_bigint().to_bytes_le())); // -13*1 - C.push((0, num_vars + 3, (-F::one()).into_bigint().to_bytes_le())); // -1*b - - // Var Assignments (Z_0 = 16 is the only output) - let vars = vec![F::zero().into_bigint().to_bytes_le(); num_vars]; - - // create an InputsAssignment (a = 1, b = 2) - let mut inputs = vec![F::zero().into_bigint().to_bytes_le(); num_inputs]; - inputs[0] = F::from(16u64).into_bigint().to_bytes_le(); - inputs[1] = F::from(1u64).into_bigint().to_bytes_le(); - inputs[2] = F::from(2u64).into_bigint().to_bytes_le(); - - let assignment_inputs = InputsAssignment::::new(&inputs).unwrap(); - let assignment_vars = VarsAssignment::new(&vars).unwrap(); - - // Check if instance is satisfiable - let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap(); - let res = inst.is_sat(&assignment_vars, &assignment_inputs); - assert!(res.unwrap(), "should be satisfied"); - - // SNARK public params - let gens = SNARKGens::::new(num_cons, num_vars, num_inputs, 
num_non_zero_entries); - - // create a commitment to the R1CS instance - let (comm, decomm) = SNARK::encode(&inst, &gens); - - let params = poseidon_params(); - - // produce a SNARK - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = SNARK::prove( - &inst, - &comm, - &decomm, - assignment_vars.clone(), - &assignment_inputs, - &gens, - &mut prover_transcript, - ); - - // verify the SNARK - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify( - &comm, - &assignment_inputs, - &mut verifier_transcript, - &gens, - poseidon_params() - ) - .is_ok()); - - // NIZK public params - let gens = NIZKGens::::new(num_cons, num_vars, num_inputs); - - let params = poseidon_params(); - - // produce a NIZK - let mut prover_transcript = PoseidonTranscript::new(¶ms); - let proof = NIZK::prove( - &inst, - assignment_vars, - &assignment_inputs, - &gens, - &mut prover_transcript, - ); - - // verify the NIZK - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - assert!(proof - .verify_groth16( - &inst, - &assignment_inputs, - &mut verifier_transcript, - &gens, - poseidon_params() - ) - .is_ok()); - } } diff --git a/src/mipp.rs b/src/mipp.rs index 161b2265..49e5d810 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -118,7 +118,6 @@ impl MippProof { xs.push(c); xs_inv.push(c_inv); } - assert!(m_a.len() == 1 && m_y.len() == 1 && m_h.len() == 1); let final_a = m_a[0]; @@ -222,6 +221,7 @@ impl MippProof { // doesn't bring much improvement final_y *= E::ScalarField::one() + c_inv.mul(point[i]) - point[i]; } + // First, each entry of T and U are multiplied independently by their // respective challenges which is done in parralel and, at the end, // the results are merged together for each vector following their @@ -294,14 +294,16 @@ impl MippProof { // a PST verification at the random point rs, given the pst proof // received from the prover prover let check_h = MultilinearPC::::check_2(vk, &comm_h, &rs, v, &proof.pst_proof_h); + 
assert!(check_h == true); let final_u = proof.final_a.mul(final_y); let final_t: ::TargetField = E::pairing(proof.final_a, proof.final_h).0; let check_t = ref_final_res.tc == final_t; + assert!(check_t == true); let check_u = ref_final_res.uc == final_u; - + assert!(check_u == true); check_h & check_u & check_t } } diff --git a/src/poseidon_transcript.rs b/src/poseidon_transcript.rs index 4ac33768..8a08ae7e 100644 --- a/src/poseidon_transcript.rs +++ b/src/poseidon_transcript.rs @@ -45,7 +45,7 @@ impl PoseidonTranscript { impl PoseidonTranscript { pub fn new_from_state(&mut self, challenge: &F) { - self.sponge = PoseidonSponge::new(&self.params); + self.sponge = PoseidonSponge::new(&self.params.clone()); self.append_scalar(b"", challenge); } } diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 25617549..2524ddb9 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -1,18 +1,20 @@ #![allow(clippy::too_many_arguments)] use super::dense_mlpoly::{DensePolynomial, EqPolynomial, PolyCommitmentGens}; use super::errors::ProofVerifyError; -use crate::constraints::{R1CSVerificationCircuit, VerifierConfig}; +use crate::constraints::{R1CSVerificationCircuit, SumcheckVerificationCircuit, VerifierConfig}; use crate::math::Math; use crate::mipp::MippProof; use crate::poseidon_transcript::PoseidonTranscript; use crate::sqrt_pst::Polynomial; use crate::sumcheck::SumcheckInstanceProof; use crate::transcript::Transcript; +use crate::unipoly::UniPoly; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; +use itertools::Itertools; use super::r1csinstance::R1CSInstance; @@ -20,17 +22,19 @@ use super::sparse_mlpoly::{SparsePolyEntry, SparsePolynomial}; use super::timer::Timer; use ark_snark::{CircuitSpecificSetupSNARK, SNARK}; -use ark_groth16::Groth16; +use crate::ark_std::UniformRand; +use ark_groth16::{Groth16, ProvingKey, 
VerifyingKey}; use ark_relations::r1cs::{ConstraintSynthesizer, ConstraintSystem}; use ark_serialize::*; use ark_std::{One, Zero}; +use core::num; use std::time::Instant; #[derive(CanonicalSerialize, CanonicalDeserialize, Debug)] pub struct R1CSProof { // The PST commitment to the multilinear extension of the witness. - comm: Commitment, + pub comm: Commitment, sc_proof_phase1: SumcheckInstanceProof, claims_phase2: ( E::ScalarField, @@ -39,26 +43,129 @@ pub struct R1CSProof { E::ScalarField, ), sc_proof_phase2: SumcheckInstanceProof, - eval_vars_at_ry: E::ScalarField, - proof_eval_vars_at_ry: Proof, + pub eval_vars_at_ry: E::ScalarField, + pub proof_eval_vars_at_ry: Proof, rx: Vec, ry: Vec, // The transcript state after the satisfiability proof was computed. pub transcript_sat_state: E::ScalarField, + pub initial_state: E::ScalarField, pub t: E::TargetField, pub mipp_proof: MippProof, } +#[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)] +pub struct R1CSVerifierProof { + comm: Commitment, + circuit_proof: ark_groth16::Proof, + initial_state: E::ScalarField, + transcript_sat_state: E::ScalarField, + eval_vars_at_ry: E::ScalarField, + ry: Vec, + proof_eval_vars_at_ry: Proof, + t: E::TargetField, + mipp_proof: MippProof, +} + +#[derive(Clone)] +pub struct CircuitGens { + pk: ProvingKey, + vk: VerifyingKey, +} + +impl CircuitGens +where + E: Pairing, +{ + pub fn new( + num_cons: usize, + num_vars: usize, + num_inputs: usize, + poseidon: PoseidonConfig, + ) -> Self { + let mut rng = rand::thread_rng(); + + let uni_polys_round1 = (0..num_cons.log_2()) + .map(|i| { + UniPoly::::from_evals(&[ + E::ScalarField::rand(&mut rng), + E::ScalarField::rand(&mut rng), + E::ScalarField::rand(&mut rng), + E::ScalarField::rand(&mut rng), + ]) + }) + .collect::>>(); + + let uni_polys_round2 = (0..num_vars.log_2() + 1) + .map(|i| { + UniPoly::::from_evals(&[ + E::ScalarField::rand(&mut rng), + E::ScalarField::rand(&mut rng), + E::ScalarField::rand(&mut rng), + ]) + }) + 
.collect::>>(); + + let circuit = R1CSVerificationCircuit { + num_vars: num_vars, + num_cons: num_cons, + input: (0..num_inputs) + .map(|i| E::ScalarField::rand(&mut rng)) + .collect_vec(), + input_as_sparse_poly: SparsePolynomial::new( + num_vars.log_2(), + (0..num_inputs + 1) + .map(|i| SparsePolyEntry::new(i, E::ScalarField::rand(&mut rng))) + .collect::>>(), + ), + evals: ( + E::ScalarField::zero(), + E::ScalarField::zero(), + E::ScalarField::zero(), + ), + params: poseidon, + prev_challenge: E::ScalarField::zero(), + claims_phase2: ( + E::ScalarField::zero(), + E::ScalarField::zero(), + E::ScalarField::zero(), + E::ScalarField::zero(), + ), + eval_vars_at_ry: E::ScalarField::zero(), + sc_phase1: SumcheckVerificationCircuit { + polys: uni_polys_round1, + }, + sc_phase2: SumcheckVerificationCircuit { + polys: uni_polys_round2, + }, + claimed_ry: (0..num_vars.log_2() + 1) + .map(|i| E::ScalarField::rand(&mut rng)) + .collect_vec(), + claimed_transcript_sat_state: E::ScalarField::zero(), + }; + let (pk, vk) = Groth16::::setup(circuit.clone(), &mut rng).unwrap(); + CircuitGens { pk, vk } + } +} + #[derive(Clone)] pub struct R1CSGens { gens_pc: PolyCommitmentGens, + gens_gc: CircuitGens, } impl R1CSGens { - pub fn new(label: &'static [u8], _num_cons: usize, num_vars: usize) -> Self { + pub fn new( + label: &'static [u8], + num_cons: usize, + num_vars: usize, + num_inputs: usize, + poseidon: PoseidonConfig, + ) -> Self { let num_poly_vars = num_vars.log_2(); let gens_pc = PolyCommitmentGens::new(num_poly_vars, label); - R1CSGens { gens_pc } + let gens_gc = CircuitGens::new(num_cons, num_vars, num_inputs, poseidon); + R1CSGens { gens_pc, gens_gc } } } @@ -146,8 +253,8 @@ where // comm.write_to_transcript(transcript); timer_commit.stop(); - let c = transcript.challenge_scalar(b""); - transcript.new_from_state(&c); + let initial_state = transcript.challenge_scalar(b""); + transcript.new_from_state(&initial_state); transcript.append_scalar_vector(b"", &input); @@ -223,8 
+330,8 @@ where transcript, ); timer_sc_proof_phase2.stop(); - let c = transcript.challenge_scalar(b""); - transcript.new_from_state(&c); + let transcript_sat_state = transcript.challenge_scalar(b""); + transcript.new_from_state(&transcript_sat_state); // TODO: modify the polynomial evaluation in Spartan to be consistent // with the evaluation in ark-poly-commit so that reversing is not needed @@ -233,10 +340,6 @@ where let (comm, proof_eval_vars_at_ry, mipp_proof) = pl.open(transcript, comm_list, &gens.gens_pc.ck, &ry[1..], &t); - println!( - "proof size (no of quotients): {:?}", - proof_eval_vars_at_ry.proofs.len() - ); timmer_opening.stop(); @@ -247,6 +350,7 @@ where ( R1CSProof { comm, + initial_state, sc_proof_phase1, claims_phase2: (*Az_claim, *Bz_claim, *Cz_claim, prod_Az_Bz_claims), sc_proof_phase2, @@ -254,7 +358,7 @@ where proof_eval_vars_at_ry, rx: rx.clone(), ry: ry.clone(), - transcript_sat_state: c, + transcript_sat_state, t, mipp_proof, }, @@ -263,7 +367,7 @@ where ) } - pub fn verify_groth16( + pub fn prove_verifier( &self, num_vars: usize, num_cons: usize, @@ -272,11 +376,12 @@ where transcript: &mut PoseidonTranscript, gens: &R1CSGens, poseidon: PoseidonConfig, - ) -> Result<(u128, u128, u128), ProofVerifyError> { + ) -> Result, ProofVerifyError> { // serialise and add the IPP commitment to the transcript transcript.append_gt::(b"", &self.t); - let c = transcript.challenge_scalar(b""); + let initial_state = transcript.challenge_scalar(b""); + transcript.new_from_state(&initial_state); let mut input_as_sparse_poly_entries = vec![SparsePolyEntry::new(0, E::ScalarField::one())]; //remaining inputs @@ -285,10 +390,8 @@ where .map(|i| SparsePolyEntry::new(i + 1, input[i])) .collect::>>(), ); - - let n = num_vars; let input_as_sparse_poly = - SparsePolynomial::new(n.log_2() as usize, input_as_sparse_poly_entries); + SparsePolynomial::new(num_vars.log_2() as usize, input_as_sparse_poly_entries); let config = VerifierConfig { num_vars, @@ -296,7 +399,7 @@ 
where input: input.to_vec(), evals: *evals, params: poseidon, - prev_challenge: c, + prev_challenge: initial_state, claims_phase2: self.claims_phase2, polys_sc1: self.sc_proof_phase1.polys.clone(), polys_sc2: self.sc_proof_phase2.polys.clone(), @@ -309,127 +412,70 @@ where let circuit = R1CSVerificationCircuit::new(&config); - // this is universal, we don't measure it - // TODO put this _outside_ the verification - let start = Instant::now(); - let (pk, vk) = Groth16::::setup(circuit.clone(), &mut rand::thread_rng()).unwrap(); - let ds = start.elapsed().as_millis(); + let circuit_prover_timer = Timer::new("provecircuit"); + let proof = Groth16::::prove(&gens.gens_gc.pk, circuit, &mut rand::thread_rng()).unwrap(); + circuit_prover_timer.stop(); - let prove_outer = Timer::new("provecircuit"); - let start = Instant::now(); - let proof = Groth16::::prove(&pk, circuit, &mut rand::thread_rng()).unwrap(); - let dp = start.elapsed().as_millis(); - prove_outer.stop(); - - let timer_verification = Timer::new("verification"); - let start = Instant::now(); - - /// TODO : they are not necessary ? 
- let (v_A, v_B, v_C, v_AB) = self.claims_phase2; + Ok(R1CSVerifierProof { + comm: self.comm.clone(), + circuit_proof: proof, + initial_state: self.initial_state, + transcript_sat_state: self.transcript_sat_state, + eval_vars_at_ry: self.eval_vars_at_ry, + ry: self.ry.clone(), + proof_eval_vars_at_ry: self.proof_eval_vars_at_ry.clone(), + t: self.t, + mipp_proof: self.mipp_proof.clone(), + }) + } +} - let mut pubs = vec![]; +impl R1CSVerifierProof +where + ::ScalarField: Absorb, +{ + pub fn verify( + &self, + input: &[E::ScalarField], + evals: &(E::ScalarField, E::ScalarField, E::ScalarField), + transcript: &mut PoseidonTranscript, + gens: &R1CSGens, + ) -> Result { + let (Ar, Br, Cr) = evals; + let mut pubs = vec![self.initial_state]; + pubs.extend(input.clone()); pubs.extend(self.ry.clone()); - pubs.extend(vec![self.eval_vars_at_ry, self.transcript_sat_state]); - + pubs.extend(vec![ + self.eval_vars_at_ry, + *Ar, + *Br, + *Cr, + self.transcript_sat_state, + ]); transcript.new_from_state(&self.transcript_sat_state); par! { // verifies the Groth16 proof for the spartan verifier - let is_verified = Groth16::::verify(&vk, &pubs, &proof).unwrap(), + let is_verified = Groth16::::verify(&gens.gens_gc.vk, &pubs, &self.circuit_proof).unwrap(), // verifies the proof of opening against the result of evaluating the // witness polynomial at point ry - let res = Polynomial::verify( - transcript, - &gens.gens_pc.vk, - &self.comm, - &self.ry[1..], - self.eval_vars_at_ry, - &self.proof_eval_vars_at_ry, - &self.mipp_proof, - &self.t, - ) - }; - let dv = start.elapsed().as_millis(); - timer_verification.stop(); - - assert!(res == true && is_verified == true); - - Ok((ds, dp, dv)) - } - - // Helper function to find the number of constraint in the circuit which - // requires executing it. 
- pub fn circuit_size( - &self, - num_vars: usize, - num_cons: usize, - input: &[E::ScalarField], - evals: &(E::ScalarField, E::ScalarField, E::ScalarField), - transcript: &mut PoseidonTranscript, - _gens: &R1CSGens, - poseidon: PoseidonConfig, - ) -> Result { - // serialise and add the IPP commitment to the transcript - transcript.append_gt::(b"", &self.t); - - let c: E::ScalarField = transcript.challenge_scalar(b""); - - let mut input_as_sparse_poly_entries = vec![SparsePolyEntry::new(0, E::ScalarField::one())]; - //remaining inputs - input_as_sparse_poly_entries.extend( - (0..input.len()) - .map(|i| SparsePolyEntry::new(i + 1, input[i])) - .collect::>>(), - ); - - let n = num_vars; - let input_as_sparse_poly = - SparsePolynomial::new(n.log_2() as usize, input_as_sparse_poly_entries); - - let config = VerifierConfig { - num_vars, - num_cons, - input: input.to_vec(), - evals: *evals, - params: poseidon, - prev_challenge: c, - claims_phase2: self.claims_phase2, - polys_sc1: self.sc_proof_phase1.polys.clone(), - polys_sc2: self.sc_proof_phase2.polys.clone(), - eval_vars_at_ry: self.eval_vars_at_ry, - input_as_sparse_poly, - ry: self.ry.clone(), - comm: self.comm.clone(), - transcript_sat_state: self.transcript_sat_state, + let res = Polynomial::verify( + transcript, + &gens.gens_pc.vk, + &self.comm, + &self.ry[1..], + self.eval_vars_at_ry, + &self.proof_eval_vars_at_ry, + &self.mipp_proof, + &self.t, + ) }; - - let _rng = ark_std::test_rng(); - let circuit = R1CSVerificationCircuit::new(&config); - let cs = ConstraintSystem::::new_ref(); - circuit.generate_constraints(cs.clone()).unwrap(); - assert!(cs.is_satisfied().unwrap()); - - Ok(cs.num_constraints()) + assert!(is_verified == true); + assert!(res == true); + Ok(is_verified && res) } } -// fn verify_constraints_outer(circuit: VerifierCircuit, _num_cons: &usize) -> usize { -// let cs = ConstraintSystem::::new_ref(); -// circuit.generate_constraints(cs.clone()).unwrap(); -// assert!(cs.is_satisfied().unwrap()); 
-// cs.num_constraints() -// } - -// fn verify_constraints_inner(circuit: VerifierCircuit, _num_cons: &usize) -> usize { -// let cs = ConstraintSystem::::new_ref(); -// circuit -// .inner_circuit -// .generate_constraints(cs.clone()) -// .unwrap(); -// assert!(cs.is_satisfied().unwrap()); -// cs.num_constraints() -// } - #[cfg(test)] mod tests { use crate::parameters::{poseidon_params, poseidon_params_bls12381}; @@ -438,6 +484,7 @@ mod tests { type F = ark_bls12_377::Fr; type E = ark_bls12_377::Bls12_377; + use ark_ff::PrimeField; use ark_std::UniformRand; fn produce_tiny_r1cs() -> (R1CSInstance, Vec, Vec) { @@ -531,39 +578,46 @@ mod tests { fn check_r1cs_proof

(params: PoseidonConfig) where P: Pairing, + P::ScalarField: PrimeField, P::ScalarField: Absorb, { - let num_vars = 16; + let num_vars = 1024; let num_cons = num_vars; let num_inputs = 3; let (inst, vars, input) = R1CSInstance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let gens = R1CSGens::

::new(b"test-m", num_cons, num_vars); + let gens = R1CSGens::

::new(b"test-m", num_cons, num_vars, num_inputs, params.clone()); //let params = poseidon_params(); // let mut random_tape = RandomTape::new(b"proof"); - let mut prover_transcript = PoseidonTranscript::new(¶ms); + let mut prover_transcript = PoseidonTranscript::new(¶ms.clone()); + let c = prover_transcript.challenge_scalar::(b""); + prover_transcript.new_from_state(&c); let (proof, rx, ry) = R1CSProof::prove(&inst, vars, &input, &gens, &mut prover_transcript); let inst_evals = inst.evaluate(&rx, &ry); - let mut verifier_transcript = PoseidonTranscript::new(¶ms); - - // if you want to check the test fails - // input[0] = Scalar::zero(); - - assert!(proof - .verify_groth16( - inst.get_num_vars(), - inst.get_num_cons(), + prover_transcript.new_from_state(&c); + let verifer_proof = proof + .prove_verifier( + num_vars, + num_cons, &input, &inst_evals, - &mut verifier_transcript, + &mut prover_transcript, &gens, - params, + params.clone(), ) + .unwrap(); + + let mut verifier_transcript = PoseidonTranscript::new(¶ms.clone()); + assert!(verifer_proof + .verify(&input, &inst_evals, &mut verifier_transcript, &gens) .is_ok()); + + // if you want to check the test fails + // input[0] = Scalar::zero(); } } diff --git a/src/testudo.rs b/src/testudo.rs new file mode 100644 index 00000000..9f158f79 --- /dev/null +++ b/src/testudo.rs @@ -0,0 +1,349 @@ +use std::cmp::max; + +use crate::ark_std::One; +use crate::constraints::VerifierConfig; +use crate::errors::ProofVerifyError; +use crate::r1csinstance::{R1CSCommitmentGens, R1CSEvalProof}; +use crate::r1csproof::R1CSVerifierProof; +use crate::sparse_mlpoly::SparsePolynomial; +use crate::timer::Timer; +use crate::transcript::TranscriptWriter; +use crate::{ + constraints::R1CSVerificationCircuit, + mipp::MippProof, + poseidon_transcript::PoseidonTranscript, + r1csproof::{R1CSGens, R1CSProof}, + sparse_mlpoly::SparsePolyEntry, + transcript::Transcript, + InputsAssignment, Instance, VarsAssignment, +}; +use crate::{ComputationCommitment, 
ComputationDecommitment}; +use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; +use ark_crypto_primitives::sponge::Absorb; +use ark_ec::pairing::Pairing; +use ark_groth16::verifier; +use ark_poly_commit::multilinear_pc::data_structures::{Commitment, Proof}; +use ark_relations::r1cs; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress}; + +#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] +pub struct TestudoSnark { + pub r1cs_verifier_proof: R1CSVerifierProof, + pub r1cs_eval_proof: R1CSEvalProof, + pub inst_evals: (E::ScalarField, E::ScalarField, E::ScalarField), + pub r: (Vec, Vec), +} + +pub struct TestudoGens { + gens_r1cs_sat: R1CSGens, + gens_r1cs_eval: R1CSCommitmentGens, +} + +impl TestudoGens { + /// Constructs a new `TestudoGens` given the size of the R1CS statement + /// `num_nz_entries` specifies the maximum number of non-zero entries in any of the three R1CS matrices + pub fn new( + num_cons: usize, + num_vars: usize, + num_inputs: usize, + num_nz_entries: usize, + poseidon: PoseidonConfig, + ) -> Self { + let num_vars_padded = { + let mut num_vars_padded = max(num_vars, num_inputs + 1); + if num_vars_padded != num_vars_padded.next_power_of_two() { + num_vars_padded = num_vars_padded.next_power_of_two(); + } + num_vars_padded + }; + + let gens_r1cs_sat = R1CSGens::new( + b"gens_r1cs_sat", + num_cons, + num_vars_padded, + num_inputs, + poseidon, + ); + let gens_r1cs_eval = R1CSCommitmentGens::new( + b"gens_r1cs_eval", + num_cons, + num_vars_padded, + num_inputs, + num_nz_entries, + ); + TestudoGens { + gens_r1cs_sat, + gens_r1cs_eval, + } + } +} + +impl TestudoSnark +where + E::ScalarField: Absorb, +{ + pub fn encode( + inst: &Instance, + gens: &TestudoGens, + ) -> ( + ComputationCommitment, + ComputationDecommitment, + ) { + let timer_encode = Timer::new("SNARK::encode"); + let (comm, decomm) = inst.inst.commit(&gens.gens_r1cs_eval); + timer_encode.stop(); + ( + ComputationCommitment { comm }, + 
ComputationDecommitment { decomm }, + ) + } + + pub fn prove( + inst: &Instance, + comm: &ComputationCommitment, + decomm: &ComputationDecommitment, + vars: VarsAssignment, + inputs: &InputsAssignment, + gens: &TestudoGens, + transcript: &mut PoseidonTranscript, + poseidon: PoseidonConfig, + ) -> Result, ProofVerifyError> { + comm.comm.write_to_transcript(transcript); + let c: E::ScalarField = transcript.challenge_scalar(b""); + transcript.new_from_state(&c); + + // we might need to pad variables + let padded_vars = { + let num_padded_vars = inst.inst.get_num_vars(); + let num_vars = vars.assignment.len(); + if num_padded_vars > num_vars { + vars.pad(num_padded_vars) + } else { + vars + } + }; + + let (r1cs_sat_proof, rx, ry) = R1CSProof::prove( + &inst.inst, + padded_vars.assignment, + &inputs.assignment, + &gens.gens_r1cs_sat, + transcript, + ); + + // We send evaluations of A, B, C at r = (rx, ry) as claims + // to enable the verifier complete the first sum-check + let timer_eval = Timer::new("eval_sparse_polys"); + let inst_evals = { + let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry); + transcript.append_scalar(b"", &Ar); + transcript.append_scalar(b"", &Br); + transcript.append_scalar(b"", &Cr); + (Ar, Br, Cr) + }; + timer_eval.stop(); + + let r1cs_eval_proof = R1CSEvalProof::prove( + &decomm.decomm, + &rx, + &ry, + &inst_evals, + &gens.gens_r1cs_eval, + transcript, + ); + + transcript.new_from_state(&c); + let r1cs_verifier_proof = r1cs_sat_proof + .prove_verifier( + inst.inst.get_num_vars(), + inst.inst.get_num_cons(), + &inputs.assignment, + &inst_evals, + transcript, + &gens.gens_r1cs_sat, + poseidon, + ) + .unwrap(); + Ok(TestudoSnark { + r1cs_verifier_proof, + r1cs_eval_proof, + inst_evals, + r: (rx, ry), + }) + } + pub fn verify( + &self, + gens: &TestudoGens, + comm: &ComputationCommitment, + input: &InputsAssignment, + transcript: &mut PoseidonTranscript, + poseidon: PoseidonConfig, + ) -> Result { + let sat_verified = self.r1cs_verifier_proof.verify( 
+ &input.assignment, + &self.inst_evals, + transcript, + &gens.gens_r1cs_sat, + )?; + assert!(sat_verified == true); + + let (Ar, Br, Cr) = &self.inst_evals; + transcript.append_scalar(b"", Ar); + transcript.append_scalar(b"", Br); + transcript.append_scalar(b"", Cr); + + let (rx, ry) = &self.r; + self.r1cs_eval_proof.verify( + &comm.comm, + rx, + ry, + &self.inst_evals, + &gens.gens_r1cs_eval, + transcript, + )?; + Ok(sat_verified) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::ark_std::Zero; + use crate::{ + parameters::poseidon_params, + poseidon_transcript::PoseidonTranscript, + testudo::{TestudoGens, TestudoSnark}, + InputsAssignment, Instance, VarsAssignment, + }; + use ark_ff::{BigInteger, One, PrimeField}; + use ark_groth16::prover; + + #[test] + pub fn check_testudo_snark() { + let num_vars = 256; + let num_cons = num_vars; + let num_inputs = 10; + + type F = ark_bls12_377::Fr; + type E = ark_bls12_377::Bls12_377; + + // produce public generators + let gens = TestudoGens::::new(num_cons, num_vars, num_inputs, num_cons, poseidon_params()); + + // produce a synthetic R1CSInstance + let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + + // create a commitment to R1CSInstance + let (comm, decomm) = TestudoSnark::encode(&inst, &gens); + + let params = poseidon_params(); + + // produce a proof + let mut prover_transcript = PoseidonTranscript::new(¶ms); + let proof = TestudoSnark::prove( + &inst, + &comm, + &decomm, + vars, + &inputs, + &gens, + &mut prover_transcript, + params, + ) + .unwrap(); + + // verify the proof + let mut verifier_transcript = PoseidonTranscript::new(&poseidon_params()); + assert!(proof + .verify( + &gens, + &comm, + &inputs, + &mut verifier_transcript, + poseidon_params() + ) + .is_ok()); + } + + // #[test] + // fn test_padded_constraints() { + // type F = ark_bls12_377::Fr; + // type E = ark_bls12_377::Bls12_377; + // // parameters of the R1CS instance + // let num_cons = 1; + 
// let num_vars = 1; + // let num_inputs = 3; + // let num_non_zero_entries = 3; + + // // We will encode the above constraints into three matrices, where + // // the coefficients in the matrix are in the little-endian byte order + // let mut A: Vec<(usize, usize, Vec)> = Vec::new(); + // let mut B: Vec<(usize, usize, Vec)> = Vec::new(); + // let mut C: Vec<(usize, usize, Vec)> = Vec::new(); + + // // Create a^2 + b + 13 + // A.push((0, num_vars + 2, (F::one().into_bigint().to_bytes_le()))); // 1*a + // B.push((0, num_vars + 2, F::one().into_bigint().to_bytes_le())); // 1*a + // C.push((0, num_vars + 1, F::one().into_bigint().to_bytes_le())); // 1*z + // C.push((0, num_vars, (-F::from(13u64)).into_bigint().to_bytes_le())); // -13*1 + // C.push((0, num_vars + 3, (-F::one()).into_bigint().to_bytes_le())); // -1*b + + // // Var Assignments (Z_0 = 16 is the only output) + // let vars = vec![F::zero().into_bigint().to_bytes_le(); num_vars]; + + // // create an InputsAssignment (a = 1, b = 2) + // let mut inputs = vec![F::zero().into_bigint().to_bytes_le(); num_inputs]; + // inputs[0] = F::from(16u64).into_bigint().to_bytes_le(); + // inputs[1] = F::from(1u64).into_bigint().to_bytes_le(); + // inputs[2] = F::from(2u64).into_bigint().to_bytes_le(); + + // let assignment_inputs = InputsAssignment::::new(&inputs).unwrap(); + // let assignment_vars = VarsAssignment::new(&vars).unwrap(); + + // // Check if instance is satisfiable + // let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap(); + // let res = inst.is_sat(&assignment_vars, &assignment_inputs); + // assert!(res.unwrap(), "should be satisfied"); + + // // Testudo public params + // let gens = TestudoGens::::new( + // num_cons, + // num_vars, + // num_inputs, + // num_non_zero_entries, + // poseidon_params(), + // ); + + // // create a commitment to the R1CS instance + // let (comm, decomm) = TestudoSnark::encode(&inst, &gens); + + // let params = poseidon_params(); + + // // produce a 
TestudoSnark + // let mut prover_transcript = PoseidonTranscript::new(¶ms); + // let proof = TestudoSnark::prove( + // &inst, + // &comm, + // &decomm, + // assignment_vars.clone(), + // &assignment_inputs, + // &gens, + // &mut prover_transcript, + // poseidon_params(), + // ) + // .unwrap(); + + // // verify the TestudoSnark + // let mut verifier_transcript = PoseidonTranscript::new(¶ms); + // assert!(proof + // .verify( + // &gens, + // &comm, + // &assignment_inputs, + // &mut verifier_transcript, + // poseidon_params() + // ) + // .is_ok()); + // } + // } +} From 6d35ef84d50fea970f6a8f500ab2ec7fbb14a4fe Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Wed, 15 Feb 2023 18:47:50 +0000 Subject: [PATCH 42/64] add testudo snark and nizk in separate files --- src/lib.rs | 2 - src/testudo_nizk.rs | 197 +++++++++++++++++++++++ src/testudo_snark.rs | 371 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 568 insertions(+), 2 deletions(-) create mode 100644 src/testudo_nizk.rs create mode 100644 src/testudo_snark.rs diff --git a/src/lib.rs b/src/lib.rs index e47e9306..b1aca3a4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,6 +1,5 @@ #![allow(non_snake_case)] #![doc = include_str!("../README.md")] -#![feature(test)] #![allow(clippy::assertions_on_result_states)] extern crate ark_std; @@ -10,7 +9,6 @@ extern crate lazy_static; extern crate merlin; extern crate rand; extern crate sha3; -extern crate test; #[macro_use] extern crate json; diff --git a/src/testudo_nizk.rs b/src/testudo_nizk.rs new file mode 100644 index 00000000..aa95f7d0 --- /dev/null +++ b/src/testudo_nizk.rs @@ -0,0 +1,197 @@ +use std::cmp::max; + +use crate::errors::ProofVerifyError; +use crate::r1csproof::R1CSVerifierProof; +use crate::{ + poseidon_transcript::PoseidonTranscript, + r1csproof::{R1CSGens, R1CSProof}, + transcript::Transcript, + InputsAssignment, Instance, VarsAssignment, +}; +use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; +use ark_crypto_primitives::sponge::Absorb; 
+use ark_ec::pairing::Pairing; + +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; + +#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] +pub struct TestudoNizk { + pub r1cs_verifier_proof: R1CSVerifierProof, + pub r: (Vec, Vec), +} + +pub struct TestudoNizkGens { + gens_r1cs_sat: R1CSGens, +} + +impl TestudoNizkGens { + /// Constructs a new `TestudoNizkGens` given the size of the R1CS statement + /// `num_nz_entries` specifies the maximum number of non-zero entries in any of the three R1CS matrices + pub fn new( + num_cons: usize, + num_vars: usize, + num_inputs: usize, + poseidon: PoseidonConfig, + ) -> Self { + // ensure num_vars is a power of 2 + let num_vars_padded = { + let mut num_vars_padded = max(num_vars, num_inputs + 1); + if num_vars_padded != num_vars_padded.next_power_of_two() { + num_vars_padded = num_vars_padded.next_power_of_two(); + } + num_vars_padded + }; + + let num_cons_padded = { + let mut num_cons_padded = num_cons; + + // ensure that num_cons_padded is at least 2 + if num_cons_padded == 0 || num_cons_padded == 1 { + num_cons_padded = 2; + } + + // ensure that num_cons_padded is a power of 2 + if num_cons.next_power_of_two() != num_cons { + num_cons_padded = num_cons.next_power_of_two(); + } + num_cons_padded + }; + + let gens_r1cs_sat = R1CSGens::new( + b"gens_r1cs_sat", + num_cons_padded, + num_vars_padded, + num_inputs, + poseidon, + ); + TestudoNizkGens { gens_r1cs_sat } + } +} + +impl TestudoNizk +where + E::ScalarField: Absorb, +{ + // Returns the Testudo SNARK proof which has two components: + // * proof that the R1CS instance is satisfiable + // * proof that the evlauation of matrices A, B and C on (x,y) are correct + pub fn prove( + inst: &Instance, + vars: VarsAssignment, + inputs: &InputsAssignment, + gens: &TestudoNizkGens, + transcript: &mut PoseidonTranscript, + poseidon: PoseidonConfig, + ) -> Result, ProofVerifyError> { + transcript.append_bytes(b"", &inst.digest); + + let c: E::ScalarField = 
transcript.challenge_scalar(b""); + transcript.new_from_state(&c); + + // we might need to pad variables + let padded_vars = { + let num_padded_vars = inst.inst.get_num_vars(); + let num_vars = vars.assignment.len(); + if num_padded_vars > num_vars { + vars.pad(num_padded_vars) + } else { + vars + } + }; + + let (r1cs_sat_proof, rx, ry) = R1CSProof::prove( + &inst.inst, + padded_vars.assignment, + &inputs.assignment, + &gens.gens_r1cs_sat, + transcript, + ); + + let inst_evals = inst.inst.evaluate(&rx, &ry); + + transcript.new_from_state(&c); + let r1cs_verifier_proof = r1cs_sat_proof + .prove_verifier( + inst.inst.get_num_vars(), + inst.inst.get_num_cons(), + &inputs.assignment, + &inst_evals, + transcript, + &gens.gens_r1cs_sat, + poseidon, + ) + .unwrap(); + Ok(TestudoNizk { + r1cs_verifier_proof, + r: (rx, ry), + }) + } + + // Verifies the Testudo SNARK proof ensuring the satisfiability of an R1CS + // instance + pub fn verify( + &self, + gens: &TestudoNizkGens, + inst: &Instance, + input: &InputsAssignment, + transcript: &mut PoseidonTranscript, + _poseidon: PoseidonConfig, + ) -> Result { + transcript.append_bytes(b"", &inst.digest); + let (claimed_rx, claimed_ry) = &self.r; + let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); + + let sat_verified = self.r1cs_verifier_proof.verify( + &input.assignment, + &inst_evals, + transcript, + &gens.gens_r1cs_sat, + )?; + assert!(sat_verified == true); + Ok(sat_verified) + } +} + +#[cfg(test)] +mod tests { + use crate::{ + parameters::poseidon_params, + poseidon_transcript::PoseidonTranscript, + testudo_nizk::{TestudoNizk, TestudoNizkGens}, + Instance, + }; + + #[test] + pub fn check_testudo_nizk() { + let num_vars = 256; + let num_cons = num_vars; + let num_inputs = 10; + + type E = ark_bls12_377::Bls12_377; + + // produce public generators + let gens = TestudoNizkGens::::new(num_cons, num_vars, num_inputs, poseidon_params()); + + // produce a synthetic R1CSInstance + let (inst, vars, inputs) = 
Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + + let params = poseidon_params(); + + // produce a proof + let mut prover_transcript = PoseidonTranscript::new(¶ms); + let proof = + TestudoNizk::prove(&inst, vars, &inputs, &gens, &mut prover_transcript, params).unwrap(); + + // verify the proof + let mut verifier_transcript = PoseidonTranscript::new(&poseidon_params()); + assert!(proof + .verify( + &gens, + &inst, + &inputs, + &mut verifier_transcript, + poseidon_params() + ) + .is_ok()); + } +} diff --git a/src/testudo_snark.rs b/src/testudo_snark.rs new file mode 100644 index 00000000..7f8cc013 --- /dev/null +++ b/src/testudo_snark.rs @@ -0,0 +1,371 @@ +use std::cmp::max; + +use crate::errors::ProofVerifyError; +use crate::r1csinstance::{R1CSCommitmentGens, R1CSEvalProof}; +use crate::r1csproof::R1CSVerifierProof; + +use crate::timer::Timer; +use crate::transcript::TranscriptWriter; +use crate::{ + poseidon_transcript::PoseidonTranscript, + r1csproof::{R1CSGens, R1CSProof}, + transcript::Transcript, + InputsAssignment, Instance, VarsAssignment, +}; +use crate::{ComputationCommitment, ComputationDecommitment}; +use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; +use ark_crypto_primitives::sponge::Absorb; +use ark_ec::pairing::Pairing; + +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; + +#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] +pub struct TestudoSnark { + pub r1cs_verifier_proof: R1CSVerifierProof, + pub r1cs_eval_proof: R1CSEvalProof, + pub inst_evals: (E::ScalarField, E::ScalarField, E::ScalarField), + pub r: (Vec, Vec), +} + +pub struct TestudoSnarkGens { + gens_r1cs_sat: R1CSGens, + gens_r1cs_eval: R1CSCommitmentGens, +} + +impl TestudoSnarkGens { + /// Constructs a new `TestudoSnarkGens` given the size of the R1CS statement + /// `num_nz_entries` specifies the maximum number of non-zero entries in any of the three R1CS matrices + pub fn new( + num_cons: usize, + num_vars: usize, + num_inputs: 
usize, + num_nz_entries: usize, + poseidon: PoseidonConfig, + ) -> Self { + // ensure num_vars is a power of 2 + let num_vars_padded = { + let mut num_vars_padded = max(num_vars, num_inputs + 1); + if num_vars_padded != num_vars_padded.next_power_of_two() { + num_vars_padded = num_vars_padded.next_power_of_two(); + } + num_vars_padded + }; + + let num_cons_padded = { + let mut num_cons_padded = num_cons; + + // ensure that num_cons_padded is at least 2 + if num_cons_padded == 0 || num_cons_padded == 1 { + num_cons_padded = 2; + } + + // ensure that num_cons_padded is a power of 2 + if num_cons.next_power_of_two() != num_cons { + num_cons_padded = num_cons.next_power_of_two(); + } + num_cons_padded + }; + + let gens_r1cs_sat = R1CSGens::new( + b"gens_r1cs_sat", + num_cons_padded, + num_vars_padded, + num_inputs, + poseidon, + ); + let gens_r1cs_eval = R1CSCommitmentGens::new( + b"gens_r1cs_eval", + num_cons_padded, + num_vars_padded, + num_inputs, + num_nz_entries, + ); + TestudoSnarkGens { + gens_r1cs_sat, + gens_r1cs_eval, + } + } +} + +impl TestudoSnark +where + E::ScalarField: Absorb, +{ + // Constructs the computational commitment, required to prove that the + // evaluations of matrices A, B and C sent by the prover to the verifier + // in the SNARK are correct. 
+ pub fn encode( + inst: &Instance, + gens: &TestudoSnarkGens, + ) -> ( + ComputationCommitment, + ComputationDecommitment, + ) { + let timer_encode = Timer::new("SNARK::encode"); + let (comm, decomm) = inst.inst.commit(&gens.gens_r1cs_eval); + timer_encode.stop(); + ( + ComputationCommitment { comm }, + ComputationDecommitment { decomm }, + ) + } + + // Returns the Testudo SNARK proof which has two components: + // * proof that the R1CS instance is satisfiable + // * proof that the evlauation of matrices A, B and C on (x,y) are correct + pub fn prove( + inst: &Instance, + comm: &ComputationCommitment, + decomm: &ComputationDecommitment, + vars: VarsAssignment, + inputs: &InputsAssignment, + gens: &TestudoSnarkGens, + transcript: &mut PoseidonTranscript, + poseidon: PoseidonConfig, + ) -> Result, ProofVerifyError> { + comm.comm.write_to_transcript(transcript); + let c: E::ScalarField = transcript.challenge_scalar(b""); + transcript.new_from_state(&c); + + // we might need to pad variables + let padded_vars = { + let num_padded_vars = inst.inst.get_num_vars(); + let num_vars = vars.assignment.len(); + if num_padded_vars > num_vars { + vars.pad(num_padded_vars) + } else { + vars + } + }; + + let (r1cs_sat_proof, rx, ry) = R1CSProof::prove( + &inst.inst, + padded_vars.assignment, + &inputs.assignment, + &gens.gens_r1cs_sat, + transcript, + ); + + // We send evaluations of A, B, C at r = (rx, ry) as claims + // to enable the verifier complete the first sum-check + let timer_eval = Timer::new("eval_sparse_polys"); + let inst_evals = { + let (Ar, Br, Cr) = inst.inst.evaluate(&rx, &ry); + transcript.append_scalar(b"", &Ar); + transcript.append_scalar(b"", &Br); + transcript.append_scalar(b"", &Cr); + (Ar, Br, Cr) + }; + timer_eval.stop(); + + let timer_eval_proof = Timer::new("r1cs_eval_proof"); + let r1cs_eval_proof = R1CSEvalProof::prove( + &decomm.decomm, + &rx, + &ry, + &inst_evals, + &gens.gens_r1cs_eval, + transcript, + ); + timer_eval_proof.stop(); + + 
transcript.new_from_state(&c); + let timer_sat_circuit_verification = Timer::new("r1cs_sat_circuit_verification"); + let r1cs_verifier_proof = r1cs_sat_proof + .prove_verifier( + inst.inst.get_num_vars(), + inst.inst.get_num_cons(), + &inputs.assignment, + &inst_evals, + transcript, + &gens.gens_r1cs_sat, + poseidon, + ) + .unwrap(); + timer_sat_circuit_verification.stop(); + Ok(TestudoSnark { + r1cs_verifier_proof, + r1cs_eval_proof, + inst_evals, + r: (rx, ry), + }) + } + + pub fn verify( + &self, + gens: &TestudoSnarkGens, + comm: &ComputationCommitment, + input: &InputsAssignment, + transcript: &mut PoseidonTranscript, + _poseidon: PoseidonConfig, + ) -> Result { + let timer_sat_verification = Timer::new("r1cs_sat_verification"); + let sat_verified = self.r1cs_verifier_proof.verify( + &input.assignment, + &self.inst_evals, + transcript, + &gens.gens_r1cs_sat, + )?; + timer_sat_verification.stop(); + assert!(sat_verified == true); + + let (Ar, Br, Cr) = &self.inst_evals; + transcript.append_scalar(b"", Ar); + transcript.append_scalar(b"", Br); + transcript.append_scalar(b"", Cr); + + let (rx, ry) = &self.r; + let timer_eval_verification = Timer::new("r1cs_eval_verification"); + let eval_verified = self.r1cs_eval_proof.verify( + &comm.comm, + rx, + ry, + &self.inst_evals, + &gens.gens_r1cs_eval, + transcript, + ); + timer_eval_verification.stop(); + Ok(sat_verified && eval_verified.is_ok()) + } +} + +#[cfg(test)] +mod tests { + + use crate::ark_std::Zero; + use crate::{ + parameters::poseidon_params, + poseidon_transcript::PoseidonTranscript, + testudo_snark::{TestudoSnark, TestudoSnarkGens}, + InputsAssignment, Instance, VarsAssignment, + }; + use ark_ff::{BigInteger, One, PrimeField}; + + #[test] + pub fn check_testudo_snark() { + let num_vars = 256; + let num_cons = num_vars; + let num_inputs = 10; + + type E = ark_bls12_377::Bls12_377; + + // produce public generators + let gens = + TestudoSnarkGens::::new(num_cons, num_vars, num_inputs, num_cons, 
poseidon_params()); + + // produce a synthetic R1CSInstance + let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); + + // create a commitment to R1CSInstance + let (comm, decomm) = TestudoSnark::encode(&inst, &gens); + + let params = poseidon_params(); + + // produce a proof + let mut prover_transcript = PoseidonTranscript::new(¶ms); + let proof = TestudoSnark::prove( + &inst, + &comm, + &decomm, + vars, + &inputs, + &gens, + &mut prover_transcript, + params, + ) + .unwrap(); + + // verify the proof + let mut verifier_transcript = PoseidonTranscript::new(&poseidon_params()); + assert!(proof + .verify( + &gens, + &comm, + &inputs, + &mut verifier_transcript, + poseidon_params() + ) + .is_ok()); + } + + #[test] + fn test_padded_constraints() { + type F = ark_bls12_377::Fr; + type E = ark_bls12_377::Bls12_377; + // parameters of the R1CS instance + let num_cons = 1; + let num_vars = 0; + let num_inputs = 3; + let num_non_zero_entries = 3; + + // We will encode the above constraints into three matrices, where + // the coefficients in the matrix are in the little-endian byte order + let mut A: Vec<(usize, usize, Vec)> = Vec::new(); + let mut B: Vec<(usize, usize, Vec)> = Vec::new(); + let mut C: Vec<(usize, usize, Vec)> = Vec::new(); + + // Create a^2 + b + 13 + A.push((0, num_vars + 2, (F::one().into_bigint().to_bytes_le()))); // 1*a + B.push((0, num_vars + 2, F::one().into_bigint().to_bytes_le())); // 1*a + C.push((0, num_vars + 1, F::one().into_bigint().to_bytes_le())); // 1*z + C.push((0, num_vars, (-F::from(13u64)).into_bigint().to_bytes_le())); // -13*1 + C.push((0, num_vars + 3, (-F::one()).into_bigint().to_bytes_le())); // -1*b + + // Var Assignments (Z_0 = 16 is the only output) + let vars = vec![F::zero().into_bigint().to_bytes_le(); num_vars]; + + // create an InputsAssignment (a = 1, b = 2) + let mut inputs = vec![F::zero().into_bigint().to_bytes_le(); num_inputs]; + inputs[0] = 
F::from(16u64).into_bigint().to_bytes_le(); + inputs[1] = F::from(1u64).into_bigint().to_bytes_le(); + inputs[2] = F::from(2u64).into_bigint().to_bytes_le(); + + let assignment_inputs = InputsAssignment::::new(&inputs).unwrap(); + let assignment_vars = VarsAssignment::new(&vars).unwrap(); + + // Check if instance is satisfiable + let inst = Instance::new(num_cons, num_vars, num_inputs, &A, &B, &C).unwrap(); + let res = inst.is_sat(&assignment_vars, &assignment_inputs); + assert!(res.unwrap(), "should be satisfied"); + + // Testudo public params + let gens = TestudoSnarkGens::::new( + num_cons, + num_vars, + num_inputs, + num_non_zero_entries, + poseidon_params(), + ); + + // create a commitment to the R1CS instance + let (comm, decomm) = TestudoSnark::encode(&inst, &gens); + + let params = poseidon_params(); + + // produce a TestudoSnark + let mut prover_transcript = PoseidonTranscript::new(¶ms); + let proof = TestudoSnark::prove( + &inst, + &comm, + &decomm, + assignment_vars.clone(), + &assignment_inputs, + &gens, + &mut prover_transcript, + poseidon_params(), + ) + .unwrap(); + + // verify the TestudoSnark + let mut verifier_transcript = PoseidonTranscript::new(¶ms); + assert!(proof + .verify( + &gens, + &comm, + &assignment_inputs, + &mut verifier_transcript, + poseidon_params() + ) + .is_ok()); + } +} From 7484321e04727828d0563555076c953dcc5a45e8 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Thu, 16 Feb 2023 12:30:27 +0000 Subject: [PATCH 43/64] rename functions that perform setups and add comments --- benches/testudo.rs | 3 ++- examples/cubic.rs | 2 +- profiler/testudo.rs | 3 ++- src/dense_mlpoly.rs | 4 ++-- src/r1csinstance.rs | 11 ++++++++--- src/r1csproof.rs | 27 +++++++++++++++------------ src/sparse_mlpoly.rs | 10 +++++----- src/testudo_nizk.rs | 19 +++++++++---------- src/testudo_snark.rs | 23 +++++++++++++---------- 9 files changed, 57 insertions(+), 45 deletions(-) diff --git a/benches/testudo.rs b/benches/testudo.rs index 13ca03cc..413429da 100644 
--- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -65,7 +65,8 @@ where Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); let mut prover_transcript = PoseidonTranscript::new(¶ms.clone()); - let gens = TestudoSnarkGens::::new(num_cons, num_vars, num_inputs, num_cons, params.clone()); + let gens = + TestudoSnarkGens::::setup(num_cons, num_vars, num_inputs, num_cons, params.clone()); let (comm, decomm) = TestudoSnark::::encode(&inst, &gens); diff --git a/examples/cubic.rs b/examples/cubic.rs index 7d5af810..ca777f01 100644 --- a/examples/cubic.rs +++ b/examples/cubic.rs @@ -130,7 +130,7 @@ fn main() { let params = poseidon_params(); // produce public parameters - let gens = TestudoSnarkGens::::new( + let gens = TestudoSnarkGens::::setup( num_cons, num_vars, num_inputs, diff --git a/profiler/testudo.rs b/profiler/testudo.rs index 873164a1..f56d1335 100644 --- a/profiler/testudo.rs +++ b/profiler/testudo.rs @@ -45,7 +45,8 @@ where Instance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); // produce public generators - let gens = TestudoSnarkGens::::new(num_cons, num_vars, num_inputs, num_cons, params.clone()); + let gens = + TestudoSnarkGens::::setup(num_cons, num_vars, num_inputs, num_cons, params.clone()); // create a commitment to R1CSInstance let (comm, decomm) = TestudoSnark::encode(&inst, &gens); diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 3a649b77..4dce01f7 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -182,7 +182,7 @@ pub struct PolyCommitmentGens { impl PolyCommitmentGens { // num vars is the number of variables in the multilinear polynomial // this gives the maximum degree bound - pub fn new(num_vars: usize, label: &'static [u8]) -> PolyCommitmentGens { + pub fn setup(num_vars: usize, label: &'static [u8]) -> PolyCommitmentGens { let (_left, right) = EqPolynomial::::compute_factored_lens(num_vars); let gens = DotProductProofGens::new(right.pow2(), label); @@ -743,7 +743,7 @@ mod tests { let eval = 
poly.evaluate(&r); assert_eq!(eval, F::from(28)); - let gens = PolyCommitmentGens::new(poly.get_num_vars(), b"test-two"); + let gens = PolyCommitmentGens::setup(poly.get_num_vars(), b"test-two"); let (poly_commitment, blinds) = poly.commit(&gens, false); let params = poseidon_params(); diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index 14bb3562..ede46c1b 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -31,7 +31,7 @@ pub struct R1CSCommitmentGens { } impl R1CSCommitmentGens { - pub fn new( + pub fn setup( label: &'static [u8], num_cons: usize, num_vars: usize, @@ -41,8 +41,13 @@ impl R1CSCommitmentGens { assert!(num_inputs < num_vars); let num_poly_vars_x = num_cons.log_2(); let num_poly_vars_y = (2 * num_vars).log_2(); - let gens = - SparseMatPolyCommitmentGens::new(label, num_poly_vars_x, num_poly_vars_y, num_nz_entries, 3); + let gens = SparseMatPolyCommitmentGens::setup( + label, + num_poly_vars_x, + num_poly_vars_y, + num_nz_entries, + 3, + ); R1CSCommitmentGens { gens } } } diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 418db372..92acabf5 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -74,7 +74,10 @@ impl CircuitGens where E: Pairing, { - pub fn new( + // Performs the circuit-specific setup required by Groth16 for the sumcheck + // circuit. This is done by filling the struct with dummy elements, ensuring + // the sizes are correct so the setup matches the circuit that will be proved. + pub fn setup( num_cons: usize, num_vars: usize, num_inputs: usize, @@ -152,7 +155,8 @@ pub struct R1CSGens { } impl R1CSGens { - pub fn new( + // Performs the setup for the polynomial commitment PST and for Groth16. 
+ pub fn setup( label: &'static [u8], num_cons: usize, num_vars: usize, @@ -160,8 +164,8 @@ impl R1CSGens { poseidon: PoseidonConfig, ) -> Self { let num_poly_vars = num_vars.log_2(); - let gens_pc = PolyCommitmentGens::new(num_poly_vars, label); - let gens_gc = CircuitGens::new(num_cons, num_vars, num_inputs, poseidon); + let gens_pc = PolyCommitmentGens::setup(num_poly_vars, label); + let gens_gc = CircuitGens::setup(num_cons, num_vars, num_inputs, poseidon); R1CSGens { gens_pc, gens_gc } } } @@ -225,6 +229,8 @@ where (sc_proof_phase_two, r, claims) } + // Proves the R1CS instance inst is satisfiable given the assignment + // vars. pub fn prove( inst: &R1CSInstance, vars: Vec, @@ -247,7 +253,6 @@ where transcript.append_gt::(b"", &t); - // comm.write_to_transcript(transcript); timer_commit.stop(); let initial_state = transcript.challenge_scalar(b""); @@ -330,9 +335,6 @@ where let transcript_sat_state = transcript.challenge_scalar(b""); transcript.new_from_state(&transcript_sat_state); - // TODO: modify the polynomial evaluation in Spartan to be consistent - // with the evaluation in ark-poly-commit so that reversing is not needed - // anymore let timmer_opening = Timer::new("polyopening"); let (comm, proof_eval_vars_at_ry, mipp_proof) = @@ -364,6 +366,8 @@ where ) } + // Creates a Groth16 proof for the verification of sumcheck, expressed + // as a circuit. pub fn prove_verifier( &self, num_vars: usize, @@ -431,6 +435,8 @@ impl R1CSVerifierProof where ::ScalarField: Absorb, { + // Verifier the Groth16 proof for the sumcheck circuit and the PST polynomial + // commitment opening. pub fn verify( &self, input: &[E::ScalarField], @@ -583,7 +589,7 @@ mod tests { let (inst, vars, input) = R1CSInstance::::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); - let gens = R1CSGens::

::new(b"test-m", num_cons, num_vars, num_inputs, params.clone()); + let gens = R1CSGens::

::setup(b"test-m", num_cons, num_vars, num_inputs, params.clone()); //let params = poseidon_params(); // let mut random_tape = RandomTape::new(b"proof"); @@ -612,8 +618,5 @@ mod tests { assert!(verifer_proof .verify(&input, &inst_evals, &mut verifier_transcript, &gens) .is_ok()); - - // if you want to check the test fails - // input[0] = Scalar::zero(); } } diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index b538423a..c3340067 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -290,7 +290,7 @@ pub struct SparseMatPolyCommitmentGens { } impl SparseMatPolyCommitmentGens { - pub fn new( + pub fn setup( label: &'static [u8], num_vars_x: usize, num_vars_y: usize, @@ -307,9 +307,9 @@ impl SparseMatPolyCommitmentGens { let num_vars_derefs = num_nz_entries.next_power_of_two().log_2() + (batch_size * 2).next_power_of_two().log_2(); - let gens_ops = PolyCommitmentGens::new(num_vars_ops, label); - let gens_mem = PolyCommitmentGens::new(num_vars_mem, label); - let gens_derefs = PolyCommitmentGens::new(num_vars_derefs, label); + let gens_ops = PolyCommitmentGens::setup(num_vars_ops, label); + let gens_mem = PolyCommitmentGens::setup(num_vars_mem, label); + let gens_derefs = PolyCommitmentGens::setup(num_vars_derefs, label); SparseMatPolyCommitmentGens { gens_ops, gens_mem, @@ -1635,7 +1635,7 @@ mod tests { } let poly_M = SparseMatPolynomial::new(num_vars_x, num_vars_y, M); - let gens = SparseMatPolyCommitmentGens::::new( + let gens = SparseMatPolyCommitmentGens::::setup( b"gens_sparse_poly", num_vars_x, num_vars_y, diff --git a/src/testudo_nizk.rs b/src/testudo_nizk.rs index aa95f7d0..d0426789 100644 --- a/src/testudo_nizk.rs +++ b/src/testudo_nizk.rs @@ -25,9 +25,8 @@ pub struct TestudoNizkGens { } impl TestudoNizkGens { - /// Constructs a new `TestudoNizkGens` given the size of the R1CS statement - /// `num_nz_entries` specifies the maximum number of non-zero entries in any of the three R1CS matrices - pub fn new( + /// Performs the setup required by 
the polynomial commitment PST and Groth16 + pub fn setup( num_cons: usize, num_vars: usize, num_inputs: usize, @@ -57,7 +56,7 @@ impl TestudoNizkGens { num_cons_padded }; - let gens_r1cs_sat = R1CSGens::new( + let gens_r1cs_sat = R1CSGens::setup( b"gens_r1cs_sat", num_cons_padded, num_vars_padded, @@ -72,9 +71,7 @@ impl TestudoNizk where E::ScalarField: Absorb, { - // Returns the Testudo SNARK proof which has two components: - // * proof that the R1CS instance is satisfiable - // * proof that the evlauation of matrices A, B and C on (x,y) are correct + // Returns a proof that the R1CS instance is satisfiable pub fn prove( inst: &Instance, vars: VarsAssignment, @@ -127,8 +124,10 @@ where }) } - // Verifies the Testudo SNARK proof ensuring the satisfiability of an R1CS - // instance + // Verifies the satisfiability proof for the R1CS instance. In NIZK mode, the + // verifier evaluates matrices A, B and C themselves, which is a linear + // operation and hence this is not a SNARK. + // However, for highly structured circuits this operation is fast. 
pub fn verify( &self, gens: &TestudoNizkGens, @@ -170,7 +169,7 @@ mod tests { type E = ark_bls12_377::Bls12_377; // produce public generators - let gens = TestudoNizkGens::::new(num_cons, num_vars, num_inputs, poseidon_params()); + let gens = TestudoNizkGens::::setup(num_cons, num_vars, num_inputs, poseidon_params()); // produce a synthetic R1CSInstance let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); diff --git a/src/testudo_snark.rs b/src/testudo_snark.rs index 7f8cc013..fee87a57 100644 --- a/src/testudo_snark.rs +++ b/src/testudo_snark.rs @@ -33,9 +33,11 @@ pub struct TestudoSnarkGens { } impl TestudoSnarkGens { - /// Constructs a new `TestudoSnarkGens` given the size of the R1CS statement - /// `num_nz_entries` specifies the maximum number of non-zero entries in any of the three R1CS matrices - pub fn new( + /// Performs the setups required by the polynomial commitment PST, Groth16 + /// and the computational commitment given the size of the R1CS statement, + /// `num_nz_entries` specifies the maximum number of non-zero entries in + /// any of the three R1CS matrices. + pub fn setup( num_cons: usize, num_vars: usize, num_inputs: usize, @@ -66,14 +68,14 @@ impl TestudoSnarkGens { num_cons_padded }; - let gens_r1cs_sat = R1CSGens::new( + let gens_r1cs_sat = R1CSGens::setup( b"gens_r1cs_sat", num_cons_padded, num_vars_padded, num_inputs, poseidon, ); - let gens_r1cs_eval = R1CSCommitmentGens::new( + let gens_r1cs_eval = R1CSCommitmentGens::setup( b"gens_r1cs_eval", num_cons_padded, num_vars_padded, @@ -91,9 +93,9 @@ impl TestudoSnark where E::ScalarField: Absorb, { - // Constructs the computational commitment, required to prove that the + // Constructs the computational commitment, used to prove that the // evaluations of matrices A, B and C sent by the prover to the verifier - // in the SNARK are correct. + // are correct. 
pub fn encode( inst: &Instance, gens: &TestudoSnarkGens, @@ -112,7 +114,8 @@ where // Returns the Testudo SNARK proof which has two components: // * proof that the R1CS instance is satisfiable - // * proof that the evlauation of matrices A, B and C on (x,y) are correct + // * proof that the evaluation of matrices A, B and C on point (x,y) + // resulted from the two rounds of sumcheck are correct pub fn prove( inst: &Instance, comm: &ComputationCommitment, @@ -251,7 +254,7 @@ mod tests { // produce public generators let gens = - TestudoSnarkGens::::new(num_cons, num_vars, num_inputs, num_cons, poseidon_params()); + TestudoSnarkGens::::setup(num_cons, num_vars, num_inputs, num_cons, poseidon_params()); // produce a synthetic R1CSInstance let (inst, vars, inputs) = Instance::produce_synthetic_r1cs(num_cons, num_vars, num_inputs); @@ -329,7 +332,7 @@ mod tests { assert!(res.unwrap(), "should be satisfied"); // Testudo public params - let gens = TestudoSnarkGens::::new( + let gens = TestudoSnarkGens::::setup( num_cons, num_vars, num_inputs, From 4e9bcf8ac5d50dd2315cbe005674da5d297134dd Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Mon, 6 Mar 2023 13:26:06 +0000 Subject: [PATCH 44/64] prototyping --- src/sqrt_pst.rs | 84 ++++++++++++++++++++++++++++++++++--------------- 1 file changed, 58 insertions(+), 26 deletions(-) diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index 4ae06d2e..cc167f81 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -13,6 +13,7 @@ use crate::{ pub struct Polynomial { m: usize, + odd: bool, polys: Vec>, q: Option>, chis_b: Option>, @@ -29,24 +30,36 @@ impl Polynomial { let pl_timer = Timer::new("poly_list_build"); // check the evaluation list is a power of 2 debug_assert!(Z.len() & (Z.len() - 1) == 0); - let m = Z.len().log_2() / 2; - let pow_m = 2_usize.pow(m as u32); - let polys: Vec> = (0..pow_m) + let m_col = Z.len().log_2() / 2; + let m_row = if Z.len().log_2() % 2 == 0 { + Z.len().log_2() / 2 + } else { + Z.len().log_2() / 2 + 1 + }; + 
println!("{:?}", m_row); + let pow_m_col = 2_usize.pow(m_col as u32); + let pow_m_row = 2_usize.pow(m_row as u32); + let polys: Vec> = (0..pow_m_col) .into_par_iter() .map(|i| { - let z: Vec = (0..pow_m) + let z: Vec = (0..pow_m_row) .into_par_iter() // viewing the list of evaluation as a square matrix // we select by row j and column i - .map(|j| Z[(j << m) | i]) + + // to handle the odd case, we add another row to the matrix + // + .map(|j| Z[(j << m_col) | i]) .collect(); DensePolynomial::new(z) }) .collect(); - debug_assert!(polys.len() == pow_m); + debug_assert!(polys.len() == pow_m_col); + debug_assert!(polys[0].len == pow_m_row); pl_timer.stop(); Self { - m, + m: m_col, + odd: if m_row % 2 == 0 { false } else { true }, polys, q: None, chis_b: None, @@ -56,12 +69,12 @@ impl Polynomial { // Given point = (\vec{a}, \vec{b}), compute the polynomial q as // q(Y) = // \sum_{j \in \{0,1\}^m}(\sum_{i \in \{0,1\}^m} p(j,i) * chi_i(b)) * chi_j(Y) - // and p(a,b) = q(b) where p is the initial polynomial + // and p(a,b) = q(a) where p is the initial polynomial fn get_q(&mut self, point: &[E::ScalarField]) { let q_timer = Timer::new("build_q"); - debug_assert!(point.len() == 2 * self.m); - let _a = &point[0..self.m]; - let b = &point[self.m..2 * self.m]; + let odd = if self.odd { 1 } else { 0 }; + debug_assert!(point.len() == 2 * self.m + odd); + let b = &point[self.m + odd..]; let pow_m = 2_usize.pow(self.m as u32); let chis: Vec = (0..pow_m) @@ -69,7 +82,7 @@ impl Polynomial { .map(|i| Self::get_chi_i(b, i)) .collect(); - let z_q: Vec = (0..pow_m) + let z_q: Vec = (0..(pow_m * 2_usize.pow(odd as u32))) .into_par_iter() .map(|j| (0..pow_m).map(|i| self.polys[i].Z[j] * chis[i]).sum()) .collect(); @@ -80,10 +93,10 @@ impl Polynomial { } // Given point = (\vec{a}, \vec{b}) used to construct q - // compute q(b) = p(a,b). + // compute q(a) = p(a,b). 
pub fn eval(&mut self, point: &[E::ScalarField]) -> E::ScalarField { - let a = &point[0..point.len() / 2]; - let _b = &point[point.len() / 2..point.len()]; + let odd = if self.odd { 1 } else { 0 }; + let a = &point[0..point.len() / 2 + odd]; if self.q.is_none() { self.get_q(point); } @@ -107,7 +120,8 @@ impl Polynomial { .collect(); timer_list.stop(); - let h_vec = ck.powers_of_h[0].clone(); + let mut h_vec = ck.powers_of_h[0].clone(); + h_vec = h_vec[..comm_list.len()].to_vec(); assert!(comm_list.len() == h_vec.len()); let ipp_timer = Timer::new("ipp"); @@ -155,8 +169,8 @@ impl Polynomial { point: &[E::ScalarField], t: &E::TargetField, ) -> (Commitment, Proof, MippProof) { - let m = point.len() / 2; - let a = &point[0..m]; + let odd = if self.odd { 1 } else { 0 }; + let a = &point[0..self.m + 1]; if self.q.is_none() { self.get_q(point); } @@ -168,7 +182,6 @@ impl Polynomial { // Compute the PST commitment to q obtained as the inner products of the // commitments to the polynomials p_i and chi_i(\vec{b}) for i ranging over // the boolean hypercube of size m. 
- let _m = a.len(); let timer_msm = Timer::new("msm"); if self.chis_b.is_none() { panic!("chis(b) should have been computed for q"); @@ -188,7 +201,8 @@ impl Polynomial { }; let comm = MultilinearPC::::commit(ck, &q); debug_assert!(c_u == comm.g_product); - let h_vec = ck.powers_of_h[0].clone(); + let mut h_vec = ck.powers_of_h[0].clone(); + h_vec = h_vec[..comm_list.len()].to_vec(); // construct MIPP proof that U is the inner product of the vector A // and the vector y, where A is the opening vector to T @@ -224,8 +238,9 @@ impl Polynomial { T: &E::TargetField, ) -> bool { let len = point.len(); - let a = &point[0..len / 2]; - let b = &point[len / 2..len]; + let odd = if len % 2 == 1 { 1 } else { 0 }; + let a = &point[0..len / 2 + odd]; + let b = &point[len / 2 + odd..len]; let timer_mipp_verify = Timer::new("mipp_verify"); // verify that U = A^y where A is the opening vector of T @@ -260,7 +275,7 @@ mod tests { #[test] fn check_sqrt_poly_eval() { let mut rng = ark_std::test_rng(); - let num_vars = 8; + let num_vars = 5; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) @@ -280,7 +295,7 @@ mod tests { #[test] fn check_new_poly_commit() { let mut rng = ark_std::test_rng(); - let num_vars = 4; + let num_vars = 5; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) @@ -288,8 +303,8 @@ mod tests { .map(|_| F::rand(&mut rng)) .collect(); - let gens = MultilinearPC::::setup(2, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&gens, 2); + let gens = MultilinearPC::::setup(3, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&gens, 3); let mut pl = Polynomial::from_evaluations(&Z.clone()); @@ -316,4 +331,21 @@ mod tests { ); assert!(res == true); } + + #[test] + fn test_odd() { + let mut rng = ark_std::test_rng(); + let num_vars = 5; + let len = 2_usize.pow(num_vars); + let Z: Vec = (0..len).into_iter().map(|_| 
F::rand(&mut rng)).collect(); + let r: Vec = (0..num_vars) + .into_iter() + .map(|_| F::rand(&mut rng)) + .collect(); + + // let gens = MultilinearPC::::setup(2, &mut rng); + // let (ck, vk) = MultilinearPC::::trim(&gens, 2); + + let mut pl: Polynomial = Polynomial::from_evaluations(&Z.clone()); + } } From 571f54f1538160576d6d41bec74f7ae24025dbb0 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Mon, 6 Mar 2023 14:10:05 +0000 Subject: [PATCH 45/64] explain testudo-nizk --- src/testudo_nizk.rs | 7 ++++++- src/testudo_snark.rs | 1 + 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/src/testudo_nizk.rs b/src/testudo_nizk.rs index d0426789..1e837929 100644 --- a/src/testudo_nizk.rs +++ b/src/testudo_nizk.rs @@ -15,6 +15,11 @@ use ark_ec::pairing::Pairing; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] + +// TestudoNizk is suitable for uniform circuits where the +// evaluation of R1CS matrices A, B and C is cheap and can +// be done by the verifier. For more complex circuits this +// operation has to be offloaded to the prover. pub struct TestudoNizk { pub r1cs_verifier_proof: R1CSVerifierProof, pub r: (Vec, Vec), @@ -126,7 +131,7 @@ where // Verifies the satisfiability proof for the R1CS instance. In NIZK mode, the // verifier evaluates matrices A, B and C themselves, which is a linear - // operation and hence this is not a SNARK. + // operation and hence this is not a SNARK. // However, for highly structured circuits this operation is fast. 
pub fn verify( &self, diff --git a/src/testudo_snark.rs b/src/testudo_snark.rs index fee87a57..5e5915b0 100644 --- a/src/testudo_snark.rs +++ b/src/testudo_snark.rs @@ -20,6 +20,7 @@ use ark_ec::pairing::Pairing; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; #[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] + pub struct TestudoSnark { pub r1cs_verifier_proof: R1CSVerifierProof, pub r1cs_eval_proof: R1CSEvalProof, From 7d52852497139870beb163cd22153ffc69b09773 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Fri, 10 Mar 2023 10:23:45 +0000 Subject: [PATCH 46/64] add support for odd case in sqrt_pst --- Cargo.toml | 2 +- src/mipp.rs | 2 +- src/sqrt_pst.rs | 73 +++++++++++++++++++++---------------------------- 3 files changed, 33 insertions(+), 44 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 529316f9..a5ac867f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -72,7 +72,7 @@ parallel = [ "std", "ark-ff/parallel", "ark-std/parallel", "ark-ec/parallel", "a std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-serialize/std"] [patch.crates-io] -ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/pst_on_g2"} +ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/variable-crs"} ark-groth16 = { git = "https://github.com/arkworks-rs/groth16" } blstrs = { git = "https://github.com/nikkolasg/blstrs", branch = "feat/arkwork" } ark-ec = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } diff --git a/src/mipp.rs b/src/mipp.rs index 77da290e..93f7a9c1 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -304,7 +304,7 @@ impl MippProof { let check_u = ref_final_res.uc == final_u; assert!(check_u == true); - check_h & check_u & check_t + check_h & check_u } } diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index cc167f81..8810cb30 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -13,7 +13,7 @@ use crate::{ pub struct Polynomial { m: usize, - 
odd: bool, + odd: usize, polys: Vec>, q: Option>, chis_b: Option>, @@ -30,15 +30,18 @@ impl Polynomial { let pl_timer = Timer::new("poly_list_build"); // check the evaluation list is a power of 2 debug_assert!(Z.len() & (Z.len() - 1) == 0); - let m_col = Z.len().log_2() / 2; - let m_row = if Z.len().log_2() % 2 == 0 { + + let num_vars = Z.len().log_2(); + let m_col = num_vars / 2; + let m_row = if num_vars % 2 == 0 { Z.len().log_2() / 2 } else { Z.len().log_2() / 2 + 1 }; - println!("{:?}", m_row); + let pow_m_col = 2_usize.pow(m_col as u32); let pow_m_row = 2_usize.pow(m_row as u32); + let polys: Vec> = (0..pow_m_col) .into_par_iter() .map(|i| { @@ -46,20 +49,22 @@ let z: Vec = (0..pow_m_row) .into_par_iter() // viewing the list of evaluation as a square matrix // we select by row j and column i - - // to handle the odd case, we add another row to the matrix - // + // to handle the odd case, we add another row to the matrix i.e. + // we add an extra variable to the polynomials while keeping their + // number the same .map(|j| Z[(j << m_col) | i]) .collect(); DensePolynomial::new(z) }) .collect(); + debug_assert!(polys.len() == pow_m_col); debug_assert!(polys[0].len == pow_m_row); + pl_timer.stop(); Self { m: m_col, - odd: if m_row % 2 == 0 { false } else { true }, + odd: if num_vars % 2 == 1 { 1 } else { 0 }, polys, q: None, chis_b: None, @@ -72,9 +77,9 @@ impl Polynomial { // and p(a,b) = q(a) where p is the initial polynomial fn get_q(&mut self, point: &[E::ScalarField]) { let q_timer = Timer::new("build_q"); - let odd = if self.odd { 1 } else { 0 }; - debug_assert!(point.len() == 2 * self.m + odd); - let b = &point[self.m + odd..]; + + debug_assert!(point.len() == 2 * self.m + self.odd); + let b = &point[self.m + self.odd..]; let pow_m = 2_usize.pow(self.m as u32); let chis: Vec = (0..pow_m) @@ -82,7 +87,7 @@ impl Polynomial { .map(|i| Self::get_chi_i(b, i)) .collect(); - let z_q: Vec = (0..(pow_m * 2_usize.pow(odd as u32))) + let z_q: Vec = (0..(pow_m * 
2_usize.pow(self.odd as u32))) .into_par_iter() .map(|j| (0..pow_m).map(|i| self.polys[i].Z[j] * chis[i]).sum()) .collect(); @@ -95,8 +100,7 @@ impl Polynomial { // Given point = (\vec{a}, \vec{b}) used to construct q // compute q(a) = p(a,b). pub fn eval(&mut self, point: &[E::ScalarField]) -> E::ScalarField { - let odd = if self.odd { 1 } else { 0 }; - let a = &point[0..point.len() / 2 + odd]; + let a = &point[0..point.len() / 2 + self.odd]; if self.q.is_none() { self.get_q(point); } @@ -109,9 +113,7 @@ impl Polynomial { pub fn commit(&self, ck: &CommitterKey) -> (Vec>, E::TargetField) { let timer_commit = Timer::new("sqrt_commit"); - let timer_list = Timer::new("comm_list"); - // commit to each of the sqrt sized p_i let comm_list: Vec> = self .polys @@ -120,8 +122,7 @@ impl Polynomial { .collect(); timer_list.stop(); - let mut h_vec = ck.powers_of_h[0].clone(); - h_vec = h_vec[..comm_list.len()].to_vec(); + let h_vec = ck.powers_of_h[self.odd].clone(); assert!(comm_list.len() == h_vec.len()); let ipp_timer = Timer::new("ipp"); @@ -169,8 +170,7 @@ impl Polynomial { point: &[E::ScalarField], t: &E::TargetField, ) -> (Commitment, Proof, MippProof) { - let odd = if self.odd { 1 } else { 0 }; - let a = &point[0..self.m + 1]; + let a = &point[0..self.m + self.odd]; if self.q.is_none() { self.get_q(point); } @@ -201,8 +201,7 @@ impl Polynomial { }; let comm = MultilinearPC::::commit(ck, &q); debug_assert!(c_u == comm.g_product); - let mut h_vec = ck.powers_of_h[0].clone(); - h_vec = h_vec[..comm_list.len()].to_vec(); + let h_vec = ck.powers_of_h[self.odd].clone(); // construct MIPP proof that U is the inner product of the vector A // and the vector y, where A is the opening vector to T @@ -275,7 +274,7 @@ mod tests { #[test] fn check_sqrt_poly_eval() { let mut rng = ark_std::test_rng(); - let num_vars = 5; + let num_vars = 6; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) @@ -293,9 
+292,16 @@ mod tests { } #[test] - fn check_new_poly_commit() { + fn check_commit() { + // check odd case + check_sqrt_poly_commit(5); + + // check even case + check_sqrt_poly_commit(6); + } + + fn check_sqrt_poly_commit(num_vars: u32) { let mut rng = ark_std::test_rng(); - let num_vars = 5; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) @@ -331,21 +337,4 @@ mod tests { ); assert!(res == true); } - - #[test] - fn test_odd() { - let mut rng = ark_std::test_rng(); - let num_vars = 5; - let len = 2_usize.pow(num_vars); - let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); - let r: Vec = (0..num_vars) - .into_iter() - .map(|_| F::rand(&mut rng)) - .collect(); - - // let gens = MultilinearPC::::setup(2, &mut rng); - // let (ck, vk) = MultilinearPC::::trim(&gens, 2); - - let mut pl: Polynomial = Polynomial::from_evaluations(&Z.clone()); - } } From 8fa04bbd6703be884d1c4404b61812bfbb96066a Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Fri, 10 Mar 2023 12:31:19 +0000 Subject: [PATCH 47/64] add missing constraints and correct proof size for benchmarks --- src/constraints.rs | 17 +++++++++++++++++ src/r1csproof.rs | 21 ++++++++++++++++----- src/testudo_nizk.rs | 3 ++- src/testudo_snark.rs | 4 +++- 4 files changed, 38 insertions(+), 7 deletions(-) diff --git a/src/constraints.rs b/src/constraints.rs index 5b25b762..931d3f0e 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -229,6 +229,7 @@ pub struct R1CSVerificationCircuit { pub sc_phase1: SumcheckVerificationCircuit, pub sc_phase2: SumcheckVerificationCircuit, // The point on which the polynomial was evaluated by the prover. 
+ pub claimed_rx: Vec, pub claimed_ry: Vec, pub claimed_transcript_sat_state: F, } @@ -251,6 +252,7 @@ impl R1CSVerificationCircuit { sc_phase2: SumcheckVerificationCircuit { polys: config.polys_sc2.clone(), }, + claimed_rx: config.rx.clone(), claimed_ry: config.ry.clone(), claimed_transcript_sat_state: config.transcript_sat_state, } @@ -284,6 +286,12 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { .map(|i| FpVar::::new_variable(cs.clone(), || Ok(i), AllocationMode::Input).unwrap()) .collect::>>(); + let claimed_rx_vars = self + .claimed_rx + .iter() + .map(|r| FpVar::::new_variable(cs.clone(), || Ok(r), AllocationMode::Input).unwrap()) + .collect::>>(); + let claimed_ry_vars = self .claimed_ry .iter() @@ -304,6 +312,13 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { .sc_phase1 .verifiy_sumcheck(&poly_sc1_vars, &claim_phase1_var, &mut transcript_var)?; + // The prover sends (rx, ry) to the verifier for the evaluation proof so + // the constraints need to ensure it is indeed the result from the first + // round of sumcheck verification. + for (i, r) in claimed_rx_vars.iter().enumerate() { + rx_var[i].enforce_equal(r)?; + } + let (Az_claim, Bz_claim, Cz_claim, prod_Az_Bz_claims) = &self.claims_phase2; let Az_claim_var = FpVar::::new_witness(cs.clone(), || Ok(Az_claim))?; @@ -344,6 +359,7 @@ impl ConstraintSynthesizer for R1CSVerificationCircuit { // claimed point, coming from the prover, is actually the point derived // inside the circuit. These additional checks will be removed // when the commitment verification is done inside the circuit. + // Moreover, (rx, ry) will be used in the evaluation proof. 
for (i, r) in claimed_ry_vars.iter().enumerate() { ry_var[i].enforce_equal(r)?; } @@ -401,6 +417,7 @@ pub struct VerifierConfig { pub eval_vars_at_ry: E::ScalarField, pub polys_sc1: Vec>, pub polys_sc2: Vec>, + pub rx: Vec, pub ry: Vec, pub transcript_sat_state: E::ScalarField, } diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 92acabf5..92538529 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -58,7 +58,6 @@ pub struct R1CSVerifierProof { initial_state: E::ScalarField, transcript_sat_state: E::ScalarField, eval_vars_at_ry: E::ScalarField, - ry: Vec, proof_eval_vars_at_ry: Proof, t: E::TargetField, mipp_proof: MippProof, @@ -138,6 +137,9 @@ where sc_phase2: SumcheckVerificationCircuit { polys: uni_polys_round2, }, + claimed_rx: (0..num_cons.log_2()) + .map(|_i| E::ScalarField::rand(&mut rng)) + .collect_vec(), claimed_ry: (0..num_vars.log_2() + 1) .map(|_i| E::ScalarField::rand(&mut rng)) .collect_vec(), @@ -407,6 +409,7 @@ where eval_vars_at_ry: self.eval_vars_at_ry, input_as_sparse_poly, comm: self.comm.clone(), + rx: self.rx.clone(), ry: self.ry.clone(), transcript_sat_state: self.transcript_sat_state, }; @@ -423,7 +426,6 @@ where initial_state: self.initial_state, transcript_sat_state: self.transcript_sat_state, eval_vars_at_ry: self.eval_vars_at_ry, - ry: self.ry.clone(), proof_eval_vars_at_ry: self.proof_eval_vars_at_ry.clone(), t: self.t, mipp_proof: self.mipp_proof.clone(), @@ -439,15 +441,18 @@ where // commitment opening. 
pub fn verify( &self, + r: (Vec, Vec), input: &[E::ScalarField], evals: &(E::ScalarField, E::ScalarField, E::ScalarField), transcript: &mut PoseidonTranscript, gens: &R1CSGens, ) -> Result { + let (rx, ry) = &r; let (Ar, Br, Cr) = evals; let mut pubs = vec![self.initial_state]; pubs.extend(input.clone()); - pubs.extend(self.ry.clone()); + pubs.extend(rx.clone()); + pubs.extend(ry.clone()); pubs.extend(vec![ self.eval_vars_at_ry, *Ar, @@ -466,7 +471,7 @@ where transcript, &gens.gens_pc.vk, &self.comm, - &self.ry[1..], + &ry[1..], self.eval_vars_at_ry, &self.proof_eval_vars_at_ry, &self.mipp_proof, @@ -616,7 +621,13 @@ mod tests { let mut verifier_transcript = PoseidonTranscript::new(¶ms.clone()); assert!(verifer_proof - .verify(&input, &inst_evals, &mut verifier_transcript, &gens) + .verify( + (rx, ry), + &input, + &inst_evals, + &mut verifier_transcript, + &gens + ) .is_ok()); } } diff --git a/src/testudo_nizk.rs b/src/testudo_nizk.rs index d0426789..a8063f15 100644 --- a/src/testudo_nizk.rs +++ b/src/testudo_nizk.rs @@ -126,7 +126,7 @@ where // Verifies the satisfiability proof for the R1CS instance. In NIZK mode, the // verifier evaluates matrices A, B and C themselves, which is a linear - // operation and hence this is not a SNARK. + // operation and hence this is not a SNARK. // However, for highly structured circuits this operation is fast. 
pub fn verify( &self, @@ -141,6 +141,7 @@ where let inst_evals = inst.inst.evaluate(claimed_rx, claimed_ry); let sat_verified = self.r1cs_verifier_proof.verify( + (claimed_rx.clone(), claimed_ry.clone()), &input.assignment, &inst_evals, transcript, diff --git a/src/testudo_snark.rs b/src/testudo_snark.rs index fee87a57..2a665fa3 100644 --- a/src/testudo_snark.rs +++ b/src/testudo_snark.rs @@ -202,8 +202,11 @@ where transcript: &mut PoseidonTranscript, _poseidon: PoseidonConfig, ) -> Result { + let (rx, ry) = &self.r; + let timer_sat_verification = Timer::new("r1cs_sat_verification"); let sat_verified = self.r1cs_verifier_proof.verify( + (rx.clone(), ry.clone()), &input.assignment, &self.inst_evals, transcript, @@ -217,7 +220,6 @@ where transcript.append_scalar(b"", Br); transcript.append_scalar(b"", Cr); - let (rx, ry) = &self.r; let timer_eval_verification = Timer::new("r1cs_eval_verification"); let eval_verified = self.r1cs_eval_proof.verify( &comm.comm, From df9e890e68fc622008727c998cafa655320def95 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Fri, 10 Mar 2023 10:23:45 +0000 Subject: [PATCH 48/64] add support for odd case in sqrt_pst --- Cargo.toml | 2 +- benches/testudo.rs | 2 +- src/dense_mlpoly.rs | 6 ++-- src/mipp.rs | 2 +- src/sqrt_pst.rs | 73 +++++++++++++++++++-------------------------- 5 files changed, 37 insertions(+), 48 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 529316f9..a5ac867f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -72,7 +72,7 @@ parallel = [ "std", "ark-ff/parallel", "ark-std/parallel", "ark-ec/parallel", "a std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-serialize/std"] [patch.crates-io] -ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/pst_on_g2"} +ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/variable-crs"} ark-groth16 = { git = "https://github.com/arkworks-rs/groth16" } blstrs = { git = 
"https://github.com/nikkolasg/blstrs", branch = "feat/arkwork" } ark-ec = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } diff --git a/benches/testudo.rs b/benches/testudo.rs index 413429da..ffab6bc6 100644 --- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -52,7 +52,7 @@ where E::ScalarField: Absorb, { let mut writer = csv::Writer::from_path(file_name).expect("unable to open csv writer"); - for &s in [4, 10, 12, 14, 16, 18, 20, 22, 24, 26].iter() { + for &s in [4, 5, 10, 12, 14, 16, 18, 20, 22, 24, 26].iter() { println!("Running for {} inputs", s); let mut br = BenchmarkResults::default(); let num_vars = (2_usize).pow(s as u32); diff --git a/src/dense_mlpoly.rs b/src/dense_mlpoly.rs index 4dce01f7..a53315fd 100644 --- a/src/dense_mlpoly.rs +++ b/src/dense_mlpoly.rs @@ -185,12 +185,12 @@ impl PolyCommitmentGens { pub fn setup(num_vars: usize, label: &'static [u8]) -> PolyCommitmentGens { let (_left, right) = EqPolynomial::::compute_factored_lens(num_vars); let gens = DotProductProofGens::new(right.pow2(), label); - + let odd = if num_vars % 2 == 1 { 1 } else { 0 }; // Generates the SRS and trims it based on the number of variables in the // multilinear polynomial. 
let mut rng = ark_std::test_rng(); - let pst_gens = MultilinearPC::::setup(num_vars / 2, &mut rng); - let (ck, vk) = MultilinearPC::::trim(&pst_gens, num_vars / 2); + let pst_gens = MultilinearPC::::setup(num_vars / 2 + odd, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&pst_gens, num_vars / 2 + odd); PolyCommitmentGens { gens, ck, vk } } diff --git a/src/mipp.rs b/src/mipp.rs index 77da290e..93f7a9c1 100644 --- a/src/mipp.rs +++ b/src/mipp.rs @@ -304,7 +304,7 @@ impl MippProof { let check_u = ref_final_res.uc == final_u; assert!(check_u == true); - check_h & check_u & check_t + check_h & check_u } } diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index cc167f81..8810cb30 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -13,7 +13,7 @@ use crate::{ pub struct Polynomial { m: usize, - odd: bool, + odd: usize, polys: Vec>, q: Option>, chis_b: Option>, @@ -30,15 +30,18 @@ impl Polynomial { let pl_timer = Timer::new("poly_list_build"); // check the evaluation list is a power of 2 debug_assert!(Z.len() & (Z.len() - 1) == 0); - let m_col = Z.len().log_2() / 2; - let m_row = if Z.len().log_2() % 2 == 0 { + + let num_vars = Z.len().log_2(); + let m_col = num_vars / 2; + let m_row = if num_vars % 2 == 0 { Z.len().log_2() / 2 } else { Z.len().log_2() / 2 + 1 }; - println!("{:?}", m_row); + let pow_m_col = 2_usize.pow(m_col as u32); let pow_m_row = 2_usize.pow(m_row as u32); + let polys: Vec> = (0..pow_m_col) .into_par_iter() .map(|i| { @@ -46,20 +49,22 @@ impl Polynomial { .into_par_iter() // viewing the list of evaluation as a square matrix // we select by row j and column i - - // to handle the odd case, we add another row to the matrix - // + // to handle the odd case, we add another row to the matrix i.e. 
+ // we add an extra variable to the polynomials while keeping their + // number the same .map(|j| Z[(j << m_col) | i]) .collect(); DensePolynomial::new(z) }) .collect(); + debug_assert!(polys.len() == pow_m_col); debug_assert!(polys[0].len == pow_m_row); + pl_timer.stop(); Self { m: m_col, - odd: if m_row % 2 == 0 { false } else { true }, + odd: if num_vars % 2 == 1 { 1 } else { 0 }, polys, q: None, chis_b: None, @@ -72,9 +77,9 @@ impl Polynomial { // and p(a,b) = q(a) where p is the initial polynomial fn get_q(&mut self, point: &[E::ScalarField]) { let q_timer = Timer::new("build_q"); - let odd = if self.odd { 1 } else { 0 }; - debug_assert!(point.len() == 2 * self.m + odd); - let b = &point[self.m + odd..]; + + debug_assert!(point.len() == 2 * self.m + self.odd); + let b = &point[self.m + self.odd..]; let pow_m = 2_usize.pow(self.m as u32); let chis: Vec = (0..pow_m) @@ -82,7 +87,7 @@ impl Polynomial { .map(|i| Self::get_chi_i(b, i)) .collect(); - let z_q: Vec = (0..(pow_m * 2_usize.pow(odd as u32))) + let z_q: Vec = (0..(pow_m * 2_usize.pow(self.odd as u32))) .into_par_iter() .map(|j| (0..pow_m).map(|i| self.polys[i].Z[j] * chis[i]).sum()) .collect(); @@ -95,8 +100,7 @@ impl Polynomial { // Given point = (\vec{a}, \vec{b}) used to construct q // compute q(a) = p(a,b). 
pub fn eval(&mut self, point: &[E::ScalarField]) -> E::ScalarField { - let odd = if self.odd { 1 } else { 0 }; - let a = &point[0..point.len() / 2 + odd]; + let a = &point[0..point.len() / 2 + self.odd]; if self.q.is_none() { self.get_q(point); } @@ -109,9 +113,7 @@ impl Polynomial { pub fn commit(&self, ck: &CommitterKey) -> (Vec>, E::TargetField) { let timer_commit = Timer::new("sqrt_commit"); - let timer_list = Timer::new("comm_list"); - // commit to each of the sqrt sized p_i let comm_list: Vec> = self .polys @@ -120,8 +122,7 @@ impl Polynomial { .collect(); timer_list.stop(); - let mut h_vec = ck.powers_of_h[0].clone(); - h_vec = h_vec[..comm_list.len()].to_vec(); + let h_vec = ck.powers_of_h[self.odd].clone(); assert!(comm_list.len() == h_vec.len()); let ipp_timer = Timer::new("ipp"); @@ -169,8 +170,7 @@ impl Polynomial { point: &[E::ScalarField], t: &E::TargetField, ) -> (Commitment, Proof, MippProof) { - let odd = if self.odd { 1 } else { 0 }; - let a = &point[0..self.m + 1]; + let a = &point[0..self.m + self.odd]; if self.q.is_none() { self.get_q(point); } @@ -201,8 +201,7 @@ impl Polynomial { }; let comm = MultilinearPC::::commit(ck, &q); debug_assert!(c_u == comm.g_product); - let mut h_vec = ck.powers_of_h[0].clone(); - h_vec = h_vec[..comm_list.len()].to_vec(); + let h_vec = ck.powers_of_h[self.odd].clone(); // construct MIPP proof that U is the inner product of the vector A // and the vector y, where A is the opening vector to T @@ -275,7 +274,7 @@ mod tests { #[test] fn check_sqrt_poly_eval() { let mut rng = ark_std::test_rng(); - let num_vars = 5; + let num_vars = 6; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) @@ -293,9 +292,16 @@ mod tests { } #[test] - fn check_new_poly_commit() { + fn check_commit() { + // check odd case + check_sqrt_poly_commit(5); + + // check even case + check_sqrt_poly_commit(6); + } + + fn check_sqrt_poly_commit(num_vars: u32) { let mut 
rng = ark_std::test_rng(); - let num_vars = 5; let len = 2_usize.pow(num_vars); let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); let r: Vec = (0..num_vars) @@ -331,21 +337,4 @@ mod tests { ); assert!(res == true); } - - #[test] - fn test_odd() { - let mut rng = ark_std::test_rng(); - let num_vars = 5; - let len = 2_usize.pow(num_vars); - let Z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); - let r: Vec = (0..num_vars) - .into_iter() - .map(|_| F::rand(&mut rng)) - .collect(); - - // let gens = MultilinearPC::::setup(2, &mut rng); - // let (ck, vk) = MultilinearPC::::trim(&gens, 2); - - let mut pl: Polynomial = Polynomial::from_evaluations(&Z.clone()); - } } From a26b9d050c96fb7236195583543b2327ffde7b88 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Mon, 13 Mar 2023 15:39:31 +0000 Subject: [PATCH 49/64] fix typo in comment --- src/sqrt_pst.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index e2afe340..e4aec374 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -26,7 +26,7 @@ impl Polynomial { // where p(X,Y) = \sum_{i \in \{0,\1}^m} // (\sum_{j \in \{0, 1\}^{m}} p(j, i) * \chi_j(X)) * \chi_i(Y) // and m is n/2. - // To handle the case in which m is odd, the number of variables in the + // To handle the case in which n is odd, the number of variables in the // sqrt-sized polynomials will be increased by a factor of 2 (i.e. 2^{m+1}) // while the number of polynomials remains the same (i.e. 
2^m) pub fn from_evaluations(Z: &[E::ScalarField]) -> Self { From 2164b542b41bdf684f27f1460ba824a7c33e3b20 Mon Sep 17 00:00:00 2001 From: maramihali Date: Wed, 22 Mar 2023 18:49:55 +0000 Subject: [PATCH 50/64] Documentation #31 --- src/r1csinstance.rs | 5 +++++ src/sparse_mlpoly.rs | 13 +++++++++++++ 2 files changed, 18 insertions(+) diff --git a/src/r1csinstance.rs b/src/r1csinstance.rs index ede46c1b..c961b436 100644 --- a/src/r1csinstance.rs +++ b/src/r1csinstance.rs @@ -314,6 +314,9 @@ impl R1CSInstance { &self, gens: &R1CSCommitmentGens, ) -> (R1CSCommitment, R1CSDecommitment) { + // Noting that matrices A, B and C are sparse, produces a combined + // dense polynomial from the non-zero entries that we commit to. This + // represents the computational commitment. let (comm, dense) = SparseMatPolynomial::multi_commit(&[&self.A, &self.B, &self.C], &gens.gens); let r1cs_comm = R1CSCommitment { num_cons: self.num_cons, @@ -322,6 +325,8 @@ impl R1CSInstance { comm, }; + // The decommitment is used by the prover to convince the verifier + // that the received openings of A, B and C are correct. let r1cs_decomm = R1CSDecommitment { dense }; (r1cs_comm, r1cs_decomm) diff --git a/src/sparse_mlpoly.rs b/src/sparse_mlpoly.rs index c3340067..17f6aa09 100644 --- a/src/sparse_mlpoly.rs +++ b/src/sparse_mlpoly.rs @@ -20,6 +20,8 @@ use ark_serialize::*; use core::cmp::Ordering; #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)] +// Each SparseMatEntry is a tuple (row, col, val) representing a non-zero value +// in an R1CS matrix. 
pub struct SparseMatEntry { row: usize, col: usize, @@ -33,9 +35,11 @@ impl SparseMatEntry { } #[derive(Debug, CanonicalSerialize, CanonicalDeserialize, Clone)] +// The sparse multilinear representation of an R1CS matrix of size x*y pub struct SparseMatPolynomial { num_vars_x: usize, num_vars_y: usize, + // The non-zero entries in the matrix, represented by the tuple (row, col, val) M: Vec>, } @@ -346,6 +350,7 @@ impl SparseMatPolynomial { } } + // get the number of non-zero entries in a sparse R1CS matrix pub fn get_num_nz_entries(&self) -> usize { self.M.len().next_power_of_two() } @@ -364,6 +369,7 @@ impl SparseMatPolynomial { (ops_row, ops_col, val) } + // Produce the dense representation of sparse matrices A, B and C. fn multi_sparse_to_dense_rep( sparse_polys: &[&SparseMatPolynomial], ) -> MultiSparseMatPolynomialAsDense { @@ -384,11 +390,17 @@ impl SparseMatPolynomial { let mut val_vec: Vec> = Vec::new(); for poly in sparse_polys { let (ops_row, ops_col, val) = poly.sparse_to_dense_vecs(N); + // aggregate all the rows and columns that contain non-zero values in the + // three matrices ops_row_vec.push(ops_row); ops_col_vec.push(ops_col); + // create dense polynomials, in Lagrange representation, for the non-zero + // values of each matrix val_vec.push(DensePolynomial::new(val)); } + // Note: everything else from + let any_poly = &sparse_polys[0]; let num_mem_cells = if any_poly.num_vars_x > any_poly.num_vars_y { @@ -401,6 +413,7 @@ impl SparseMatPolynomial { let col = AddrTimestamps::new(num_mem_cells, N, ops_col_vec); // combine polynomials into a single polynomial for commitment purposes + // this is done because the commitment used has a public setup let comb_ops = DensePolynomial::merge( row .ops_addr From 661214d449dc899cb87ee3ef800100e176356e89 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Mon, 13 Mar 2023 15:39:31 +0000 Subject: [PATCH 51/64] fix typo in comment --- Cargo.toml | 35 ++++++++++++++++++----------------- src/sqrt_pst.rs | 2 +- 2 files 
changed, 19 insertions(+), 18 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index a5ac867f..8930a954 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,21 +18,21 @@ itertools = "0.10.0" colored = "2.0.0" thiserror = "1.0" json = "0.12.4" -ark-ff = { version = "^0.4.0", default-features = false } -ark-ec = { version = "^0.4.0", default-features = false } -ark-std = { version = "^0.4.0"} -ark-bls12-377 = { version = "^0.4.0", features = ["r1cs","curve"] } -ark-bls12-381 = { version = "^0.4.0", features = ["curve"] } +ark-ff = { version = "0.4.0", default-features = false } +ark-ec = { version = "0.4.0", default-features = false } +ark-std = { version = "0.4.0"} +ark-bls12-377 = { version = "0.4.0", features = ["r1cs","curve"] } +ark-bls12-381 = { version = "0.4.0", features = ["curve"] } ark-blst = { git = "https://github.com/nikkolasg/ark-blst" } -ark-serialize = { version = "^0.4.0", features = ["derive"] } -ark-crypto-primitives = {version = "^0.4.0", features = ["sponge","r1cs","snark"] } -ark-r1cs-std = { version = "^0.4.0", default-features = false } -ark-relations = { version = "^0.4.0", default-features = false, optional = true } -ark-snark = { version = "^0.4.0", default-features = false } -ark-groth16 = { version = "^0.3.0" } -ark-bw6-761 = { version = "^0.4.0" } -ark-poly-commit = { version = "^0.4.0" } -ark-poly = {version = "^0.4.0"} +ark-serialize = { version = "0.4.0", features = ["derive"] } +ark-crypto-primitives = {version = "0.4.0", features = ["sponge","r1cs","snark"] } +ark-r1cs-std = { version = "0.4.0", default-features = false } +ark-relations = { version = "0.4.0", default-features = false, optional = true } +ark-snark = { version = "0.4.0", default-features = false } +ark-groth16 = { version = "0.3.0" } +ark-bw6-761 = { version = "0.4.0" } +ark-poly-commit = { version = "0.4.0" } +ark-poly = {version = "0.4.0"} poseidon-paramgen = { git = "https://github.com/nikkolasg/poseidon377", branch = "feat/v0.4" } poseidon-parameters = { git = 
"https://github.com/nikkolasg/poseidon377", branch = "feat/v0.4" } @@ -75,6 +75,7 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/variable-crs"} ark-groth16 = { git = "https://github.com/arkworks-rs/groth16" } blstrs = { git = "https://github.com/nikkolasg/blstrs", branch = "feat/arkwork" } -ark-ec = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } -ark-ff = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } -ark-serialize = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } \ No newline at end of file +ark-ec = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } +ark-ff = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } +ark-poly = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } +ark-serialize = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } \ No newline at end of file diff --git a/src/sqrt_pst.rs b/src/sqrt_pst.rs index e2afe340..e4aec374 100644 --- a/src/sqrt_pst.rs +++ b/src/sqrt_pst.rs @@ -26,7 +26,7 @@ impl Polynomial { // where p(X,Y) = \sum_{i \in \{0,\1}^m} // (\sum_{j \in \{0, 1\}^{m}} p(j, i) * \chi_j(X)) * \chi_i(Y) // and m is n/2. - // To handle the case in which m is odd, the number of variables in the + // To handle the case in which n is odd, the number of variables in the // sqrt-sized polynomials will be increased by a factor of 2 (i.e. 2^{m+1}) // while the number of polynomials remains the same (i.e. 
2^m) pub fn from_evaluations(Z: &[E::ScalarField]) -> Self { From 425c2747af2c9c83f85153ef111a7910a980b055 Mon Sep 17 00:00:00 2001 From: maramihali Date: Wed, 22 Mar 2023 20:38:09 +0000 Subject: [PATCH 52/64] Fix Cargo.toml and add benchmark for sqrt pst (#34) * add benchmark for sqrt pst * fix typo in comment --- Cargo.toml | 13 ++++-- benches/pst.rs | 98 +++++++++++++++++++++++++++++++++++++++++++++ benches/testudo.rs | 4 +- examples/cubic.rs | 4 +- profiler/testudo.rs | 8 ++-- src/lib.rs | 2 +- 6 files changed, 116 insertions(+), 13 deletions(-) create mode 100644 benches/pst.rs diff --git a/Cargo.toml b/Cargo.toml index a5ac867f..cf8de899 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,7 +52,7 @@ csv = "1.1.5" criterion = "0.3.6" [lib] -name = "libspartan" +name = "libtestudo" path = "src/lib.rs" [[bin]] @@ -63,6 +63,10 @@ path = "profiler/testudo.rs" name = "testudo" harness = false +[[bench]] +name = "pst" +harness = false + [features] multicore = ["rayon"] profile = [] @@ -75,6 +79,7 @@ std = ["ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std", "ark-seri ark-poly-commit = {git = "https://github.com/cryptonetlab/ark-polycommit", branch="feat/variable-crs"} ark-groth16 = { git = "https://github.com/arkworks-rs/groth16" } blstrs = { git = "https://github.com/nikkolasg/blstrs", branch = "feat/arkwork" } -ark-ec = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } -ark-ff = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } -ark-serialize = { git = "https://github.com/vmx/algebra", branch = "affine-repr-xy-owned" } \ No newline at end of file +ark-ec = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } +ark-ff = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } +ark-poly = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } +ark-serialize = { git = "https://github.com/vmx/algebra", branch="affine-repr-xy-owned" } \ No newline at end 
of file diff --git a/benches/pst.rs b/benches/pst.rs new file mode 100644 index 00000000..a9b821a0 --- /dev/null +++ b/benches/pst.rs @@ -0,0 +1,98 @@ +use std::time::Instant; + +use ark_poly_commit::multilinear_pc::MultilinearPC; +use ark_serialize::CanonicalSerialize; +use libtestudo::{ + parameters::PoseidonConfiguration, poseidon_transcript::PoseidonTranscript, sqrt_pst::Polynomial, +}; +use serde::Serialize; +type F = ark_bls12_377::Fr; +type E = ark_bls12_377::Bls12_377; +use ark_std::UniformRand; + +#[derive(Default, Clone, Serialize)] +struct BenchmarkResults { + power: usize, + commit_time: u128, + opening_time: u128, + verification_time: u128, + proof_size: usize, + commiter_key_size: usize, +} +fn main() { + let params = ark_bls12_377::Fr::poseidon_params(); + + let mut writer = csv::Writer::from_path("sqrt_pst.csv").expect("unable to open csv writer"); + for &s in [4, 5, 20, 27].iter() { + println!("Running for {} inputs", s); + let mut rng = ark_std::test_rng(); + let mut br = BenchmarkResults::default(); + br.power = s; + let num_vars = s; + let len = 2_usize.pow(num_vars as u32); + let z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); + let r: Vec = (0..num_vars) + .into_iter() + .map(|_| F::rand(&mut rng)) + .collect(); + + let setup_vars = (num_vars as f32 / 2.0).ceil() as usize; + let gens = MultilinearPC::::setup((num_vars as f32 / 2.0).ceil() as usize, &mut rng); + let (ck, vk) = MultilinearPC::::trim(&gens, setup_vars); + + let mut cks = Vec::::new(); + ck.serialize_with_mode(&mut cks, ark_serialize::Compress::Yes) + .unwrap(); + br.commiter_key_size = cks.len(); + + let mut pl = Polynomial::from_evaluations(&z.clone()); + + let v = pl.eval(&r); + + let start = Instant::now(); + let (comm_list, t) = pl.commit(&ck); + let duration = start.elapsed().as_millis(); + br.commit_time = duration; + + let mut prover_transcript = PoseidonTranscript::new(¶ms); + + let start = Instant::now(); + let (u, pst_proof, mipp_proof) = 
pl.open(&mut prover_transcript, comm_list, &ck, &r, &t); + let duration = start.elapsed().as_millis(); + br.opening_time = duration; + + let mut p1 = Vec::::new(); + let mut p2 = Vec::::new(); + pst_proof + .serialize_with_mode(&mut p1, ark_serialize::Compress::Yes) + .unwrap(); + + mipp_proof + .serialize_with_mode(&mut p2, ark_serialize::Compress::Yes) + .unwrap(); + + br.proof_size = p1.len() + p2.len(); + + let mut verifier_transcript = PoseidonTranscript::new(¶ms); + + let start = Instant::now(); + let res = Polynomial::verify( + &mut verifier_transcript, + &vk, + &u, + &r, + v, + &pst_proof, + &mipp_proof, + &t, + ); + let duration = start.elapsed().as_millis(); + br.verification_time = duration; + assert!(res == true); + + writer + .serialize(br) + .expect("unable to write results to csv"); + writer.flush().expect("wasn't able to flush"); + } +} diff --git a/benches/testudo.rs b/benches/testudo.rs index ffab6bc6..bd9cc75a 100644 --- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -5,8 +5,8 @@ use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_ff::PrimeField; use ark_serialize::*; -use libspartan::parameters::PoseidonConfiguration; -use libspartan::{ +use libtestudo::parameters::PoseidonConfiguration; +use libtestudo::{ poseidon_transcript::PoseidonTranscript, testudo_snark::{TestudoSnark, TestudoSnarkGens}, Instance, diff --git a/examples/cubic.rs b/examples/cubic.rs index ca777f01..dcc69eb5 100644 --- a/examples/cubic.rs +++ b/examples/cubic.rs @@ -11,8 +11,8 @@ use ark_ec::pairing::Pairing; use ark_ff::{BigInteger, PrimeField}; use ark_std::{One, UniformRand, Zero}; -use libspartan::testudo_snark::{TestudoSnark, TestudoSnarkGens}; -use libspartan::{ +use libtestudo::testudo_snark::{TestudoSnark, TestudoSnarkGens}; +use libtestudo::{ parameters::poseidon_params, poseidon_transcript::PoseidonTranscript, InputsAssignment, Instance, VarsAssignment, }; diff --git a/profiler/testudo.rs b/profiler/testudo.rs index f56d1335..92f7c267 
100644 --- a/profiler/testudo.rs +++ b/profiler/testudo.rs @@ -1,16 +1,16 @@ #![allow(non_snake_case)] #![allow(clippy::assertions_on_result_states)] -extern crate libspartan; +extern crate libtestudo; extern crate merlin; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_ff::PrimeField; use ark_serialize::*; -use libspartan::parameters::PoseidonConfiguration; -use libspartan::poseidon_transcript::PoseidonTranscript; -use libspartan::{ +use libtestudo::parameters::PoseidonConfiguration; +use libtestudo::poseidon_transcript::PoseidonTranscript; +use libtestudo::{ testudo_snark::{TestudoSnark, TestudoSnarkGens}, Instance, }; diff --git a/src/lib.rs b/src/lib.rs index b1aca3a4..44893a94 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -28,7 +28,7 @@ mod product_tree; mod r1csinstance; mod r1csproof; mod sparse_mlpoly; -mod sqrt_pst; +pub mod sqrt_pst; mod sumcheck; pub mod testudo_nizk; pub mod testudo_snark; From af03d8f778def5b74da2c6c979364d3fde2b81a1 Mon Sep 17 00:00:00 2001 From: Mara Mihali Date: Wed, 22 Mar 2023 20:51:57 +0000 Subject: [PATCH 53/64] add README --- README.md | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index b194ce2b..992057ed 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,25 @@ -# Testudo: Spartan + Groth16 +# Testudo -TODO documentation +Testudo is a linear-time prover SNARK with a small and universal trusted setup. For a deep dive, please refer to [this](https://www.notion.so/pl-strflt/Testudo-Blog-Post-Final-a18db71f8e634ebbb9f68383f7904c51) blog post. 
+ +In the current stage, the repository contains: + +- a modified version of [Spartan](https://github.com/microsoft/Spartan) using [arkworks](https://github.com/arkworks-rs) with the sumchecks verified using Groth16 +- a fast version of the [PST](https://eprint.iacr.org/2011/587.pdf) commitment scheme with a square-root trusted setup +- support for an arkworks wrapper around the fast blst library with GPU integration [repo](https://github.com/nikkolasg/ark-blst) + +## Building `testudo` + +Testudo is available with stable Rust. + +Run `cargo build` or `cargo test` to build, respectively test the repository. + +To run the current benchmarks on BLS12-377: + +``` +cargo bench --bench testudo --all-features release -- --nocapture +``` + +## Join us! + +If you want to contribute, reach out to the Discord server of [cryptonet](https://discord.com/invite/CFnTSkVTCk). From 8e45a8f2ee6405b9784d08aeb41c903d1e1951a6 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Wed, 22 Mar 2023 22:36:01 +0100 Subject: [PATCH 54/64] comment from readme not executing --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d82e0e63..fd7e65c2 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ Run `cargo build` or `cargo test` to build, respectively test the repository. 
To run the current benchmarks on BLS12-377: -``` +```console cargo bench --bench testudo --all-features release -- --nocapture ``` From c820325d24f07a2255abac0c28a4b3e0dd3eada8 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Mon, 5 Jun 2023 21:15:27 +0200 Subject: [PATCH 55/64] adding groth16 comparison --- benches/testudo.rs | 84 +++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 76 insertions(+), 8 deletions(-) diff --git a/benches/testudo.rs b/benches/testudo.rs index bd9cc75a..e6007566 100644 --- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -1,9 +1,16 @@ +use std::marker::PhantomData; use std::time::Instant; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_ff::PrimeField; +use ark_groth16::prepare_verifying_key; +use ark_groth16::Groth16; +use ark_r1cs_std::fields::fp::FpVar; +use ark_r1cs_std::prelude::AllocVar; +use ark_relations::r1cs::ConstraintSynthesizer; +use ark_relations::r1cs::ConstraintSystem; use ark_serialize::*; use libtestudo::parameters::PoseidonConfiguration; use libtestudo::{ @@ -12,6 +19,42 @@ use libtestudo::{ Instance, }; use serde::Serialize; +use std::ops::Mul; + +fn main() { + bench_with_bls12_377(); + // bench_with_bls12_381(); + // bench_with_ark_blst(); +} +struct GrothCircuit { + n_constraints: usize, + _p: PhantomData, +} + +impl GrothCircuit { + pub fn new(n_constraints: usize) -> Self { + GrothCircuit { + n_constraints, + _p: PhantomData, + } + } +} + +impl ConstraintSynthesizer for GrothCircuit { + fn generate_constraints( + self, + cs: ark_relations::r1cs::ConstraintSystemRef, + ) -> ark_relations::r1cs::Result<()> { + let a = F::rand(&mut rand::thread_rng()); + let b = F::rand(&mut rand::thread_rng()); + let av = FpVar::new_witness(cs.clone(), || Ok(a))?; + let bv = FpVar::new_witness(cs.clone(), || Ok(b))?; + for _ in 0..self.n_constraints { + av.clone().mul(bv.clone()); + } + Ok(()) + } +} #[derive(Default, Clone, 
Serialize)] struct BenchmarkResults { @@ -22,12 +65,7 @@ struct BenchmarkResults { sat_proof_size: usize, eval_proof_size: usize, total_proof_size: usize, -} - -fn main() { - bench_with_bls12_377(); - // bench_with_bls12_381(); - // bench_with_ark_blst(); + g16_proving_time: u128, } fn bench_with_ark_blst() { @@ -52,8 +90,9 @@ where E::ScalarField: Absorb, { let mut writer = csv::Writer::from_path(file_name).expect("unable to open csv writer"); - for &s in [4, 5, 10, 12, 14, 16, 18, 20, 22, 24, 26].iter() { - println!("Running for {} inputs", s); + for &s in [5, 10, 15, 20, 24, 26].iter() { + //for &s in [4].iter() { + println!("Running for {} constraints", s); let mut br = BenchmarkResults::default(); let num_vars = (2_usize).pow(s as u32); let num_cons = num_vars; @@ -119,9 +158,38 @@ where let duration = start.elapsed().as_millis(); br.testudo_verification_time = duration; + groth16_bench::(num_cons, &mut br); writer .serialize(br) .expect("unable to write results to csv"); writer.flush().expect("wasn't able to flush"); } } + +fn groth16_bench(n_constraints: usize, res: &mut BenchmarkResults) { + let params = { + let c = GrothCircuit::::new(n_constraints); + Groth16::::generate_random_parameters_with_reduction(c, &mut rand::thread_rng()).unwrap() + }; + let pvk = prepare_verifying_key(¶ms.vk); + println!("Running G16 proving for {} constraints", n_constraints); + let number_constraints = { + let circuit = GrothCircuit::::new(n_constraints); + let cs = ConstraintSystem::::new_ref(); + circuit.generate_constraints(cs.clone()).unwrap(); + cs.num_constraints() as u64 + }; + assert_eq!(number_constraints as usize, n_constraints); + let start = Instant::now(); + let proof = Groth16::::create_random_proof_with_reduction( + GrothCircuit::::new(n_constraints), + ¶ms, + &mut rand::thread_rng(), + ) + .expect("proof creation failed"); + let proving_time = start.elapsed().as_millis(); + res.g16_proving_time = proving_time; + + let r = Groth16::::verify_proof(&pvk, &proof, 
&[]).unwrap(); + assert!(r); +} From f2b224cf87c61892916e6b86194c77002ca9f548 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 11:24:02 +0200 Subject: [PATCH 56/64] reduced benchmark size --- benches/testudo.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benches/testudo.rs b/benches/testudo.rs index e6007566..5d5d97cf 100644 --- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -90,7 +90,7 @@ where E::ScalarField: Absorb, { let mut writer = csv::Writer::from_path(file_name).expect("unable to open csv writer"); - for &s in [5, 10, 15, 20, 24, 26].iter() { + for &s in [5, 10, 15, 20, 24].iter() { //for &s in [4].iter() { println!("Running for {} constraints", s); let mut br = BenchmarkResults::default(); From a374ea8d4d9900e43ca83bee6d81151210d3927f Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 11:55:14 +0200 Subject: [PATCH 57/64] do not verify for blst because of zero MSM bug --- benches/testudo.rs | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/benches/testudo.rs b/benches/testudo.rs index 5d5d97cf..36e57dd1 100644 --- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -22,9 +22,9 @@ use serde::Serialize; use std::ops::Mul; fn main() { - bench_with_bls12_377(); + // bench_with_bls12_377(); // bench_with_bls12_381(); - // bench_with_ark_blst(); + bench_with_ark_blst(); } struct GrothCircuit { n_constraints: usize, @@ -70,20 +70,20 @@ struct BenchmarkResults { fn bench_with_ark_blst() { let params = ark_blst::Scalar::poseidon_params(); - testudo_snark_bench::(params, "testudo_blst"); + testudo_snark_bench::(params, "testudo_blst", false); } fn bench_with_bls12_377() { let params = ark_bls12_377::Fr::poseidon_params(); - testudo_snark_bench::(params, "testudo_bls12_377"); + testudo_snark_bench::(params, "testudo_bls12_377", true); } fn bench_with_bls12_381() { let params = ark_bls12_381::Fr::poseidon_params(); - testudo_snark_bench::(params, "testudo_bls12_381"); 
+ testudo_snark_bench::(params, "testudo_bls12_381", true); } -fn testudo_snark_bench(params: PoseidonConfig, file_name: &str) +fn testudo_snark_bench(params: PoseidonConfig, file_name: &str, verify: bool) where E: Pairing, E::ScalarField: PrimeField, @@ -147,16 +147,18 @@ where let mut verifier_transcript = PoseidonTranscript::new(¶ms.clone()); let start = Instant::now(); - let res = proof.verify( - &gens, - &comm, - &inputs, - &mut verifier_transcript, - params.clone(), - ); - assert!(res.is_ok()); - let duration = start.elapsed().as_millis(); - br.testudo_verification_time = duration; + if verify { + let res = proof.verify( + &gens, + &comm, + &inputs, + &mut verifier_transcript, + params.clone(), + ); + assert!(res.is_ok()); + let duration = start.elapsed().as_millis(); + br.testudo_verification_time = duration; + } groth16_bench::(num_cons, &mut br); writer From 8312fd1daf9ec2d3d01d014434e3780930593e53 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 12:28:46 +0200 Subject: [PATCH 58/64] adding pst comparison --- Cargo.toml | 2 +- benches/{pst.rs => commitment.rs} | 58 ++++++++++++++++++++++++++----- 2 files changed, 50 insertions(+), 10 deletions(-) rename benches/{pst.rs => commitment.rs} (57%) diff --git a/Cargo.toml b/Cargo.toml index 11a37363..9cc8fd79 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -64,7 +64,7 @@ name = "testudo" harness = false [[bench]] -name = "pst" +name = "commitment" harness = false [features] diff --git a/benches/pst.rs b/benches/commitment.rs similarity index 57% rename from benches/pst.rs rename to benches/commitment.rs index a9b821a0..d25a317b 100644 --- a/benches/pst.rs +++ b/benches/commitment.rs @@ -1,14 +1,14 @@ use std::time::Instant; +use ark_bls12_377::Bls12_377; +use ark_ec::pairing::Pairing; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::CanonicalSerialize; +use ark_std::UniformRand; use libtestudo::{ parameters::PoseidonConfiguration, poseidon_transcript::PoseidonTranscript, 
sqrt_pst::Polynomial, }; use serde::Serialize; -type F = ark_bls12_377::Fr; -type E = ark_bls12_377::Bls12_377; -use ark_std::UniformRand; #[derive(Default, Clone, Serialize)] struct BenchmarkResults { @@ -18,22 +18,36 @@ struct BenchmarkResults { verification_time: u128, proof_size: usize, commiter_key_size: usize, + pst_commit: u128, + pst_opening: u128, + pst_verification: u128, + pst_proof_size: u128, } fn main() { - let params = ark_bls12_377::Fr::poseidon_params(); + testudo_commitment_benchmark::("testudo_commitment_bls12377.csv"); +} - let mut writer = csv::Writer::from_path("sqrt_pst.csv").expect("unable to open csv writer"); - for &s in [4, 5, 20, 27].iter() { +fn testudo_commitment_benchmark(fname: &str) +where + E::ScalarField: PoseidonConfiguration, +{ + let params = E::ScalarField::poseidon_params(); + let mut writer = csv::Writer::from_path(fname).expect("unable to open csv writer"); + for &s in [4, 5, 15, 20, 15, 27].iter() { println!("Running for {} inputs", s); let mut rng = ark_std::test_rng(); let mut br = BenchmarkResults::default(); br.power = s; let num_vars = s; let len = 2_usize.pow(num_vars as u32); - let z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); - let r: Vec = (0..num_vars) + bench_pst::(num_vars, &mut br); + let z: Vec = (0..len) .into_iter() - .map(|_| F::rand(&mut rng)) + .map(|_| E::ScalarField::rand(&mut rng)) + .collect(); + let r: Vec = (0..num_vars) + .into_iter() + .map(|_| E::ScalarField::rand(&mut rng)) .collect(); let setup_vars = (num_vars as f32 / 2.0).ceil() as usize; @@ -96,3 +110,29 @@ fn main() { writer.flush().expect("wasn't able to flush"); } } + +fn bench_pst(num_vars: usize, res: &mut BenchmarkResults) { + use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; + use ark_poly_commit::multilinear_pc::MultilinearPC; + let params = MultilinearPC::::setup(num_vars, &mut rand::thread_rng()); + let (comkey, vkey) = MultilinearPC::trim(¶ms, num_vars); + let poly = 
DenseMultilinearExtension::rand(num_vars, &mut rand::thread_rng()); + + let start = Instant::now(); + let comm = MultilinearPC::commit(&comkey, &poly); + res.pst_commit = start.elapsed().as_millis(); + + let xs = (0..num_vars) + .map(|_| E::ScalarField::rand(&mut rand::thread_rng())) + .collect::>(); + let y = poly.evaluate(&xs).unwrap(); + let start = Instant::now(); + let proof = MultilinearPC::open(&comkey, &poly, &xs); + res.pst_opening = start.elapsed().as_millis(); + + let start = Instant::now(); + let check = MultilinearPC::check(&vkey, &comm, &xs, y, &proof); + res.pst_verification = start.elapsed().as_millis(); + + assert!(check); +} From 23ac14e64a6ea6df0acf350676ddf115b663d785 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 12:29:29 +0200 Subject: [PATCH 59/64] adding blst --- benches/commitment.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/benches/commitment.rs b/benches/commitment.rs index d25a317b..ecf6cf3f 100644 --- a/benches/commitment.rs +++ b/benches/commitment.rs @@ -25,6 +25,7 @@ struct BenchmarkResults { } fn main() { testudo_commitment_benchmark::("testudo_commitment_bls12377.csv"); + testudo_commitment_benchmark::("testudo_commitment_bls12381.csv"); } fn testudo_commitment_benchmark(fname: &str) From 6ececf52e84d7a80a69cb065a6bda735cae6ba3f Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 15:26:51 +0200 Subject: [PATCH 60/64] right sizes for bench --- benches/commitment.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benches/commitment.rs b/benches/commitment.rs index ecf6cf3f..2adc0ea2 100644 --- a/benches/commitment.rs +++ b/benches/commitment.rs @@ -34,7 +34,7 @@ where { let params = E::ScalarField::poseidon_params(); let mut writer = csv::Writer::from_path(fname).expect("unable to open csv writer"); - for &s in [4, 5, 15, 20, 15, 27].iter() { + for &s in [4, 5, 15, 20, 25].iter() { println!("Running for {} inputs", s); let mut rng = ark_std::test_rng(); let mut br = 
BenchmarkResults::default(); From d8a2e696c9da2920555181aa9009a6b0fbb01a2e Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 16:16:44 +0200 Subject: [PATCH 61/64] adding pst size --- benches/commitment.rs | 4 ++++ benches/testudo.rs | 6 ++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/benches/commitment.rs b/benches/commitment.rs index 2adc0ea2..71f3676c 100644 --- a/benches/commitment.rs +++ b/benches/commitment.rs @@ -135,5 +135,9 @@ fn bench_pst(num_vars: usize, res: &mut BenchmarkResults) { let check = MultilinearPC::check(&vkey, &comm, &xs, y, &proof); res.pst_verification = start.elapsed().as_millis(); + let mut b = Vec::new(); + proof.serialize_compressed(&mut b).unwrap(); + res.pst_proof_size = b.len() as u128; + assert!(check); } diff --git a/benches/testudo.rs b/benches/testudo.rs index 36e57dd1..e949fa2f 100644 --- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -46,11 +46,9 @@ impl ConstraintSynthesizer for GrothCircuit { cs: ark_relations::r1cs::ConstraintSystemRef, ) -> ark_relations::r1cs::Result<()> { let a = F::rand(&mut rand::thread_rng()); - let b = F::rand(&mut rand::thread_rng()); - let av = FpVar::new_witness(cs.clone(), || Ok(a))?; - let bv = FpVar::new_witness(cs.clone(), || Ok(b))?; + let mut av = FpVar::new_witness(cs.clone(), || Ok(a))?; for _ in 0..self.n_constraints { - av.clone().mul(bv.clone()); + let av = av.clone().mul(av.clone()); } Ok(()) } From 0c05f20776304b8196d232c9423e75b717a1bd8c Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 17:43:42 +0200 Subject: [PATCH 62/64] adding groth16 comparison separaletly --- Cargo.toml | 4 ++++ src/r1csproof.rs | 11 ++++++----- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 9cc8fd79..310d5d81 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -67,6 +67,10 @@ harness = false name = "commitment" harness = false +[[bench]] +name = "groth16" +harness = false + [features] multicore = ["rayon"] profile = [] diff 
--git a/src/r1csproof.rs b/src/r1csproof.rs index 92538529..6a21e168 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -569,26 +569,27 @@ mod tests { #[test] fn check_r1cs_proof_ark_blst() { let params = ark_blst::Scalar::poseidon_params(); - check_r1cs_proof::(params); + check_r1cs_proof::(10, params); } #[test] fn check_r1cs_proof_bls12_377() { let params = ark_bls12_377::Fr::poseidon_params(); - check_r1cs_proof::(params); + check_r1cs_proof::(10, params); } #[test] fn check_r1cs_proof_bls12_381() { let params = ark_bls12_381::Fr::poseidon_params(); - check_r1cs_proof::(params); + check_r1cs_proof::(10, params); } - fn check_r1cs_proof

(params: PoseidonConfig) + fn check_r1cs_proof

(size: usize, params: PoseidonConfig) where P: Pairing, P::ScalarField: PrimeField, P::ScalarField: Absorb, { - let num_vars = 1024; + + let num_vars = (2_usize).pow(size as u32); let num_cons = num_vars; let num_inputs = 3; let (inst, vars, input) = From e4a41a04116ed2c2dd11cd77e05a5262f447206f Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 17:44:19 +0200 Subject: [PATCH 63/64] add file groth16 --- benches/groth16.rs | 84 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 benches/groth16.rs diff --git a/benches/groth16.rs b/benches/groth16.rs new file mode 100644 index 00000000..57d54bd1 --- /dev/null +++ b/benches/groth16.rs @@ -0,0 +1,84 @@ +use ark_ec::pairing::Pairing; +use ark_ff::PrimeField; +use ark_groth16::prepare_verifying_key; +use ark_groth16::Groth16; +use ark_r1cs_std::alloc::AllocVar; +use ark_r1cs_std::fields::fp::FpVar; +use ark_relations::r1cs::ConstraintSynthesizer; +use ark_relations::r1cs::ConstraintSystem; +use ark_std::marker::PhantomData; +use ark_std::time::Instant; +use serde::Serialize; +use std::ops::Mul; +#[derive(Default, Clone, Serialize)] +struct BenchmarkResults { + power: usize, + input_constraints: usize, + g16_proving_time: u128, +} + +fn main() { + let n = 10; + let nconstraints = (2_usize).pow(n as u32); + let mut res = BenchmarkResults::default(); + groth16_bench::(nconstraints, &mut res); + let mut writer = csv::Writer::from_path("groth16.csv").expect("unable to open csv writer"); + writer + .serialize(res) + .expect("unable to write results to csv"); + writer.flush().expect("wasn't able to flush"); +} +struct GrothCircuit { + n_constraints: usize, + _p: PhantomData, +} + +impl GrothCircuit { + pub fn new(n_constraints: usize) -> Self { + GrothCircuit { + n_constraints, + _p: PhantomData, + } + } +} + +impl ConstraintSynthesizer for GrothCircuit { + fn generate_constraints( + self, + cs: ark_relations::r1cs::ConstraintSystemRef, + ) -> 
ark_relations::r1cs::Result<()> { + let a = F::rand(&mut rand::thread_rng()); + let mut av = FpVar::new_witness(cs.clone(), || Ok(a))?; + for _ in 0..self.n_constraints { + let av = av.clone().mul(av.clone()); + } + Ok(()) + } +} +fn groth16_bench(n_constraints: usize, res: &mut BenchmarkResults) { + let params = { + let c = GrothCircuit::::new(n_constraints); + Groth16::::generate_random_parameters_with_reduction(c, &mut rand::thread_rng()).unwrap() + }; + let pvk = prepare_verifying_key(¶ms.vk); + println!("Running G16 proving for {} constraints", n_constraints); + let number_constraints = { + let circuit = GrothCircuit::::new(n_constraints); + let cs = ConstraintSystem::::new_ref(); + circuit.generate_constraints(cs.clone()).unwrap(); + cs.num_constraints() as u64 + }; + assert_eq!(number_constraints as usize, n_constraints); + let start = Instant::now(); + let proof = Groth16::::create_random_proof_with_reduction( + GrothCircuit::::new(n_constraints), + ¶ms, + &mut rand::thread_rng(), + ) + .expect("proof creation failed"); + let proving_time = start.elapsed().as_millis(); + res.g16_proving_time = proving_time; + + let r = Groth16::::verify_proof(&pvk, &proof, &[]).unwrap(); + assert!(r); +} From 7c71b8fcbed66dc102efbdfbd33fa25e8a3bab48 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Tue, 6 Jun 2023 18:09:45 +0200 Subject: [PATCH 64/64] putting rightfields in benchmark --- benches/groth16.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/benches/groth16.rs b/benches/groth16.rs index 57d54bd1..fe3408e4 100644 --- a/benches/groth16.rs +++ b/benches/groth16.rs @@ -21,6 +21,8 @@ fn main() { let n = 10; let nconstraints = (2_usize).pow(n as u32); let mut res = BenchmarkResults::default(); + res.power = n; + res.input_constraints = nconstraints; groth16_bench::(nconstraints, &mut res); let mut writer = csv::Writer::from_path("groth16.csv").expect("unable to open csv writer"); writer