diff --git a/Cargo.toml b/Cargo.toml index 11a37363..310d5d81 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -64,7 +64,11 @@ name = "testudo" harness = false [[bench]] -name = "pst" +name = "commitment" +harness = false + +[[bench]] +name = "groth16" harness = false [features] diff --git a/benches/pst.rs b/benches/commitment.rs similarity index 55% rename from benches/pst.rs rename to benches/commitment.rs index a9b821a0..71f3676c 100644 --- a/benches/pst.rs +++ b/benches/commitment.rs @@ -1,14 +1,14 @@ use std::time::Instant; +use ark_bls12_377::Bls12_377; +use ark_ec::pairing::Pairing; use ark_poly_commit::multilinear_pc::MultilinearPC; use ark_serialize::CanonicalSerialize; +use ark_std::UniformRand; use libtestudo::{ parameters::PoseidonConfiguration, poseidon_transcript::PoseidonTranscript, sqrt_pst::Polynomial, }; use serde::Serialize; -type F = ark_bls12_377::Fr; -type E = ark_bls12_377::Bls12_377; -use ark_std::UniformRand; #[derive(Default, Clone, Serialize)] struct BenchmarkResults { @@ -18,22 +18,37 @@ struct BenchmarkResults { verification_time: u128, proof_size: usize, commiter_key_size: usize, + pst_commit: u128, + pst_opening: u128, + pst_verification: u128, + pst_proof_size: u128, } fn main() { - let params = ark_bls12_377::Fr::poseidon_params(); + testudo_commitment_benchmark::("testudo_commitment_bls12377.csv"); + testudo_commitment_benchmark::("testudo_commitment_bls12381.csv"); +} - let mut writer = csv::Writer::from_path("sqrt_pst.csv").expect("unable to open csv writer"); - for &s in [4, 5, 20, 27].iter() { +fn testudo_commitment_benchmark(fname: &str) +where + E::ScalarField: PoseidonConfiguration, +{ + let params = E::ScalarField::poseidon_params(); + let mut writer = csv::Writer::from_path(fname).expect("unable to open csv writer"); + for &s in [4, 5, 15, 20, 25].iter() { println!("Running for {} inputs", s); let mut rng = ark_std::test_rng(); let mut br = BenchmarkResults::default(); br.power = s; let num_vars = s; let len = 
2_usize.pow(num_vars as u32); - let z: Vec = (0..len).into_iter().map(|_| F::rand(&mut rng)).collect(); - let r: Vec = (0..num_vars) + bench_pst::(num_vars, &mut br); + let z: Vec = (0..len) .into_iter() - .map(|_| F::rand(&mut rng)) + .map(|_| E::ScalarField::rand(&mut rng)) + .collect(); + let r: Vec = (0..num_vars) + .into_iter() + .map(|_| E::ScalarField::rand(&mut rng)) .collect(); let setup_vars = (num_vars as f32 / 2.0).ceil() as usize; @@ -96,3 +111,33 @@ fn main() { writer.flush().expect("wasn't able to flush"); } } + +fn bench_pst(num_vars: usize, res: &mut BenchmarkResults) { + use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; + use ark_poly_commit::multilinear_pc::MultilinearPC; + let params = MultilinearPC::::setup(num_vars, &mut rand::thread_rng()); + let (comkey, vkey) = MultilinearPC::trim(¶ms, num_vars); + let poly = DenseMultilinearExtension::rand(num_vars, &mut rand::thread_rng()); + + let start = Instant::now(); + let comm = MultilinearPC::commit(&comkey, &poly); + res.pst_commit = start.elapsed().as_millis(); + + let xs = (0..num_vars) + .map(|_| E::ScalarField::rand(&mut rand::thread_rng())) + .collect::>(); + let y = poly.evaluate(&xs).unwrap(); + let start = Instant::now(); + let proof = MultilinearPC::open(&comkey, &poly, &xs); + res.pst_opening = start.elapsed().as_millis(); + + let start = Instant::now(); + let check = MultilinearPC::check(&vkey, &comm, &xs, y, &proof); + res.pst_verification = start.elapsed().as_millis(); + + let mut b = Vec::new(); + proof.serialize_compressed(&mut b).unwrap(); + res.pst_proof_size = b.len() as u128; + + assert!(check); +} diff --git a/benches/groth16.rs b/benches/groth16.rs new file mode 100644 index 00000000..fe3408e4 --- /dev/null +++ b/benches/groth16.rs @@ -0,0 +1,86 @@ +use ark_ec::pairing::Pairing; +use ark_ff::PrimeField; +use ark_groth16::prepare_verifying_key; +use ark_groth16::Groth16; +use ark_r1cs_std::alloc::AllocVar; +use ark_r1cs_std::fields::fp::FpVar; +use 
ark_relations::r1cs::ConstraintSynthesizer; +use ark_relations::r1cs::ConstraintSystem; +use ark_std::marker::PhantomData; +use ark_std::time::Instant; +use serde::Serialize; +use std::ops::Mul; +#[derive(Default, Clone, Serialize)] +struct BenchmarkResults { + power: usize, + input_constraints: usize, + g16_proving_time: u128, +} + +fn main() { + let n = 10; + let nconstraints = (2_usize).pow(n as u32); + let mut res = BenchmarkResults::default(); + res.power = n; + res.input_constraints = nconstraints; + groth16_bench::(nconstraints, &mut res); + let mut writer = csv::Writer::from_path("groth16.csv").expect("unable to open csv writer"); + writer + .serialize(res) + .expect("unable to write results to csv"); + writer.flush().expect("wasn't able to flush"); +} +struct GrothCircuit { + n_constraints: usize, + _p: PhantomData, +} + +impl GrothCircuit { + pub fn new(n_constraints: usize) -> Self { + GrothCircuit { + n_constraints, + _p: PhantomData, + } + } +} + +impl ConstraintSynthesizer for GrothCircuit { + fn generate_constraints( + self, + cs: ark_relations::r1cs::ConstraintSystemRef, + ) -> ark_relations::r1cs::Result<()> { + let a = F::rand(&mut rand::thread_rng()); + let mut av = FpVar::new_witness(cs.clone(), || Ok(a))?; + for _ in 0..self.n_constraints { + let av = av.clone().mul(av.clone()); + } + Ok(()) + } +} +fn groth16_bench(n_constraints: usize, res: &mut BenchmarkResults) { + let params = { + let c = GrothCircuit::::new(n_constraints); + Groth16::::generate_random_parameters_with_reduction(c, &mut rand::thread_rng()).unwrap() + }; + let pvk = prepare_verifying_key(¶ms.vk); + println!("Running G16 proving for {} constraints", n_constraints); + let number_constraints = { + let circuit = GrothCircuit::::new(n_constraints); + let cs = ConstraintSystem::::new_ref(); + circuit.generate_constraints(cs.clone()).unwrap(); + cs.num_constraints() as u64 + }; + assert_eq!(number_constraints as usize, n_constraints); + let start = Instant::now(); + let proof = 
Groth16::::create_random_proof_with_reduction( + GrothCircuit::::new(n_constraints), + ¶ms, + &mut rand::thread_rng(), + ) + .expect("proof creation failed"); + let proving_time = start.elapsed().as_millis(); + res.g16_proving_time = proving_time; + + let r = Groth16::::verify_proof(&pvk, &proof, &[]).unwrap(); + assert!(r); +} diff --git a/benches/testudo.rs b/benches/testudo.rs index bd9cc75a..e949fa2f 100644 --- a/benches/testudo.rs +++ b/benches/testudo.rs @@ -1,9 +1,16 @@ +use std::marker::PhantomData; use std::time::Instant; use ark_crypto_primitives::sponge::poseidon::PoseidonConfig; use ark_crypto_primitives::sponge::Absorb; use ark_ec::pairing::Pairing; use ark_ff::PrimeField; +use ark_groth16::prepare_verifying_key; +use ark_groth16::Groth16; +use ark_r1cs_std::fields::fp::FpVar; +use ark_r1cs_std::prelude::AllocVar; +use ark_relations::r1cs::ConstraintSynthesizer; +use ark_relations::r1cs::ConstraintSystem; use ark_serialize::*; use libtestudo::parameters::PoseidonConfiguration; use libtestudo::{ @@ -12,6 +19,40 @@ use libtestudo::{ Instance, }; use serde::Serialize; +use std::ops::Mul; + +fn main() { + // bench_with_bls12_377(); + // bench_with_bls12_381(); + bench_with_ark_blst(); +} +struct GrothCircuit { + n_constraints: usize, + _p: PhantomData, +} + +impl GrothCircuit { + pub fn new(n_constraints: usize) -> Self { + GrothCircuit { + n_constraints, + _p: PhantomData, + } + } +} + +impl ConstraintSynthesizer for GrothCircuit { + fn generate_constraints( + self, + cs: ark_relations::r1cs::ConstraintSystemRef, + ) -> ark_relations::r1cs::Result<()> { + let a = F::rand(&mut rand::thread_rng()); + let mut av = FpVar::new_witness(cs.clone(), || Ok(a))?; + for _ in 0..self.n_constraints { + let av = av.clone().mul(av.clone()); + } + Ok(()) + } +} #[derive(Default, Clone, Serialize)] struct BenchmarkResults { @@ -22,38 +63,34 @@ struct BenchmarkResults { sat_proof_size: usize, eval_proof_size: usize, total_proof_size: usize, -} - -fn main() { - 
bench_with_bls12_377(); - // bench_with_bls12_381(); - // bench_with_ark_blst(); + g16_proving_time: u128, } fn bench_with_ark_blst() { let params = ark_blst::Scalar::poseidon_params(); - testudo_snark_bench::(params, "testudo_blst"); + testudo_snark_bench::(params, "testudo_blst", false); } fn bench_with_bls12_377() { let params = ark_bls12_377::Fr::poseidon_params(); - testudo_snark_bench::(params, "testudo_bls12_377"); + testudo_snark_bench::(params, "testudo_bls12_377", true); } fn bench_with_bls12_381() { let params = ark_bls12_381::Fr::poseidon_params(); - testudo_snark_bench::(params, "testudo_bls12_381"); + testudo_snark_bench::(params, "testudo_bls12_381", true); } -fn testudo_snark_bench(params: PoseidonConfig, file_name: &str) +fn testudo_snark_bench(params: PoseidonConfig, file_name: &str, verify: bool) where E: Pairing, E::ScalarField: PrimeField, E::ScalarField: Absorb, { let mut writer = csv::Writer::from_path(file_name).expect("unable to open csv writer"); - for &s in [4, 5, 10, 12, 14, 16, 18, 20, 22, 24, 26].iter() { - println!("Running for {} inputs", s); + for &s in [5, 10, 15, 20, 24].iter() { + //for &s in [4].iter() { + println!("Running for {} constraints", s); let mut br = BenchmarkResults::default(); let num_vars = (2_usize).pow(s as u32); let num_cons = num_vars; @@ -108,20 +145,51 @@ where let mut verifier_transcript = PoseidonTranscript::new(¶ms.clone()); let start = Instant::now(); - let res = proof.verify( - &gens, - &comm, - &inputs, - &mut verifier_transcript, - params.clone(), - ); - assert!(res.is_ok()); - let duration = start.elapsed().as_millis(); - br.testudo_verification_time = duration; - + if verify { + let res = proof.verify( + &gens, + &comm, + &inputs, + &mut verifier_transcript, + params.clone(), + ); + assert!(res.is_ok()); + let duration = start.elapsed().as_millis(); + br.testudo_verification_time = duration; + } + + groth16_bench::(num_cons, &mut br); writer .serialize(br) .expect("unable to write results to csv"); 
writer.flush().expect("wasn't able to flush"); } } + +fn groth16_bench(n_constraints: usize, res: &mut BenchmarkResults) { + let params = { + let c = GrothCircuit::::new(n_constraints); + Groth16::::generate_random_parameters_with_reduction(c, &mut rand::thread_rng()).unwrap() + }; + let pvk = prepare_verifying_key(¶ms.vk); + println!("Running G16 proving for {} constraints", n_constraints); + let number_constraints = { + let circuit = GrothCircuit::::new(n_constraints); + let cs = ConstraintSystem::::new_ref(); + circuit.generate_constraints(cs.clone()).unwrap(); + cs.num_constraints() as u64 + }; + assert_eq!(number_constraints as usize, n_constraints); + let start = Instant::now(); + let proof = Groth16::::create_random_proof_with_reduction( + GrothCircuit::::new(n_constraints), + ¶ms, + &mut rand::thread_rng(), + ) + .expect("proof creation failed"); + let proving_time = start.elapsed().as_millis(); + res.g16_proving_time = proving_time; + + let r = Groth16::::verify_proof(&pvk, &proof, &[]).unwrap(); + assert!(r); +} diff --git a/src/r1csproof.rs b/src/r1csproof.rs index 92538529..6a21e168 100644 --- a/src/r1csproof.rs +++ b/src/r1csproof.rs @@ -569,26 +569,27 @@ mod tests { #[test] fn check_r1cs_proof_ark_blst() { let params = ark_blst::Scalar::poseidon_params(); - check_r1cs_proof::(params); + check_r1cs_proof::(10, params); } #[test] fn check_r1cs_proof_bls12_377() { let params = ark_bls12_377::Fr::poseidon_params(); - check_r1cs_proof::(params); + check_r1cs_proof::(10, params); } #[test] fn check_r1cs_proof_bls12_381() { let params = ark_bls12_381::Fr::poseidon_params(); - check_r1cs_proof::(params); + check_r1cs_proof::(10, params); } - fn check_r1cs_proof
<P>(params: PoseidonConfig<P::ScalarField>) + fn check_r1cs_proof
<P>(size: usize, params: PoseidonConfig<P::ScalarField>) where P: Pairing, P::ScalarField: PrimeField, P::ScalarField: Absorb, { - let num_vars = 1024; + + let num_vars = (2_usize).pow(size as u32); let num_cons = num_vars; let num_inputs = 3; let (inst, vars, input) =