diff --git a/Cargo.lock b/Cargo.lock index 6658d5d184..c443639daf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -2979,6 +2979,7 @@ dependencies = [ "ark-poly", "ark-serialize", "arkworks", + "base64 0.21.7", "getrandom 0.2.15", "kimchi", "libc", diff --git a/plonk-napi/Cargo.toml b/plonk-napi/Cargo.toml index 74ee5691f1..c06aac8990 100644 --- a/plonk-napi/Cargo.toml +++ b/plonk-napi/Cargo.toml @@ -26,9 +26,10 @@ arkworks.workspace = true mina-curves = { path = "../curves" } mina-poseidon = { path = "../poseidon" } o1-utils = { path = "../utils" } -poly-commitment = { path = "../poly-commitment" } plonk_wasm.workspace = true +poly-commitment = { path = "../poly-commitment" } +base64.workspace = true getrandom.workspace = true kimchi.workspace = true libc.workspace = true diff --git a/plonk-napi/src/lib.rs b/plonk-napi/src/lib.rs index 8d520f3db4..c7be89aa28 100644 --- a/plonk-napi/src/lib.rs +++ b/plonk-napi/src/lib.rs @@ -1,6 +1,7 @@ mod build_info; mod circuit; mod gate_vector; +mod oracles; mod pasta_fp_plonk_index; mod pasta_fq_plonk_index; mod plonk_verifier_index; @@ -30,6 +31,10 @@ pub use gate_vector::{ NapiFpGateVector as WasmFpGateVector, NapiFqGate as WasmFqGate, NapiFqGateVector as WasmFqGateVector, }; +pub use oracles::{ + fp::{fp_oracles_create, fp_oracles_deep_copy, fp_oracles_dummy}, + fq::{fq_oracles_create, fq_oracles_deep_copy, fq_oracles_dummy}, +}; pub use pasta_fp_plonk_index::{ prover_index_fp_from_bytes, prover_index_fp_to_bytes, WasmPastaFpPlonkIndex, }; @@ -43,11 +48,25 @@ pub use poly_comm::{ pallas::NapiFqPolyComm as WasmFqPolyComm, vesta::NapiFpPolyComm as WasmFpPolyComm, }; pub use poseidon::{caml_pasta_fp_poseidon_block_cipher, caml_pasta_fq_poseidon_block_cipher}; -pub use proof::caml_pasta_fp_plonk_proof_create; +pub use proof::{ + fp::{ + caml_pasta_fp_plonk_proof_batch_verify, caml_pasta_fp_plonk_proof_create, + caml_pasta_fp_plonk_proof_deep_copy, caml_pasta_fp_plonk_proof_dummy, + caml_pasta_fp_plonk_proof_verify, NapiFpLookupCommitments, NapiFpOpeningProof, + NapiFpProofEvaluations, NapiFpProverCommitments, NapiFpProverProof, + }, + fq::{ + caml_pasta_fq_plonk_proof_batch_verify, caml_pasta_fq_plonk_proof_create, + caml_pasta_fq_plonk_proof_deep_copy, caml_pasta_fq_plonk_proof_dummy, + caml_pasta_fq_plonk_proof_verify, NapiFqLookupCommitments, NapiFqOpeningProof, + NapiFqProofEvaluations, NapiFqProverCommitments, NapiFqProverProof, + }, +}; pub use srs::{ caml_fp_srs_from_bytes, caml_fp_srs_from_bytes_external, caml_fp_srs_to_bytes, - caml_fq_srs_from_bytes, caml_fq_srs_from_bytes_external, caml_fq_srs_to_bytes, - fp::NapiFpSrs as WasmFpSrs, fq::NapiFqSrs as WasmFqSrs, *, + caml_fp_srs_to_bytes_external, caml_fq_srs_from_bytes, caml_fq_srs_from_bytes_external, + caml_fq_srs_to_bytes, caml_fq_srs_to_bytes_external, fp::NapiFpSrs as WasmFpSrs, + fq::NapiFqSrs as WasmFqSrs, *, }; pub use tables::{JsLookupTableFp, JsLookupTableFq, JsRuntimeTableCfgFp, JsRuntimeTableCfgFq}; pub use vector::{ diff --git a/plonk-napi/src/oracles.rs b/plonk-napi/src/oracles.rs new file mode 100644 index 0000000000..3c46822d03 --- /dev/null +++ b/plonk-napi/src/oracles.rs @@ -0,0 +1,331 @@ +use crate::{ + vector::{NapiFlatVector, NapiVector}, + wrappers::field::{NapiPastaFp, NapiPastaFq}, +}; +use ark_ff::{One, Zero}; +use kimchi::{ + circuits::scalars::RandomOracles, proof::ProverProof, + verifier_index::VerifierIndex 
as DlogVerifierIndex, +}; +use mina_poseidon::{ + self, + constants::PlonkSpongeConstantsKimchi, + sponge::{DefaultFqSponge, DefaultFrSponge}, + FqSponge, +}; +use napi::{bindgen_prelude::*, Error as NapiError, Status}; +use napi_derive::napi; +use paste::paste; +use poly_commitment::{ + commitment::{shift_scalar, PolyComm}, + ipa::OpeningProof, + SRS, +}; + +macro_rules! impl_oracles { + ($NapiF: ty, + $F: ty, + $NapiG: ty, + $G: ty, + $NapiPolyComm: ty, + $NapiProverProof: ty, + $index: ty, + $curve_params: ty, + $field_name: ident) => { + + paste! { + use mina_poseidon::sponge::ScalarChallenge; + + #[napi(js_name = [])] + #[derive(Clone, Copy)] + pub struct [] { + pub joint_combiner_chal: Option<$NapiF>, + pub joint_combiner: Option<$NapiF>, + pub beta: $NapiF, + pub gamma: $NapiF, + pub alpha_chal: $NapiF, + pub alpha: $NapiF, + pub zeta: $NapiF, + pub v: $NapiF, + pub u: $NapiF, + pub zeta_chal: $NapiF, + pub v_chal: $NapiF, + pub u_chal: $NapiF, + } + type NapiRandomOracles = []; + + #[napi] + impl [] { + #[napi(constructor)] + #[allow(clippy::too_many_arguments)] + pub fn new( + joint_combiner_chal: Option<$NapiF>, + joint_combiner: Option<$NapiF>, + beta: $NapiF, + gamma: $NapiF, + alpha_chal: $NapiF, + alpha: $NapiF, + zeta: $NapiF, + v: $NapiF, + u: $NapiF, + zeta_chal: $NapiF, + v_chal: $NapiF, + u_chal: $NapiF) -> Self { + Self { + joint_combiner_chal, + joint_combiner, + beta, + gamma, + alpha_chal, + alpha, + zeta, + v, + u, + zeta_chal, + v_chal, + u_chal, + } + } + } + + impl From> for NapiRandomOracles + { + fn from(ro: RandomOracles<$F>) -> Self { + Self { + joint_combiner_chal: ro.joint_combiner.as_ref().map(|x| x.0.0.into()), + joint_combiner: ro.joint_combiner.as_ref().map(|x| x.1.into()), + beta: ro.beta.into(), + gamma: ro.gamma.into(), + alpha_chal: ro.alpha_chal.0.into(), + alpha: ro.alpha.into(), + zeta: ro.zeta.into(), + v: ro.v.into(), + u: ro.u.into(), + zeta_chal: ro.zeta_chal.0.into(), + v_chal: ro.v_chal.0.into(), + u_chal: ro.u_chal.0.into(), + } + } + } + + impl From for RandomOracles<$F> + { + fn from(ro: NapiRandomOracles) -> Self { + Self { + joint_combiner: ro.joint_combiner_chal.and_then(|x| { + ro.joint_combiner.map(|y| (ScalarChallenge(x.into()), y.into())) + }), + beta: ro.beta.into(), + gamma: ro.gamma.into(), + alpha_chal: ScalarChallenge(ro.alpha_chal.into()), + alpha: ro.alpha.into(), + zeta: ro.zeta.into(), + v: ro.v.into(), + u: ro.u.into(), + zeta_chal: ScalarChallenge(ro.zeta_chal.into()), + v_chal: ScalarChallenge(ro.v_chal.into()), + u_chal: ScalarChallenge(ro.u_chal.into()), + } + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + impl<'a> ToNapiValue for &'a mut [] { + unsafe fn to_napi_value( + env: sys::napi_env, + val: Self, + ) -> Result { + <[] as ToNapiValue>::to_napi_value(env, val.clone()) + } + } + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] { + pub o: [], + pub p_eval0: $NapiF, + pub p_eval1: $NapiF, + #[napi(skip)] + pub opening_prechallenges: NapiFlatVector<$NapiF>, + pub digest_before_evaluations: $NapiF, + } + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + o: NapiRandomOracles, + p_eval0: $NapiF, + p_eval1: $NapiF, + opening_prechallenges: NapiFlatVector<$NapiF>, + digest_before_evaluations: $NapiF) -> Self { + Self {o, p_eval0, p_eval1, opening_prechallenges, digest_before_evaluations} + } + + 
#[napi(getter, js_name="opening_prechallenges")] + pub fn opening_prechallenges(&self) -> NapiFlatVector<$NapiF> { + self.opening_prechallenges.clone() + } + + #[napi(setter, js_name="set_opening_prechallenges")] + pub fn set_opening_prechallenges(&mut self, x: NapiFlatVector<$NapiF>) { + self.opening_prechallenges = x; + } + } + + #[napi(js_name = [<$F:snake _oracles_create>])] + pub fn [<$F:snake _oracles_create>]( + lgr_comm: NapiVector<$NapiPolyComm>, // the bases to commit polynomials + index: $index, // parameters + proof: $NapiProverProof, // the final proof (contains public elements at the beginning) + ) -> Result<[]> { + // conversions + let result: Result<(RandomOracles<$F>, [Vec<$F>; 2], NapiFlatVector<$NapiF>, $F), String> = { + let index: DlogVerifierIndex<$G, OpeningProof<$G>> = index.into(); + + let lgr_comm: Vec> = lgr_comm + .into_iter() + .take(proof.public.len()) + .map(Into::into) + .collect(); + let lgr_comm_refs: Vec<_> = lgr_comm.iter().collect(); + + let p_comm = PolyComm::<$G>::multi_scalar_mul( + &lgr_comm_refs, + &proof + .public + .iter() + .map(|a| a.clone().into()) + .map(|s: $F| -s) + .collect::>(), + ); + let p_comm = { + index + .srs() + .mask_custom( + p_comm.clone(), + &p_comm.map(|_| $F::one()), + ) + .unwrap() + .commitment + }; + + let (proof, public_input): (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) = proof.into(); + + let oracles_result = + proof.oracles::< + DefaultFqSponge<$curve_params, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<$F, PlonkSpongeConstantsKimchi> + >(&index, &p_comm, Some(&public_input)); + let oracles_result = match oracles_result { + Err(e) => { + return Err(NapiError::new(Status::GenericFailure, format!("oracles_create: {}", e))); + } + Ok(cs) => cs, + }; + + let (mut sponge, combined_inner_product, p_eval, digest, oracles) = ( + oracles_result.fq_sponge, + oracles_result.combined_inner_product, + oracles_result.public_evals, + oracles_result.digest, + oracles_result.oracles, + ); + + sponge.absorb_fr(&[shift_scalar::<$G>(combined_inner_product)]); + + let opening_prechallenges = proof + .proof + .prechallenges(&mut sponge) + .into_iter() + .map(|x| x.0.into()) + .collect(); + + Ok((oracles, p_eval, opening_prechallenges, digest)) + }; + + match result { + Ok((oracles, p_eval, opening_prechallenges, digest)) => Ok([] { + o: oracles.into(), + p_eval0: p_eval[0][0].into(), + p_eval1: p_eval[1][0].into(), + opening_prechallenges, + digest_before_evaluations: digest.into() + }), + Err(err) => Err(NapiError::new(Status::GenericFailure, err)), + } + } + + #[napi(js_name = [<$F:snake _oracles_dummy>])] + pub fn [<$F:snake _oracles_dummy>]() -> [] { + [] { + o: RandomOracles::<$F>::default().into(), + p_eval0: $F::zero().into(), + p_eval1: $F::zero().into(), + opening_prechallenges: vec![].into(), + digest_before_evaluations: $F::zero().into(), + } + } + + #[napi(js_name = [<$F:snake _oracles_deep_copy>])] + pub fn [<$F:snake _oracles_deep_copy>]( + x: $NapiProverProof, + ) -> $NapiProverProof { + x + } + } + } +} + +pub mod fp { + use super::*; + use crate::{ + plonk_verifier_index::fp::NapiFpPlonkVerifierIndex as WasmPlonkVerifierIndex, + poly_comm::vesta::NapiFpPolyComm as WasmPolyComm, + proof::fp::NapiFpProverProof as WasmProverProof, + }; + use mina_curves::pasta::{Fp, Vesta as GAffine, VestaParameters}; + + impl_oracles!( + NapiPastaFp, + Fp, + WasmGVesta, + GAffine, + WasmPolyComm, + WasmProverProof, + WasmPlonkVerifierIndex, + VestaParameters, + Fp + ); +} + +pub mod fq { + use super::*; + use crate::{ + 
plonk_verifier_index::fq::NapiFqPlonkVerifierIndex as WasmPlonkVerifierIndex, + poly_comm::pallas::NapiFqPolyComm as WasmPolyComm, + proof::fq::NapiFqProverProof as WasmProverProof, + }; + use mina_curves::pasta::{Fq, Pallas as GAffine, PallasParameters}; + + impl_oracles!( + NapiPastaFq, + Fq, + WasmGPallas, + GAffine, + WasmPolyComm, + WasmProverProof, + WasmPlonkVerifierIndex, + PallasParameters, + Fq + ); +} diff --git a/plonk-napi/src/plonk_verifier_index/fp.rs b/plonk-napi/src/plonk_verifier_index/fp.rs index 6832d47e28..a39acbad91 100644 --- a/plonk-napi/src/plonk_verifier_index/fp.rs +++ b/plonk-napi/src/plonk_verifier_index/fp.rs @@ -16,15 +16,12 @@ use kimchi::{ }, }, linearization::expr_linearization, - verifier_index::{LookupVerifierIndex, VerifierIndex as DlogVerifierIndex}, + verifier_index::{LookupVerifierIndex, VerifierIndex}, }; use mina_curves::pasta::{Fp, Pallas as GAffineOther, Vesta as GAffine}; use napi::bindgen_prelude::{Error, Status}; use napi_derive::napi; -use poly_commitment::{ - commitment::PolyComm, - ipa::{OpeningProof, SRS}, -}; +use poly_commitment::{commitment::PolyComm, ipa::OpeningProof}; use serde::{Deserialize, Serialize}; use std::sync::Arc; @@ -35,6 +32,13 @@ pub struct NapiFpDomain { pub group_gen: NapiPastaFp, } +impl From for Domain { + fn from(domain: NapiFpDomain) -> Self { + let size = 1 << domain.log_size_of_group; + Domain::::new(size).expect("Failed to create evaluation domain") + } +} + #[napi(object, js_name = "WasmFpShifts")] #[derive(Clone, Debug, Serialize, Deserialize, Default)] pub struct NapiFpShifts { @@ -223,7 +227,7 @@ pub fn caml_pasta_fp_plonk_verifier_index_shifts( }) } -impl From for DlogVerifierIndex> { +impl From for VerifierIndex> { fn from(index: NapiFpPlonkVerifierIndex) -> Self { let max_poly_size = index.max_poly_size; let public_ = index.public_; @@ -242,7 +246,7 @@ impl From for DlogVerifierIndex for DlogVerifierIndex> { +impl From for VerifierIndex> { fn from(index: NapiFqPlonkVerifierIndex) -> Self { let max_poly_size = index.max_poly_size; let public_ = index.public_; @@ -242,7 +239,7 @@ impl From for DlogVerifierIndex>, - pub public_input: Vec, -} +macro_rules! impl_proof { + ( + $NapiG: ty, + $G: ty, + $NapiF: ty, + $F: ty, + $NapiPolyComm: ty, + $NapiSrs: ty, + $NapiIndex: ty, + $NapiVerifierIndex: ty, + $field_name: ident + ) => { + paste! 
{ + type NapiVecVecF = []; -#[napi] -pub fn caml_pasta_fp_plonk_proof_create( - index: &External, - witness: WasmVecVecFp, - runtime_tables: NapiVector, - prev_challenges: NapiFlatVector, - prev_sgs: NapiVector, -) -> Result> { - let (maybe_proof, public_input) = { - index - .0 - .srs - .as_ref() - .get_lagrange_basis(index.0.as_ref().cs.domain.d1); - let prev: Vec> = { - if prev_challenges.is_empty() { - Vec::new() - } else { - let challenges_per_sg = prev_challenges.len() / prev_sgs.len(); - let d = prev_sgs - .into_iter() - .map(Into::::into) - .enumerate() - .map(|(i, sg)| { - let chals = prev_challenges - [(i * challenges_per_sg)..(i + 1) * challenges_per_sg] - .iter() - .cloned() - .map(Into::into) - .collect(); - let comm = PolyComm:: { chunks: vec![sg] }; - RecursionChallenge { chals, comm } - }) - .collect(); - d + #[napi(js_name = [])] + #[derive(Clone)] + pub struct []( + ProofEvaluations>> + ); + + type NapiProofEvaluations = []; + + impl From for ProofEvaluations>> { + fn from(x: NapiProofEvaluations) -> Self { + x.0 + } + } + + impl From>>> for NapiProofEvaluations { + fn from(x: ProofEvaluations>>) -> Self { + Self(x) + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] + { + #[napi(skip)] + pub sorted: NapiVector<$NapiPolyComm>, + #[napi(skip)] + pub aggreg: $NapiPolyComm, + #[napi(skip)] + pub runtime: Option<$NapiPolyComm>, + } + + type NapiLookupCommitments = []; + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + sorted: NapiVector<$NapiPolyComm>, + aggreg: $NapiPolyComm, + runtime: Option<$NapiPolyComm>) -> Self { + NapiLookupCommitments { sorted, aggreg, runtime } + } + + #[napi(getter)] + pub fn sorted(&self) -> NapiVector<$NapiPolyComm> { + self.sorted.clone() + } + + #[napi(getter)] + pub fn aggreg(&self) -> $NapiPolyComm { + self.aggreg.clone() + } + + #[napi(getter)] + pub fn runtime(&self) -> Option<$NapiPolyComm> { + self.runtime.clone() + } + + #[napi(setter, js_name="set_sorted")] + pub fn set_sorted(&mut self, s: NapiVector<$NapiPolyComm>) { + self.sorted = s + } + + #[napi(setter, js_name="set_aggreg")] + pub fn set_aggreg(&mut self, a: $NapiPolyComm) { + self.aggreg = a + } + + #[napi(setter, js_name="set_runtime")] + pub fn set_runtime(&mut self, r: Option<$NapiPolyComm>) { + self.runtime = r + } + } + + impl From> for NapiLookupCommitments { + fn from(x: LookupCommitments<$G>) -> Self { + NapiLookupCommitments { + sorted: x.sorted.into_iter().map(Into::into).collect(), + aggreg: x.aggreg.into(), + runtime: x.runtime.map(Into::into) + } + } + } + + impl From for LookupCommitments<$G> { + fn from(x: NapiLookupCommitments) -> Self { + LookupCommitments { + sorted: x.sorted.into_iter().map(Into::into).collect(), + aggreg: x.aggreg.into(), + runtime: x.runtime.map(Into::into) + } + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] + { + #[napi(skip)] + pub w_comm: NapiVector<$NapiPolyComm>, + #[napi(skip)] + pub z_comm: $NapiPolyComm, + #[napi(skip)] + pub t_comm: $NapiPolyComm, + #[napi(skip)] + pub lookup: Option, + } + + type NapiProverCommitments = []; 
+ + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + w_comm: NapiVector<$NapiPolyComm>, + z_comm: $NapiPolyComm, + t_comm: $NapiPolyComm, + lookup: Option + ) -> Self { + NapiProverCommitments { w_comm, z_comm, t_comm, lookup } + } + + #[napi(getter, js_name="w_comm")] + pub fn w_comm(&self) -> NapiVector<$NapiPolyComm> { + self.w_comm.clone() + } + #[napi(getter, js_name="z_comm")] + pub fn z_comm(&self) -> $NapiPolyComm { + self.z_comm.clone() + } + #[napi(getter, js_name="t_comm")] + pub fn t_comm(&self) -> $NapiPolyComm { + self.t_comm.clone() + } + + #[napi(getter)] + pub fn lookup(&self) -> Option { + self.lookup.clone() + } + + #[napi(setter, js_name="set_w_comm")] + pub fn set_w_comm(&mut self, x: NapiVector<$NapiPolyComm>) { + self.w_comm = x + } + #[napi(setter, js_name="set_z_comm")] + pub fn set_z_comm(&mut self, x: $NapiPolyComm) { + self.z_comm = x + } + #[napi(setter, js_name="set_t_comm")] + pub fn set_t_comm(&mut self, x: $NapiPolyComm) { + self.t_comm = x + } + + #[napi(setter, js_name="set_lookup")] + pub fn set_lookup(&mut self, l: Option) { + self.lookup = l + } + } + + impl From> for NapiProverCommitments { + fn from(x: ProverCommitments<$G>) -> Self { + NapiProverCommitments { + w_comm: x.w_comm.iter().map(Into::into).collect(), + z_comm: x.z_comm.into(), + t_comm: x.t_comm.into(), + lookup: x.lookup.map(Into::into), + } + } + } + + impl From for ProverCommitments<$G> { + fn from(x: NapiProverCommitments) -> Self { + ProverCommitments { + w_comm: core::array::from_fn(|i| (&x.w_comm[i]).into()), + z_comm: x.z_comm.into(), + t_comm: x.t_comm.into(), + lookup: x.lookup.map(Into::into), + } + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi(js_name = [] )] + #[derive(Clone, Debug)] + pub struct [] { + #[napi(skip)] + pub lr_0: NapiVector<$NapiG>, // vector of rounds of L commitments + #[napi(skip)] + pub lr_1: NapiVector<$NapiG>, // vector of rounds of R commitments + #[napi(skip)] + pub delta: $NapiG, + pub z1: $NapiF, + pub z2: $NapiF, + #[napi(skip)] + pub sg: $NapiG, + } + + type NapiOpeningProof = []; + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + lr_0: NapiVector<$NapiG>, + lr_1: NapiVector<$NapiG>, + delta: $NapiG, + z1: $NapiF, + z2: $NapiF, + sg: $NapiG) -> Self { + NapiOpeningProof { lr_0, lr_1, delta, z1, z2, sg } + } + + #[napi(getter, js_name="lr_0")] + pub fn lr_0(&self) -> NapiVector<$NapiG> { + self.lr_0.clone() + } + #[napi(getter, js_name="lr_1")] + pub fn lr_1(&self) -> NapiVector<$NapiG> { + self.lr_1.clone() + } + #[napi(getter)] + pub fn delta(&self) -> $NapiG { + self.delta.clone() + } + #[napi(getter)] + pub fn sg(&self) -> $NapiG { + self.sg.clone() + } + + #[napi(setter, js_name="set_lr_0")] + pub fn set_lr_0(&mut self, lr_0: NapiVector<$NapiG>) { + self.lr_0 = lr_0 + } + #[napi(setter, js_name="set_lr_1")] + pub fn set_lr_1(&mut self, lr_1: NapiVector<$NapiG>) { + self.lr_1 = lr_1 + } + #[napi(setter, js_name="set_delta")] + pub fn set_delta(&mut self, delta: $NapiG) { + self.delta = delta + } + #[napi(setter, js_name="set_sg")] + pub fn set_sg(&mut self, sg: $NapiG) { + self.sg = sg + } + } + + impl From for OpeningProof<$G> { + fn from(x: NapiOpeningProof) -> Self { + let NapiOpeningProof {lr_0, lr_1, delta, z1, z2, sg} = x; + OpeningProof { + lr: lr_0.into_iter().zip(lr_1.into_iter()).map(|(x, y)| (x.into(), 
y.into())).collect(), + delta: delta.into(), + z1: z1.into(), + z2: z2.into(), + sg: sg.into(), + } + } + } + + impl From> for NapiOpeningProof { + fn from(x: OpeningProof<$G>) -> Self { + let (lr_0, lr_1) = x.lr.clone().into_iter().map(|(x, y)| (x.into(), y.into())).unzip(); + NapiOpeningProof { + lr_0, + lr_1, + delta: x.delta.clone().into(), + z1: x.z1.into(), + z2: x.z2.into(), + sg: x.sg.clone().into(), + } + } } - }; - let rust_runtime_tables: Vec> = runtime_tables - .into_iter() - .flat_map(|table| { - let JsRuntimeTableFp { id, data } = table; - data.into_iter().map(move |column| { - let values = NapiFlatVector::::from_bytes(column.to_vec()) + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi(js_name = [])] + #[derive(Clone)] + pub struct [] { + #[napi(skip)] + pub commitments: NapiProverCommitments, + #[napi(skip)] + pub proof: NapiOpeningProof, + // OCaml doesn't have sized arrays, so we have to convert to a tuple.. + #[napi(skip)] + pub evals: NapiProofEvaluations, + pub ft_eval1: $NapiF, + #[napi(skip)] + pub public: NapiFlatVector<$NapiF>, + #[napi(skip)] + pub prev_challenges_scalars: Vec>, + #[napi(skip)] + pub prev_challenges_comms:NapiVector<$NapiPolyComm>, + } + + type NapiProverProof = []; + + impl From<&NapiProverProof> for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { + fn from(x: &NapiProverProof) -> Self { + let proof = ProverProof { + commitments: x.commitments.clone().into(), + proof: x.proof.clone().into(), + evals: x.evals.clone().into(), + prev_challenges: + (&x.prev_challenges_scalars) + .into_iter() + .zip((&x.prev_challenges_comms).into_iter()) + .map(|(chals, comm)| { + RecursionChallenge { + chals: chals.clone(), + comm: comm.into(), + } + }) + .collect(), + ft_eval1: x.ft_eval1.clone().into() + }; + let public = x.public.clone().into_iter().map(Into::into).collect(); + (proof, public) + } + } + + impl From for (ProverProof<$G, OpeningProof<$G>>, Vec<$F>) { + fn from(x: NapiProverProof) -> Self { + let proof = ProverProof { + commitments: x.commitments.into(), + proof: x.proof.into(), + evals: x.evals.into(), + prev_challenges: + (x.prev_challenges_scalars) + .into_iter() + .zip((x.prev_challenges_comms).into_iter()) + .map(|(chals, comm)| { + RecursionChallenge { + chals: chals.into(), + comm: comm.into(), + } + }) + .collect(), + ft_eval1: x.ft_eval1.into() + }; + let public = x.public.into_iter().map(Into::into).collect(); + (proof, public) + } + } + + impl FromNapiValue for [] { + unsafe fn from_napi_value( + env: sys::napi_env, + napi_val: sys::napi_value, + ) -> Result { + let instance = ]> as FromNapiValue>::from_napi_value(env, napi_val)?; + Ok((*instance).clone()) + } + } + + #[napi] + impl [] { + #[napi(constructor)] + pub fn new( + commitments: NapiProverCommitments, // maybe remove FromNapiValue trait implementation and wrap it in External instead + proof: NapiOpeningProof, + evals: NapiProofEvaluations, // maybe remove FromNapiValue trait implementation and wrap it in External instead + ft_eval1: $NapiF, + public_: NapiFlatVector<$NapiF>, + prev_challenges_scalars: NapiVecVecF, + prev_challenges_comms: NapiVector<$NapiPolyComm>) -> Self { + NapiProverProof { + commitments, + proof, + evals, + ft_eval1, + public: public_, + prev_challenges_scalars: prev_challenges_scalars.0, + prev_challenges_comms, + } + } + + #[napi(getter)] + pub fn commitments(&self) -> 
NapiProverCommitments { + self.commitments.clone() + } + #[napi(getter)] + pub fn proof(&self) -> NapiOpeningProof { + self.proof.clone() + } + #[napi(getter)] + pub fn evals(&self) -> NapiProofEvaluations { + self.evals.clone() + } + #[napi(getter, js_name="public_")] + pub fn public_(&self) -> NapiFlatVector<$NapiF> { + self.public.clone() + } + #[napi(getter, js_name="prev_challenges_scalars")] + pub fn prev_challenges_scalars(&self) -> NapiVecVecF { + [](self.prev_challenges_scalars.clone()) + } + #[napi(getter, js_name="prev_challenges_comms")] + pub fn prev_challenges_comms(&self) -> NapiVector<$NapiPolyComm> { + self.prev_challenges_comms.clone() + } + + #[napi(setter, js_name="set_commitments")] + pub fn set_commitments(&mut self, commitments: NapiProverCommitments) { + self.commitments = commitments + } + #[napi(setter, js_name="set_proof")] + pub fn set_proof(&mut self, proof: NapiOpeningProof) { + self.proof = proof + } + #[napi(setter, js_name="set_evals")] + pub fn set_evals(&mut self, evals: NapiProofEvaluations) { + self.evals = evals + } + #[napi(setter, js_name="set_public_")] + pub fn set_public_(&mut self, public_: NapiFlatVector<$NapiF>) { + self.public = public_ + } + #[napi(setter, js_name="set_prev_challenges_scalars")] + pub fn set_prev_challenges_scalars(&mut self, prev_challenges_scalars: NapiVecVecF) { + self.prev_challenges_scalars = prev_challenges_scalars.0 + } + #[napi(setter, js_name="set_prev_challenges_comms")] + pub fn set_prev_challenges_comms(&mut self, prev_challenges_comms: NapiVector<$NapiPolyComm>) { + self.prev_challenges_comms = prev_challenges_comms + } + + #[napi] + #[allow(deprecated)] + pub fn serialize(&self) -> String { + let (proof, _public_input) = self.into(); + let serialized = rmp_serde::to_vec(&proof).unwrap(); + // Deprecated used on purpose: updating this leads to a bug in o1js + base64::encode(serialized) + } + } + + #[derive(Clone)] + pub struct [] { + pub proof: ProverProof<$G, OpeningProof<$G>>, + pub public_input: Vec<$F>, + } + + type NapiProofF = []; + type JsRuntimeTableF = []; + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_create">])] + pub fn []( + index: &External<$NapiIndex>, + witness: NapiVecVecF, + runtime_tables: NapiVector, + prev_challenges: NapiFlatVector<$NapiF>, + prev_sgs: NapiVector<$NapiG>, + ) -> Result> { + let (maybe_proof, public_input) = { + index + .0 + .srs + .as_ref() + .get_lagrange_basis(index.0.as_ref().cs.domain.d1); + let prev: Vec> = { + if prev_challenges.is_empty() { + Vec::new() + } else { + let challenges_per_sg = prev_challenges.len() / prev_sgs.len(); + let d = prev_sgs + .into_iter() + .map(Into::<$G>::into) + .enumerate() + .map(|(i, sg)| { + let chals = prev_challenges + [(i * challenges_per_sg)..(i + 1) * challenges_per_sg] + .iter() + .cloned() + .map(Into::into) + .collect(); + let comm = PolyComm::<$G> { chunks: vec![sg] }; + RecursionChallenge { chals, comm } + }) + .collect(); + d + } + }; + + let rust_runtime_tables: Vec> = runtime_tables .into_iter() - .map(Into::into) + .flat_map(|table| { + let JsRuntimeTableF { id, data } = table; + data.into_iter().map(move |column| { + let values = NapiFlatVector::<$NapiF>::from_bytes(column.to_vec()) + .into_iter() + .map(Into::into) + .collect(); + RuntimeTable { id, data: values } + }) + }) .collect(); - RuntimeTable { id, data: values } - }) - }) - .collect(); - - let witness: [Vec<_>; COLUMNS] = witness - .0 - .try_into() - .expect("the witness should be a column of 15 vectors"); - - let index: &ProverIndex> = 
&index.0.as_ref(); - - let public_input = witness[0][0..index.cs.public].to_vec(); - - // Release the runtime lock so that other threads can run using it while we generate the proof. - let group_map = GroupMap::<_>::setup(); - let maybe_proof = ProverProof::create_recursive::< - DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, - DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, - _, - >( - &group_map, - witness, - &rust_runtime_tables, - index, - prev, - None, - &mut rand::rngs::OsRng, - ); - (maybe_proof, public_input) + + let witness: [Vec<_>; COLUMNS] = witness + .0 + .try_into() + .expect("the witness should be a column of 15 vectors"); + + let index: &ProverIndex<$G, OpeningProof<$G>> = &index.0.as_ref(); + + let public_input = witness[0][0..index.cs.public].to_vec(); + + // Release the runtime lock so that other threads can run using it while we generate the proof. + let group_map = GroupMap::<_>::setup(); + let maybe_proof = ProverProof::create_recursive::< + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + _, + >( + &group_map, + witness, + &rust_runtime_tables, + index, + prev, + None, + &mut rand::rngs::OsRng, + ); + (maybe_proof, public_input) + }; + + match maybe_proof { + Ok(proof) => Ok(External::new([] { + proof, + public_input, + })), + Err(err) => Err(NapiError::new(Status::GenericFailure, err.to_string())), + } + } + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_verify">])] + pub fn []( + index: $NapiVerifierIndex, + proof: &External, + ) -> bool { + let group_map = <$G as CommitmentCurve>::Map::setup(); + let verifier_index = &index.into(); + let (proof, public_input) = (&proof.as_ref().proof, &proof.as_ref().public_input); + batch_verify::< + $G, + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> + >( + &group_map, + &[Context { verifier_index, proof, public_input }] + ).is_ok() + } + + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_batch_verify">])] + pub fn []( + indexes: NapiVector<$NapiVerifierIndex>, + proofs: &External>, + ) -> bool { + let indexes: Vec<_> = indexes.into_iter().map(Into::into).collect(); + let proofs_ref = proofs.as_ref(); + + if indexes.len() != proofs_ref.len() { + return false; + } + + let contexts: Vec<_> = indexes + .iter() + .zip(proofs_ref.iter()) + .map(|(index, proof)| Context { + verifier_index: index, + proof: &proof.proof, + public_input: &proof.public_input, + }) + .collect(); + + let group_map = GroupMap::<_>::setup(); + + batch_verify::< + $G, + DefaultFqSponge<_, PlonkSpongeConstantsKimchi>, + DefaultFrSponge<_, PlonkSpongeConstantsKimchi>, + OpeningProof<$G> + >(&group_map, &contexts) + .is_ok() + } + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_dummy">])] + pub fn []() -> External { + fn comm() -> PolyComm<$G> { + let g = $G::generator(); + PolyComm { + chunks: vec![g, g, g], + } + } + + let prev = RecursionChallenge { + chals: vec![$F::one(), $F::one()], + comm: comm(), + }; + let prev_challenges = vec![prev.clone(), prev.clone(), prev.clone()]; + + let g = $G::generator(); + let proof = OpeningProof { + lr: vec![(g, g), (g, g), (g, g)], + z1: $F::one(), + z2: $F::one(), + delta: g, + sg: g, + }; + let eval = || PointEvaluations { + zeta: vec![$F::one()], + zeta_omega: vec![$F::one()], + }; + let evals = ProofEvaluations { + w: core::array::from_fn(|_| eval()), + coefficients: core::array::from_fn(|_| eval()), + z: eval(), + s: core::array::from_fn(|_| 
eval()), + generic_selector: eval(), + poseidon_selector: eval(), + complete_add_selector: eval(), + mul_selector: eval(), + emul_selector: eval(), + endomul_scalar_selector: eval(), + range_check0_selector: None, + range_check1_selector: None, + foreign_field_add_selector: None, + foreign_field_mul_selector: None, + xor_selector: None, + rot_selector: None, + lookup_aggregation: None, + lookup_table: None, + lookup_sorted: array::from_fn(|_| None), + runtime_lookup_table: None, + runtime_lookup_table_selector: None, + xor_lookup_selector: None, + lookup_gate_lookup_selector: None, + range_check_lookup_selector: None, + foreign_field_mul_lookup_selector: None, + public: None, + }; + + let dlogproof = ProverProof { + commitments: ProverCommitments { + w_comm: core::array::from_fn(|_| comm()), + z_comm: comm(), + t_comm: comm(), + lookup: None, + }, + proof, + evals, + ft_eval1: $F::one(), + prev_challenges, + }; + + let public = vec![$F::one(), $F::one()]; + External::new(NapiProofF{proof: dlogproof, public_input: public}) + } + + #[napi(js_name = [<"caml_pasta_" $field_name:snake "_plonk_proof_deep_copy">])] + pub fn []( + x: &External + ) -> External { + External::new(x.as_ref().clone()) + } + } + }; +} + +pub mod fp { + use super::*; + use crate::{ + pasta_fp_plonk_index::WasmPastaFpPlonkIndex as NapiPastaFpPlonkIndex, + poly_comm::vesta::NapiFpPolyComm, + wrappers::{field::NapiPastaFp, group::NapiGVesta}, + NapiFpPlonkVerifierIndex, + }; + use mina_curves::pasta::{Fp, Vesta}; + + impl_proof!( + NapiGVesta, + Vesta, + NapiPastaFp, + Fp, + NapiFpPolyComm, + NapiSrs, + NapiPastaFpPlonkIndex, + NapiFpPlonkVerifierIndex, + Fp + ); +} + +pub mod fq { + use super::*; + use crate::{ + pasta_fq_plonk_index::WasmPastaFqPlonkIndex as NapiPastaFqPlonkIndex, + poly_comm::pallas::NapiFqPolyComm, + wrappers::{field::NapiPastaFq, group::NapiGPallas}, + NapiFqPlonkVerifierIndex, }; + use mina_curves::pasta::{Fq, Pallas}; - match maybe_proof { - Ok(proof) => Ok(External::new(Proof { - proof, - public_input, - })), - Err(err) => Err(NapiError::new(Status::GenericFailure, err.to_string())), - } + impl_proof!( + NapiGPallas, + Pallas, + NapiPastaFq, + Fq, + NapiFqPolyComm, + NapiSrs, + NapiPastaFqPlonkIndex, + NapiFqPlonkVerifierIndex, + Fq + ); } diff --git a/plonk-napi/src/srs.rs b/plonk-napi/src/srs.rs index dcfe743447..3981d2e040 100644 --- a/plonk-napi/src/srs.rs +++ b/plonk-napi/src/srs.rs @@ -109,15 +109,15 @@ macro_rules! impl_srs { } #[napi(js_name = [<"caml_" $name:snake "_srs_create">])] - pub fn [](depth: i32) -> External<[]> { + pub fn [](depth: i32) -> [] { println!("Creating SRS with napi"); - External::new(Arc::new(SRS::<$G>::create(depth as usize)).into()) + Arc::new(SRS::<$G>::create(depth as usize)).into() } #[napi(js_name = [<"caml_" $name:snake "_srs_create_parallel">])] - pub fn [](depth: i32) -> External<[]> { + pub fn [](depth: i32) -> [] { println!("Creating SRS in parallel with napi"); - External::new(Arc::new(SRS::<$G>::create_parallel(depth as usize)).into()) + Arc::new(SRS::<$G>::create_parallel(depth as usize)).into() } #[napi(js_name = [<"caml_" $name:snake "_srs_add_lagrange_basis">])] @@ -164,6 +164,32 @@ macro_rules! 
impl_srs { } } + #[napi(js_name = [<"caml_" $name:snake "_srs_lagrange_commitment">])] + pub fn []( + srs: &[], + domain_size: i32, + i: i32, + ) -> Result<[<$NapiPolyComm>]> { + let x_domain = EvaluationDomain::<$F>::new(domain_size as usize) + .ok_or_else(invalid_domain_error)?; + let basis = srs.get_lagrange_basis(x_domain); + Ok(basis[i as usize].clone().into()) + } + + // Fake overwrite of the plonk_wasm equivalent, but without pointers. + // In the srs bindings, the same symbol will be used to either provide + // the pointer for wasm, or the actual data for napi + #[napi(js_name = [<"caml_" $name:snake "_srs_lagrange_commitments_whole_domain_ptr">])] + pub fn []( + srs: &External<[]>, + domain_size: i32, + ) -> Result> { + let domain = EvaluationDomain::<$F>::new(domain_size as usize) + .ok_or_else(invalid_domain_error)?; + let basis = srs.0.get_lagrange_basis(domain); + Ok(basis.iter().cloned().map(Into::into).collect()) + } + #[napi(js_name = [<"caml_" $name:snake "_srs_get">])] pub fn [](srs: &External<[]>) -> Vec<$NapiG> { println!("Getting SRS with napi"); @@ -173,14 +199,14 @@ macro_rules! impl_srs { } #[napi(js_name = [<"caml_" $name:snake "_srs_set">])] - pub fn [](h_and_gs: Vec<$NapiG>) -> External<[]> { + pub fn [](h_and_gs: Vec<$NapiG>) -> [] { println!("Setting SRS with napi"); let mut h_and_gs: Vec<$G> = h_and_gs.into_iter().map(Into::into).collect(); let h = h_and_gs.remove(0); let g = h_and_gs; let srs = SRS::<$G> { h, g, lagrange_bases: HashMapCache::new() }; - External::new(Arc::new(srs).into()) + Arc::new(srs).into() } #[napi(js_name = [<"caml_" $name:snake "_srs_maybe_lagrange_commitment">])] @@ -223,15 +249,6 @@ macro_rules! impl_srs { Ok(basis.iter().cloned().map(Into::into).collect()) } - #[napi(js_name = [<"caml_" $name:snake "_srs_to_bytes">])] - pub fn [](srs: &[]) -> Result { - srs.serialize() - } - - #[napi(js_name = [<"caml_" $name:snake "_srs_from_bytes">])] - pub fn [](bytes: Uint8Array) -> Result<[]> { - []::deserialize(bytes) - } #[napi(js_name = [<"caml_" $name:snake "_srs_commit_evaluations">])] pub fn [](srs: &[], @@ -322,6 +339,11 @@ pub fn caml_fp_srs_to_bytes(srs: &fp::NapiFpSrs) -> Result { srs.serialize() } +#[napi(js_name = "caml_fp_srs_to_bytes_external")] +pub fn caml_fp_srs_to_bytes_external(srs: &External) -> Uint8Array { + caml_fp_srs_to_bytes(srs).expect("failed to serialize external fp srs") +} + #[napi(js_name = "caml_fp_srs_from_bytes")] pub fn caml_fp_srs_from_bytes(bytes: Uint8Array) -> Result { fp::NapiFpSrs::deserialize(bytes) @@ -338,6 +360,11 @@ pub fn caml_fq_srs_to_bytes(srs: &fq::NapiFqSrs) -> Result { srs.serialize() } +#[napi(js_name = "caml_fq_srs_to_bytes_external")] +pub fn caml_fq_srs_to_bytes_external(srs: &External) -> Uint8Array { + caml_fq_srs_to_bytes(srs).expect("failed to serialize external fq srs") +} + #[napi(js_name = "caml_fq_srs_from_bytes")] pub fn caml_fq_srs_from_bytes(bytes: Uint8Array) -> Result { fq::NapiFqSrs::deserialize(bytes) diff --git a/plonk-napi/src/wrappers/field.rs b/plonk-napi/src/wrappers/field.rs index 022636f39d..7ebc27763e 100644 --- a/plonk-napi/src/wrappers/field.rs +++ b/plonk-napi/src/wrappers/field.rs @@ -113,6 +113,13 @@ macro_rules! 
impl_field_wrapper { ::to_napi_value(env, buffer) } } + + impl<'a> ToNapiValue for &'a mut $name { + unsafe fn to_napi_value(env: sys::napi_env, val: Self) -> Result { + let buffer = Buffer::from(val.to_bytes()); + ::to_napi_value(env, buffer) + } + } }; } diff --git a/plonk-napi/src/wrappers/wires.rs b/plonk-napi/src/wrappers/wires.rs index 9a2f67ea11..399de6ba9e 100644 --- a/plonk-napi/src/wrappers/wires.rs +++ b/plonk-napi/src/wrappers/wires.rs @@ -1,4 +1,4 @@ -use kimchi::circuits::wires::Wire as KimchiWire; +use kimchi::circuits::wires::Wire; use napi_derive::napi; #[napi(object)] @@ -8,17 +8,17 @@ pub struct NapiWire { pub col: u32, } -impl From for KimchiWire { +impl From for Wire { fn from(value: NapiWire) -> Self { - KimchiWire { + Wire { row: value.row as usize, col: value.col as usize, } } } -impl From for NapiWire { - fn from(value: KimchiWire) -> Self { +impl From for NapiWire { + fn from(value: Wire) -> Self { Self { row: value.row as u32, col: value.col as u32,
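
For reference, a minimal sketch of how the reworked SRS byte helpers in the srs.rs hunk above could be exercised from Rust. This is illustrative only: it assumes a #[cfg(test)] module inside the plonk-napi crate where these functions are in scope, that caml_fp_srs_from_bytes returns the NapiFpSrs wrapper, and the depth value of 8 is arbitrary.

    // Sketch: round-trips an SRS through the byte helpers touched above.
    fn srs_bytes_round_trip() -> napi::bindgen_prelude::Result<()> {
        // caml_fp_srs_create now returns the NapiFpSrs wrapper directly
        // instead of an External pointer (see the srs.rs hunk above).
        let srs = caml_fp_srs_create(8);
        // Serialize to a Uint8Array, then parse the bytes back into a wrapper.
        let bytes = caml_fp_srs_to_bytes(&srs)?;
        let _restored = caml_fp_srs_from_bytes(bytes)?;
        Ok(())
    }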