From fe7ba7da204d3df2e57fe967465de0d5f2cf57ec Mon Sep 17 00:00:00 2001 From: Quang Dao Date: Fri, 27 Feb 2026 22:29:28 -0800 Subject: [PATCH 1/5] Evolve CommitmentScheme trait for PCS-generic batch opening Refactor the CommitmentScheme trait to abstract away Dory-specific assumptions (homomorphic batching via combine_commitments/combine_hints) and prepare a clean PCS-generic Stage 8 batch opening interface. Key changes: - Add BatchPolynomialSource trait for lazy polynomial access - Add StreamingBatchSource that wraps existing RLCPolynomial::new_streaming - Add batch_prove/batch_verify to CommitmentScheme trait - Add commitment param to prove() (needed by non-homomorphic PCS) - Remove combine_commitments/combine_hints from trait (moved to private Dory methods) - Refactor prove_stage8/verify_stage8 to use PCS::batch_prove/batch_verify - Remove DoryOpeningState (replaced by StreamingBatchSource) - Update JoltProof to store BatchedProof for joint opening Zero performance regression on Dory/BN254 path: the streaming RLC polynomial construction is preserved identically via StreamingBatchSource. 
Made-with: Cursor --- .../src/poly/commitment/commitment_scheme.rs | 87 +++++----------- .../poly/commitment/dory/commitment_scheme.rs | 88 ++++++++++++----- jolt-core/src/poly/commitment/dory/tests.rs | 38 +++---- jolt-core/src/poly/commitment/hyperkzg.rs | 80 ++++++++++++--- jolt-core/src/poly/commitment/mock.rs | 43 +++++--- jolt-core/src/poly/opening_proof.rs | 94 +++++++----------- jolt-core/src/zkvm/proof_serialization.rs | 2 +- jolt-core/src/zkvm/prover.rs | 98 +++++++++++-------- jolt-core/src/zkvm/verifier.rs | 87 +++++----------- 9 files changed, 317 insertions(+), 300 deletions(-) diff --git a/jolt-core/src/poly/commitment/commitment_scheme.rs b/jolt-core/src/poly/commitment/commitment_scheme.rs index e53b45ce4c..4a022e87c4 100644 --- a/jolt-core/src/poly/commitment/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/commitment_scheme.rs @@ -2,6 +2,7 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use std::borrow::Borrow; use std::fmt::Debug; +use crate::poly::opening_proof::BatchPolynomialSource; use crate::transcripts::Transcript; use crate::{ field::JoltField, @@ -28,35 +29,15 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { /// used as a hint for the opening proof. type OpeningProofHint: Sync + Send + Clone + Debug + PartialEq; - /// Generates the prover setup for this PCS. `max_num_vars` is the maximum number of - /// variables of any polynomial that will be committed using this setup. fn setup_prover(max_num_vars: usize) -> Self::ProverSetup; - /// Generates the verifier setup from the prover setup. fn setup_verifier(setup: &Self::ProverSetup) -> Self::VerifierSetup; - /// Commits to a multilinear polynomial using the provided setup. 
- /// - /// # Arguments - /// * `poly` - The multilinear polynomial to commit to - /// * `setup` - The prover setup for the commitment scheme - /// - /// # Returns - /// A tuple containing the commitment to the polynomial and a hint that can be used - /// to optimize opening proof generation fn commit( poly: &MultilinearPolynomial, setup: &Self::ProverSetup, ) -> (Self::Commitment, Self::OpeningProofHint); - /// Commits to multiple multilinear polynomials in batch. - /// - /// # Arguments - /// * `polys` - A slice of multilinear polynomials to commit to - /// * `gens` - The prover setup for the commitment scheme - /// - /// # Returns - /// A vector of commitments, one for each input polynomial fn batch_commit( polys: &[U], gens: &Self::ProverSetup, @@ -64,56 +45,15 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { where U: Borrow> + Sync; - /// Homomorphically combines multiple commitments into a single commitment, computed as a - /// linear combination with the given coefficients. - fn combine_commitments>( - _commitments: &[C], - _coeffs: &[Self::Field], - ) -> Self::Commitment { - todo!("`combine_commitments` should be on a separate `AdditivelyHomomorphic` trait") - } - - /// Homomorphically combines multiple opening proof hints into a single hint, computed as a - /// linear combination with the given coefficients. - fn combine_hints( - _hints: Vec, - _coeffs: &[Self::Field], - ) -> Self::OpeningProofHint { - unimplemented!() - } - - /// Generates a proof of evaluation for a polynomial at a specific point. - /// - /// # Arguments - /// * `setup` - The prover setup for the commitment scheme - /// * `poly` - The multilinear polynomial being proved - /// * `opening_point` - The point at which the polynomial is evaluated - /// * `hint` - An optional hint that helps optimize the proof generation. - /// When `None`, implementations should compute the hint internally if needed. 
- /// * `transcript` - The transcript for Fiat-Shamir transformation - /// - /// # Returns - /// A proof of the polynomial evaluation at the specified point fn prove( setup: &Self::ProverSetup, poly: &MultilinearPolynomial, opening_point: &[::Challenge], hint: Option, transcript: &mut ProofTranscript, + commitment: &Self::Commitment, ) -> Self::Proof; - /// Verifies a proof of polynomial evaluation at a specific point. - /// - /// # Arguments - /// * `proof` - The proof to be verified - /// * `setup` - The verifier setup for the commitment scheme - /// * `transcript` - The transcript for Fiat-Shamir transformation - /// * `opening_point` - The point at which the polynomial is evaluated - /// * `opening` - The claimed evaluation value of the polynomial at the opening point - /// * `commitment` - The commitment to the polynomial - /// - /// # Returns - /// Ok(()) if the proof is valid, otherwise a ProofVerifyError fn verify( proof: &Self::Proof, setup: &Self::VerifierSetup, @@ -123,6 +63,29 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { commitment: &Self::Commitment, ) -> Result<(), ProofVerifyError>; + #[allow(clippy::too_many_arguments)] + fn batch_prove>( + setup: &Self::ProverSetup, + poly_source: &S, + hints: Vec, + commitments: &[&Self::Commitment], + opening_point: &[::Challenge], + claims: &[Self::Field], + coeffs: &[Self::Field], + transcript: &mut ProofTranscript, + ) -> Self::BatchedProof; + + /// Verifies a batch opening proof for multiple polynomials evaluated at a single point. 
+ fn batch_verify( + proof: &Self::BatchedProof, + setup: &Self::VerifierSetup, + transcript: &mut ProofTranscript, + opening_point: &[::Challenge], + commitments: &[&Self::Commitment], + claims: &[Self::Field], + coeffs: &[Self::Field], + ) -> Result<(), ProofVerifyError>; + fn protocol_name() -> &'static [u8]; } diff --git a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs index d9b890c6ef..6727d91840 100644 --- a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs @@ -10,6 +10,7 @@ use crate::{ field::JoltField, poly::commitment::commitment_scheme::{CommitmentScheme, StreamingCommitmentScheme}, poly::multilinear_polynomial::MultilinearPolynomial, + poly::opening_proof::BatchPolynomialSource, transcripts::Transcript, utils::{errors::ProofVerifyError, math::Math, small_scalar::SmallScalar}, }; @@ -24,7 +25,6 @@ use rand_chacha::ChaCha20Rng; use rand_core::SeedableRng; use rayon::prelude::*; use sha3::{Digest, Sha3_256}; -use std::borrow::Borrow; use tracing::trace_span; #[derive(Clone)] @@ -105,6 +105,7 @@ impl CommitmentScheme for DoryCommitmentScheme { opening_point: &[::Challenge], hint: Option, transcript: &mut ProofTranscript, + _commitment: &Self::Commitment, ) -> Self::Proof { let _span = trace_span!("DoryCommitmentScheme::prove").entered(); @@ -182,26 +183,70 @@ impl CommitmentScheme for DoryCommitmentScheme { Ok(()) } + fn batch_prove>( + setup: &Self::ProverSetup, + poly_source: &S, + hints: Vec, + commitments: &[&Self::Commitment], + opening_point: &[::Challenge], + _claims: &[Self::Field], + coeffs: &[Self::Field], + transcript: &mut ProofTranscript, + ) -> Self::BatchedProof { + let joint_poly = poly_source.build_joint_polynomial(coeffs); + let combined_hint = Self::combine_hints_internal(hints, coeffs); + let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); + let proof = Self::prove( + setup, + 
&joint_poly, + opening_point, + Some(combined_hint), + transcript, + &joint_commitment, + ); + vec![proof] + } + + fn batch_verify( + proof: &Self::BatchedProof, + setup: &Self::VerifierSetup, + transcript: &mut ProofTranscript, + opening_point: &[::Challenge], + commitments: &[&Self::Commitment], + claims: &[Self::Field], + coeffs: &[Self::Field], + ) -> Result<(), ProofVerifyError> { + let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); + let joint_claim: ark_bn254::Fr = coeffs.iter().zip(claims).map(|(c, v)| *c * *v).sum(); + Self::verify( + &proof[0], + setup, + transcript, + opening_point, + &joint_claim, + &joint_commitment, + ) + } + fn protocol_name() -> &'static [u8] { b"Dory" } +} - /// In Dory, the opening proof hint consists of the Pedersen commitments to the rows - /// of the polynomial coefficient matrix. In the context of a batch opening proof, we - /// can homomorphically combine the row commitments for multiple polynomials into the - /// row commitments for the RLC of those polynomials. This is more efficient than computing - /// the row commitments for the RLC from scratch. - #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::combine_hints")] - fn combine_hints( - hints: Vec, - coeffs: &[Self::Field], - ) -> Self::OpeningProofHint { +impl DoryCommitmentScheme { + /// Homomorphically combines row commitment hints using a random linear combination. 
+ #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::combine_hints_internal")] + pub(crate) fn combine_hints_internal( + hints: Vec>, + coeffs: &[ark_bn254::Fr], + ) -> Vec { let num_rows = DoryGlobals::get_max_num_rows(); let mut rlc_hint = vec![ArkG1(G1Projective::zero()); num_rows]; for (coeff, mut hint) in coeffs.iter().zip(hints.into_iter()) { hint.resize(num_rows, ArkG1(G1Projective::zero())); + // SAFETY: ArkG1 is repr(transparent) over G1Projective let row_commitments: &mut [G1Projective] = unsafe { std::slice::from_raw_parts_mut(hint.as_mut_ptr() as *mut G1Projective, hint.len()) }; @@ -213,8 +258,6 @@ impl CommitmentScheme for DoryCommitmentScheme { let _span = trace_span!("vector_scalar_mul_add_gamma_g1_online"); let _enter = _span.enter(); - // Scales the row commitments for the current polynomial by - // its coefficient jolt_optimizations::vector_scalar_mul_add_gamma_g1_online( row_commitments, *coeff, @@ -227,20 +270,15 @@ impl CommitmentScheme for DoryCommitmentScheme { rlc_hint } - /// Homomorphically combines multiple commitments using a random linear combination. - /// Computes: sum_i(coeff_i * commitment_i) for the GT elements. - #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::combine_commitments")] - fn combine_commitments>( - commitments: &[C], - coeffs: &[Self::Field], - ) -> Self::Commitment { - let _span = trace_span!("DoryCommitmentScheme::combine_commitments").entered(); - - // Combine GT elements using parallel RLC - let commitments_vec: Vec<&ArkGT> = commitments.iter().map(|c| c.borrow()).collect(); + /// Homomorphically combines multiple GT commitments using a random linear combination. 
+ #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::combine_commitments_internal")] + pub(crate) fn combine_commitments_internal( + commitments: &[&ArkGT], + coeffs: &[ark_bn254::Fr], + ) -> ArkGT { coeffs .par_iter() - .zip(commitments_vec.par_iter()) + .zip(commitments.par_iter()) .map(|(coeff, commitment)| { let ark_coeff = jolt_to_ark(coeff); ark_coeff * **commitment diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs index c3cf91e897..aa8c151811 100644 --- a/jolt-core/src/poly/commitment/dory/tests.rs +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -41,6 +41,7 @@ mod tests { &opening_point, Some(row_commitments), &mut prove_transcript, + &commitment, ); let mut verify_transcript = Blake2bTranscript::new(b"dory_test"); @@ -267,6 +268,7 @@ mod tests { &opening_point, Some(row_commitments), &mut prove_transcript, + &commitment, ); // Test 1: Tamper with the evaluation @@ -426,6 +428,7 @@ mod tests { &opening_point, Some(row_commitments), &mut prove_transcript, + &commitment, ); let mut verify_transcript = Blake2bTranscript::new(b"dory_test"); @@ -480,28 +483,25 @@ mod tests { // Step 3: Generate 5 random coefficients let coeffs: Vec = (0..num_polys).map(|_| Fr::rand(&mut rng)).collect(); - // Step 4: Homomorphically combine commitments and hints - let combined_commitment = DoryCommitmentScheme::combine_commitments(&commitments, &coeffs); - let combined_hint = DoryCommitmentScheme::combine_hints(hints, &coeffs); + let commitment_refs: Vec<&ArkGT> = commitments.iter().collect(); + let combined_commitment = + DoryCommitmentScheme::combine_commitments_internal(&commitment_refs, &coeffs); + let combined_hint = DoryCommitmentScheme::combine_hints_internal(hints, &coeffs); - // Step 5: Generate evaluation point first let opening_point: Vec<::Challenge> = (0..num_vars) .map(|_| ::Challenge::random(&mut rng)) .collect(); - // Step 6: Compute expected evaluation as linear combination: eval = coeff[0]*P0(r) 
+ ... + coeff[4]*P4(r) let mut evaluation = Fr::zero(); for (poly, coeff) in polys.iter().zip(coeffs.iter()) { let poly_eval = poly.evaluate(&opening_point); evaluation += *coeff * poly_eval; } - // Step 7: Compute combined polynomial: P = coeff[0]*P0 + coeff[1]*P1 + ... + coeff[4]*P4 let poly_refs: Vec<&MultilinearPolynomial> = polys.iter().collect(); let combined_poly = DensePolynomial::linear_combination(&poly_refs, &coeffs); let combined_poly = MultilinearPolynomial::from(combined_poly.Z); - // Step 8: Create evaluation proof using combined commitment and hint let mut prove_transcript = Blake2bTranscript::new(b"dory_homomorphic_test"); let proof = DoryCommitmentScheme::prove( &prover_setup, @@ -509,9 +509,9 @@ mod tests { &opening_point, Some(combined_hint), &mut prove_transcript, + &combined_commitment, ); - // Step 9: Verify the proof let mut verify_transcript = Blake2bTranscript::new(b"dory_homomorphic_test"); let result = DoryCommitmentScheme::verify( &proof, @@ -541,7 +541,6 @@ mod tests { let mut rng = thread_rng(); - // Step 1: Generate 5 random polynomials let polys: Vec> = (0..num_polys) .map(|_| { let coeffs: Vec = (0..num_coeffs).map(|_| Fr::rand(&mut rng)).collect(); @@ -552,48 +551,40 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup); - // Step 2: Use batch_commit let commitments_and_hints = DoryCommitmentScheme::batch_commit(&polys, &prover_setup); let commitments: Vec<_> = commitments_and_hints.iter().map(|(c, _)| *c).collect(); let hints: Vec<_> = commitments_and_hints.into_iter().map(|(_, h)| h).collect(); - // Step 3: Generate random coefficients (like gamma powers in opening_proof.rs) let coeffs: Vec = (0..num_polys).map(|_| Fr::rand(&mut rng)).collect(); - // Step 4: Homomorphically combine commitments and hints - let combined_commitment = DoryCommitmentScheme::combine_commitments(&commitments, &coeffs); - let combined_hint = 
DoryCommitmentScheme::combine_hints(hints, &coeffs); + let commitment_refs: Vec<&ArkGT> = commitments.iter().collect(); + let combined_commitment = + DoryCommitmentScheme::combine_commitments_internal(&commitment_refs, &coeffs); + let combined_hint = DoryCommitmentScheme::combine_hints_internal(hints, &coeffs); - // Step 5: Generate evaluation point let opening_point: Vec<::Challenge> = (0..num_vars) .map(|_| ::Challenge::random(&mut rng)) .collect(); - // Step 6: Compute expected evaluation as linear combination let mut evaluation = Fr::zero(); for (poly, coeff) in polys.iter().zip(coeffs.iter()) { let poly_eval = poly.evaluate(&opening_point); evaluation += *coeff * poly_eval; } - // Step 7: Create combined polynomial let poly_refs: Vec<&MultilinearPolynomial> = polys.iter().collect(); let combined_poly = DensePolynomial::linear_combination(&poly_refs, &coeffs); let combined_poly = MultilinearPolynomial::from(combined_poly.Z); - // Step 8: Verify that directly committing to the combined polynomial gives the same result - // as homomorphically combining the individual commitments let (direct_commitment, direct_hint) = DoryCommitmentScheme::commit(&combined_poly, &prover_setup); - // The commitments should match assert_eq!( combined_commitment, direct_commitment, "Homomorphically combined commitment should match direct commitment to RLC" ); - // Step 9: Create evaluation proof using combined hint let mut prove_transcript = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); let proof = DoryCommitmentScheme::prove( &prover_setup, @@ -601,9 +592,9 @@ mod tests { &opening_point, Some(combined_hint), &mut prove_transcript, + &combined_commitment, ); - // Step 10: Verify the proof let mut verify_transcript = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); let result = DoryCommitmentScheme::verify( &proof, @@ -619,7 +610,6 @@ mod tests { "Verification should succeed with batch_commit flow: {result:?}" ); - // Step 11: Also verify that proving with the direct 
hint works let mut prove_transcript2 = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); let proof2 = DoryCommitmentScheme::prove( &prover_setup, @@ -627,6 +617,7 @@ mod tests { &opening_point, Some(direct_hint), &mut prove_transcript2, + &direct_commitment, ); let mut verify_transcript2 = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); @@ -830,6 +821,7 @@ mod tests { &opening_point, Some(row_commitments), &mut prove_transcript, + &commitment, ); let mut verify_transcript = Blake2bTranscript::new(b"dory_test"); diff --git a/jolt-core/src/poly/commitment/hyperkzg.rs b/jolt-core/src/poly/commitment/hyperkzg.rs index 745e97d6d9..1d30c39822 100644 --- a/jolt-core/src/poly/commitment/hyperkzg.rs +++ b/jolt-core/src/poly/commitment/hyperkzg.rs @@ -14,6 +14,7 @@ use super::{ }; use crate::field::JoltField; use crate::poly::multilinear_polynomial::{MultilinearPolynomial, PolynomialEvaluation}; +use crate::poly::opening_proof::BatchPolynomialSource; use crate::poly::rlc_polynomial::RLCPolynomial; use crate::zkvm::witness::CommittedPolynomial; use crate::{ @@ -470,24 +471,13 @@ where .collect() } - fn combine_commitments>( - commitments: &[C], - coeffs: &[Self::Field], - ) -> Self::Commitment { - let combined_commitment: P::G1 = commitments - .iter() - .zip(coeffs.iter()) - .map(|(commitment, coeff)| commitment.borrow().0 * coeff) - .sum(); - HyperKZGCommitment(combined_commitment.into_affine()) - } - fn prove( setup: &Self::ProverSetup, poly: &MultilinearPolynomial, - opening_point: &[::Challenge], // point at which the polynomial is evaluated + opening_point: &[::Challenge], _hint: Option, transcript: &mut ProofTranscript, + _commitment: &Self::Commitment, ) -> Self::Proof { let eval = poly.evaluate(opening_point); HyperKZG::

::open(setup, poly, opening_point, &eval, transcript).unwrap() @@ -497,18 +487,78 @@ where proof: &Self::Proof, setup: &Self::VerifierSetup, transcript: &mut ProofTranscript, - opening_point: &[::Challenge], // point at which the polynomial is evaluated - opening: &Self::Field, // evaluation \widetilde{Z}(r) + opening_point: &[::Challenge], + opening: &Self::Field, commitment: &Self::Commitment, ) -> Result<(), ProofVerifyError> { HyperKZG::

::verify(setup, commitment, opening_point, opening, proof, transcript) } + fn batch_prove>( + setup: &Self::ProverSetup, + poly_source: &S, + _hints: Vec, + commitments: &[&Self::Commitment], + opening_point: &[::Challenge], + _claims: &[Self::Field], + coeffs: &[Self::Field], + transcript: &mut ProofTranscript, + ) -> Self::BatchedProof { + let joint_poly = poly_source.build_joint_polynomial(coeffs); + let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); + Self::prove( + setup, + &joint_poly, + opening_point, + None, + transcript, + &joint_commitment, + ) + } + + fn batch_verify( + proof: &Self::BatchedProof, + setup: &Self::VerifierSetup, + transcript: &mut ProofTranscript, + opening_point: &[::Challenge], + commitments: &[&Self::Commitment], + claims: &[Self::Field], + coeffs: &[Self::Field], + ) -> Result<(), ProofVerifyError> { + let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); + let joint_claim: Self::Field = coeffs.iter().zip(claims).map(|(c, v)| *c * *v).sum(); + HyperKZG::

::verify( + setup, + &joint_commitment, + opening_point, + &joint_claim, + proof, + transcript, + ) + } + fn protocol_name() -> &'static [u8] { b"hyperkzg" } } +impl HyperKZG

+where +

::ScalarField: JoltField, +{ + fn combine_commitments_internal( + commitments: &[&HyperKZGCommitment

], + coeffs: &[P::ScalarField], + ) -> HyperKZGCommitment

{ + let combined: P::G1 = commitments + .iter() + .zip(coeffs.iter()) + .map(|(commitment, coeff)| commitment.0 * coeff) + .sum(); + HyperKZGCommitment(combined.into_affine()) + } +} + impl super::commitment_scheme::StreamingCommitmentScheme for HyperKZG

where

::ScalarField: JoltField, diff --git a/jolt-core/src/poly/commitment/mock.rs b/jolt-core/src/poly/commitment/mock.rs index aac236066d..e14e8425cd 100644 --- a/jolt-core/src/poly/commitment/mock.rs +++ b/jolt-core/src/poly/commitment/mock.rs @@ -6,6 +6,7 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use crate::{ field::JoltField, poly::multilinear_polynomial::MultilinearPolynomial, + poly::opening_proof::BatchPolynomialSource, transcripts::Transcript, utils::{errors::ProofVerifyError, small_scalar::SmallScalar}, }; @@ -63,25 +64,13 @@ where .collect() } - fn combine_commitments>( - _commitments: &[C], - _coeffs: &[Self::Field], - ) -> Self::Commitment { - MockCommitment::default() - } - - fn combine_hints( - _hints: Vec, - _coeffs: &[Self::Field], - ) -> Self::OpeningProofHint { - } - fn prove( _setup: &Self::ProverSetup, _poly: &MultilinearPolynomial, opening_point: &[::Challenge], _hint: Option, _transcript: &mut ProofTranscript, + _commitment: &Self::Commitment, ) -> Self::Proof { MockProof { opening_point: opening_point.to_owned(), @@ -100,6 +89,34 @@ where Ok(()) } + fn batch_prove>( + _setup: &Self::ProverSetup, + _poly_source: &S, + _hints: Vec, + _commitments: &[&Self::Commitment], + opening_point: &[::Challenge], + _claims: &[Self::Field], + _coeffs: &[Self::Field], + _transcript: &mut ProofTranscript, + ) -> Self::BatchedProof { + MockProof { + opening_point: opening_point.to_owned(), + } + } + + fn batch_verify( + proof: &Self::BatchedProof, + _setup: &Self::VerifierSetup, + _transcript: &mut ProofTranscript, + opening_point: &[::Challenge], + _commitments: &[&Self::Commitment], + _claims: &[Self::Field], + _coeffs: &[Self::Field], + ) -> Result<(), ProofVerifyError> { + assert_eq!(proof.opening_point, opening_point); + Ok(()) + } + fn protocol_name() -> &'static [u8] { b"mock_commit" } diff --git a/jolt-core/src/poly/opening_proof.rs b/jolt-core/src/poly/opening_proof.rs index 217e14f4e3..7812855cd2 100644 --- 
a/jolt-core/src/poly/opening_proof.rs +++ b/jolt-core/src/poly/opening_proof.rs @@ -16,15 +16,48 @@ use std::cell::RefCell; use std::collections::{BTreeMap, HashMap}; use std::sync::Arc; -use super::{ - commitment::commitment_scheme::CommitmentScheme, multilinear_polynomial::MultilinearPolynomial, -}; +use super::multilinear_polynomial::MultilinearPolynomial; use crate::{ field::JoltField, transcripts::Transcript, zkvm::witness::{CommittedPolynomial, VirtualPolynomial}, }; +/// Provides lazy access to polynomial data for batch opening proofs. +/// Constructed by the Jolt prover from trace + preprocessing data. +/// Each PCS calls methods on the source as needed: +/// - Dory calls `build_joint_polynomial` to get a streaming RLC polynomial +/// - Hachi ignores the source and uses ring_coeffs from its OpeningProofHint +pub trait BatchPolynomialSource: Send + Sync { + /// Construct the joint (RLC) polynomial: `sum_i coeffs[i] * poly_i`. + /// The returned polynomial may evaluate lazily (e.g., streaming from trace data). + fn build_joint_polynomial(&self, coeffs: &[F]) -> MultilinearPolynomial; +} + +/// Streams polynomial data from the execution trace for Dory batch opening. +/// Wraps the existing `RLCPolynomial::new_streaming` path so that Dory's +/// `batch_prove` avoids regenerating witness polynomials. 
+pub struct StreamingBatchSource { + pub one_hot_params: OneHotParams, + pub trace_source: TraceSource, + pub streaming_data: Arc, + pub advice_polys: HashMap>, + pub poly_ids: Vec, +} + +impl BatchPolynomialSource for StreamingBatchSource { + fn build_joint_polynomial(&self, coeffs: &[F]) -> MultilinearPolynomial { + MultilinearPolynomial::RLC(RLCPolynomial::new_streaming( + self.one_hot_params.clone(), + Arc::clone(&self.streaming_data), + self.trace_source.clone(), + self.poly_ids.clone(), + coeffs, + self.advice_polys.clone(), + )) + } +} + pub type Endianness = bool; pub const BIG_ENDIAN: Endianness = false; pub const LITTLE_ENDIAN: Endianness = true; @@ -229,61 +262,6 @@ pub trait OpeningAccumulator { ) -> Option<(OpeningPoint, F)>; } -/// State for Dory batch opening (Stage 8). -/// This is a generic interface for batch opening proofs. -#[derive(Clone, Allocative)] -pub struct DoryOpeningState { - /// Unified opening point for all polynomials (length = log_k_chunk + log_T) - pub opening_point: Vec, - /// γ^i coefficients for the RLC polynomial - pub gamma_powers: Vec, - /// (polynomial, claim) pairs at the opening point - /// (with Lagrange factors already applied for shorter polys) - pub polynomial_claims: Vec<(CommittedPolynomial, F)>, -} - -impl DoryOpeningState { - /// Build streaming RLC polynomial from this state. - /// Streams directly from trace - no witness regeneration needed. - /// Advice polynomials are passed separately (not streamed from trace). 
- #[tracing::instrument(skip_all)] - pub fn build_streaming_rlc>( - &self, - one_hot_params: OneHotParams, - trace_source: TraceSource, - rlc_streaming_data: Arc, - mut opening_hints: HashMap, - advice_polys: HashMap>, - ) -> (MultilinearPolynomial, PCS::OpeningProofHint) { - // Accumulate gamma coefficients per polynomial - let mut rlc_map = BTreeMap::new(); - for (gamma, (poly, _claim)) in self.gamma_powers.iter().zip(self.polynomial_claims.iter()) { - *rlc_map.entry(*poly).or_insert(F::zero()) += *gamma; - } - - let (poly_ids, coeffs): (Vec, Vec) = - rlc_map.iter().map(|(k, v)| (*k, *v)).unzip(); - - let joint_poly = MultilinearPolynomial::RLC(RLCPolynomial::new_streaming( - one_hot_params, - rlc_streaming_data, - trace_source, - poly_ids.clone(), - &coeffs, - advice_polys, - )); - - let hints: Vec = rlc_map - .into_keys() - .map(|k| opening_hints.remove(&k).unwrap()) - .collect(); - - let hint = PCS::combine_hints(hints, &coeffs); - - (joint_poly, hint) - } -} - impl Default for ProverOpeningAccumulator where F: JoltField, diff --git a/jolt-core/src/zkvm/proof_serialization.rs b/jolt-core/src/zkvm/proof_serialization.rs index 174a256135..d9a82ec04e 100644 --- a/jolt-core/src/zkvm/proof_serialization.rs +++ b/jolt-core/src/zkvm/proof_serialization.rs @@ -40,7 +40,7 @@ pub struct JoltProof, FS: Transcr pub stage5_sumcheck_proof: SumcheckInstanceProof, pub stage6_sumcheck_proof: SumcheckInstanceProof, pub stage7_sumcheck_proof: SumcheckInstanceProof, - pub joint_opening_proof: PCS::Proof, + pub joint_opening_proof: PCS::BatchedProof, pub untrusted_advice_commitment: Option, pub trace_length: usize, pub ram_K: usize, diff --git a/jolt-core/src/zkvm/prover.rs b/jolt-core/src/zkvm/prover.rs index 041eb37774..72c3b7ffdc 100644 --- a/jolt-core/src/zkvm/prover.rs +++ b/jolt-core/src/zkvm/prover.rs @@ -34,8 +34,8 @@ use crate::{ }, multilinear_polynomial::MultilinearPolynomial, opening_proof::{ - compute_advice_lagrange_factor, DoryOpeningState, OpeningAccumulator, - 
ProverOpeningAccumulator, SumcheckId, + compute_advice_lagrange_factor, OpeningAccumulator, ProverOpeningAccumulator, + StreamingBatchSource, SumcheckId, }, rlc_polynomial::{RLCStreamingData, TraceSource}, }, @@ -449,7 +449,19 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip let untrusted_advice_commitment = self.generate_and_commit_untrusted_advice(); self.generate_and_commit_trusted_advice(); - // Add advice hints for batched Stage 8 opening + // Build per-polynomial commitment map for Stage 8 + let main_polys = all_committed_polynomials(&self.one_hot_params); + let mut commitment_map: HashMap = main_polys + .into_iter() + .zip(commitments.iter().cloned()) + .collect(); + if let Some(ref c) = self.advice.trusted_advice_commitment { + commitment_map.insert(CommittedPolynomial::TrustedAdvice, c.clone()); + } + if let Some(ref c) = untrusted_advice_commitment { + commitment_map.insert(CommittedPolynomial::UntrustedAdvice, c.clone()); + } + if let Some(hint) = self.advice.trusted_advice_hint.take() { opening_proof_hints.insert(CommittedPolynomial::TrustedAdvice, hint); } @@ -465,7 +477,7 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip let stage6_sumcheck_proof = self.prove_stage6(); let stage7_sumcheck_proof = self.prove_stage7(); - let joint_opening_proof = self.prove_stage8(opening_proof_hints); + let joint_opening_proof = self.prove_stage8(opening_proof_hints, commitment_map); #[cfg(test)] assert!( @@ -1304,14 +1316,15 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip sumcheck_proof } - /// Stage 8: Dory batch opening proof. - /// Builds streaming RLC polynomial directly from trace (no witness regeneration needed). + /// Stage 8: PCS batch opening proof. + /// Streams polynomial data lazily from trace via `StreamingBatchSource` -- no witness regeneration. 
#[tracing::instrument(skip_all)] fn prove_stage8( &mut self, opening_proof_hints: HashMap, - ) -> PCS::Proof { - tracing::info!("Stage 8 proving (Dory batch opening)"); + commitment_map: HashMap, + ) -> PCS::BatchedProof { + tracing::info!("Stage 8 proving (batch opening)"); let _guard = DoryGlobals::initialize_context( self.one_hot_params.k_chunk, @@ -1320,8 +1333,6 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip Some(DoryGlobals::get_layout()), ); - // Get the unified opening point from HammingWeightClaimReduction - // This contains (r_address_stage7 || r_cycle_stage6) in big-endian let (opening_point, _) = self.opening_accumulator.get_committed_polynomial_opening( CommittedPolynomial::InstructionRa(0), SumcheckId::HammingWeightClaimReduction, @@ -1329,11 +1340,8 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip let log_k_chunk = self.one_hot_params.log_k_chunk; let r_address_stage7 = &opening_point.r[..log_k_chunk]; - // 1. Collect all (polynomial, claim) pairs let mut polynomial_claims = Vec::new(); - // Dense polynomials: RamInc and RdInc (from IncClaimReduction in Stage 6) - // These are at r_cycle_stage6 only (length log_T) let (_ram_inc_point, ram_inc_claim) = self.opening_accumulator.get_committed_polynomial_opening( CommittedPolynomial::RamInc, @@ -1347,9 +1355,7 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip #[cfg(test)] { - // Verify that Inc openings are at the same point as r_cycle from HammingWeightClaimReduction let r_cycle_stage6 = &opening_point.r[log_k_chunk..]; - debug_assert_eq!( _ram_inc_point.r.as_slice(), r_cycle_stage6, @@ -1362,16 +1368,11 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip ); } - // Apply Lagrange factor for dense polys: ∏_{i, ProofTranscrip polynomial_claims.push((CommittedPolynomial::RamRa(i), claim)); } - // Advice polynomials: TrustedAdvice and UntrustedAdvice (from AdviceClaimReduction in Stage 6) - // These are committed 
with smaller dimensions, so we apply Lagrange factors to embed - // them in the top-left block of the main Dory matrix. if let Some((advice_point, advice_claim)) = self .opening_accumulator .get_advice_opening(AdviceKind::Trusted, SumcheckId::AdviceClaimReduction) @@ -1433,24 +1431,41 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip )); } - // 2. Sample gamma and compute powers for RLC + // Sample gamma and compute powers for RLC let claims: Vec = polynomial_claims.iter().map(|(_, c)| *c).collect(); self.transcript.append_scalars(b"rlc_claims", &claims); let gamma_powers: Vec = self.transcript.challenge_scalar_powers(claims.len()); - // Build DoryOpeningState - let state = DoryOpeningState { - opening_point: opening_point.r.clone(), - gamma_powers, - polynomial_claims, - }; + // Accumulate gamma coefficients per unique polynomial (BTreeMap orders by CommittedPolynomial) + let mut rlc_map = std::collections::BTreeMap::new(); + for (gamma, (poly, claim)) in gamma_powers.iter().zip(polynomial_claims.iter()) { + let entry = rlc_map.entry(*poly).or_insert((F::zero(), F::zero())); + entry.0 += *gamma; + entry.1 = *claim; + } + + let (poly_ids, coeffs_and_claims): (Vec, Vec<(F, F)>) = + rlc_map.into_iter().collect(); + let (coeffs, sorted_claims): (Vec, Vec) = coeffs_and_claims.into_iter().unzip(); + + // Collect per-polynomial hints and commitments in the same order + let mut hint_map = opening_proof_hints; + let hints: Vec = poly_ids + .iter() + .map(|id| hint_map.remove(id).unwrap()) + .collect(); + let mut commit_map = commitment_map; + let commitment_refs: Vec = poly_ids + .iter() + .map(|id| commit_map.remove(id).unwrap()) + .collect(); + let commitment_ref_slice: Vec<&PCS::Commitment> = commitment_refs.iter().collect(); let streaming_data = Arc::new(RLCStreamingData { bytecode: Arc::clone(&self.preprocessing.shared.bytecode), memory_layout: self.preprocessing.shared.memory_layout.clone(), }); - // Build advice polynomials map for RLC let mut 
advice_polys = HashMap::new(); if let Some(poly) = self.advice.trusted_advice_polynomial.take() { advice_polys.insert(CommittedPolynomial::TrustedAdvice, poly); @@ -1459,21 +1474,22 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip advice_polys.insert(CommittedPolynomial::UntrustedAdvice, poly); } - // Build streaming RLC polynomial directly (no witness poly regeneration!) - // Use materialized trace (default, single pass) instead of lazy trace - let (joint_poly, hint) = state.build_streaming_rlc::( - self.one_hot_params.clone(), - TraceSource::Materialized(Arc::clone(&self.trace)), + let poly_source = StreamingBatchSource { + one_hot_params: self.one_hot_params.clone(), + trace_source: TraceSource::Materialized(Arc::clone(&self.trace)), streaming_data, - opening_proof_hints, advice_polys, - ); + poly_ids, + }; - PCS::prove( + PCS::batch_prove( &self.preprocessing.generators, - &joint_poly, + &poly_source, + hints, + &commitment_ref_slice, &opening_point.r, - Some(hint), + &sorted_claims, + &coeffs, &mut self.transcript, ) } diff --git a/jolt-core/src/zkvm/verifier.rs b/jolt-core/src/zkvm/verifier.rs index aa4709c4ba..18c1d60895 100644 --- a/jolt-core/src/zkvm/verifier.rs +++ b/jolt-core/src/zkvm/verifier.rs @@ -51,8 +51,8 @@ use crate::zkvm::{ use crate::{ field::JoltField, poly::opening_proof::{ - compute_advice_lagrange_factor, DoryOpeningState, OpeningAccumulator, OpeningPoint, - SumcheckId, VerifierOpeningAccumulator, + compute_advice_lagrange_factor, OpeningAccumulator, OpeningPoint, SumcheckId, + VerifierOpeningAccumulator, }, pprof_scope, subprotocols::{ @@ -639,87 +639,50 @@ impl<'a, F: JoltField, PCS: CommitmentScheme, ProofTranscript: Transc self.transcript.append_scalars(b"rlc_claims", &claims); let gamma_powers: Vec = self.transcript.challenge_scalar_powers(claims.len()); - // Build state for computing joint commitment/claim - let state = DoryOpeningState { - opening_point: opening_point.r.clone(), - gamma_powers: 
gamma_powers.clone(), - polynomial_claims, - }; + // Accumulate gamma coefficients per unique polynomial (BTreeMap for deterministic ordering) + let mut rlc_map = std::collections::BTreeMap::new(); + for (gamma, (poly, claim)) in gamma_powers.iter().zip(polynomial_claims.iter()) { + let entry = rlc_map.entry(*poly).or_insert((F::zero(), F::zero())); + entry.0 += *gamma; + entry.1 = *claim; + } + + let (poly_ids, coeffs_and_claims): (Vec, Vec<(F, F)>) = + rlc_map.into_iter().collect(); + let (coeffs, sorted_claims): (Vec, Vec) = coeffs_and_claims.into_iter().unzip(); - // Build commitments map - let mut commitments_map = HashMap::new(); + // Build commitments map and collect in same order as poly_ids + let mut commitments_map: HashMap = HashMap::new(); for (polynomial, commitment) in all_committed_polynomials(&self.one_hot_params) .into_iter() .zip_eq(&self.proof.commitments) { commitments_map.insert(polynomial, commitment.clone()); } - - // Add advice commitments if they're part of the batch if let Some(ref commitment) = self.trusted_advice_commitment { - if state - .polynomial_claims - .iter() - .any(|(p, _)| *p == CommittedPolynomial::TrustedAdvice) - { - commitments_map.insert(CommittedPolynomial::TrustedAdvice, commitment.clone()); - } + commitments_map.insert(CommittedPolynomial::TrustedAdvice, commitment.clone()); } if let Some(ref commitment) = self.proof.untrusted_advice_commitment { - if state - .polynomial_claims - .iter() - .any(|(p, _)| *p == CommittedPolynomial::UntrustedAdvice) - { - commitments_map.insert(CommittedPolynomial::UntrustedAdvice, commitment.clone()); - } + commitments_map.insert(CommittedPolynomial::UntrustedAdvice, commitment.clone()); } - // Compute joint commitment: Σ γ_i · C_i - let joint_commitment = self.compute_joint_commitment(&mut commitments_map, &state); - - // Compute joint claim: Σ γ_i · claim_i - let joint_claim: F = gamma_powers + let commitment_refs: Vec = poly_ids .iter() - .zip(claims.iter()) - .map(|(gamma, claim)| 
*gamma * claim) - .sum(); + .map(|id| commitments_map.remove(id).unwrap()) + .collect(); + let commitment_ref_slice: Vec<&PCS::Commitment> = commitment_refs.iter().collect(); - // Verify opening - PCS::verify( + PCS::batch_verify( &self.proof.joint_opening_proof, &self.preprocessing.generators, &mut self.transcript, &opening_point.r, - &joint_claim, - &joint_commitment, + &commitment_ref_slice, + &sorted_claims, + &coeffs, ) .context("Stage 8") } - - /// Compute joint commitment for the batch opening. - fn compute_joint_commitment( - &self, - commitment_map: &mut HashMap, - state: &DoryOpeningState, - ) -> PCS::Commitment { - // Accumulate gamma coefficients per polynomial - let mut rlc_map = HashMap::new(); - for (gamma, (poly, _claim)) in state - .gamma_powers - .iter() - .zip(state.polynomial_claims.iter()) - { - *rlc_map.entry(*poly).or_insert(F::zero()) += *gamma; - } - - let (coeffs, commitments): (Vec, Vec) = rlc_map - .into_iter() - .map(|(k, v)| (v, commitment_map.remove(&k).unwrap())) - .unzip(); - - PCS::combine_commitments(&commitments, &coeffs) - } } #[derive(Debug, Clone)] From c713eb07ce7c07666d296dbe99b36957a0661239 Mon Sep 17 00:00:00 2001 From: Quang Dao Date: Sat, 28 Feb 2026 14:11:53 -0800 Subject: [PATCH 2/5] refactor: make CommitmentScheme methods take &self, add Config/from_proof/config Convert all crypto methods (commit, batch_commit, prove, verify, batch_prove, batch_verify) and StreamingCommitmentScheme methods from static to instance methods (&self). Add opaque Config associated type, from_proof() for reconstructing PCS from proof, and config() accessor. Require Default on CommitmentScheme for ergonomic instance creation at call sites. DoryCommitmentScheme now carries a layout field and wraps its batched proof in DoryBatchedProof (proof + layout). Methods still read DoryGlobals internally; the instance state will replace globals in a follow-up. 
Made-with: Cursor --- jolt-core/benches/commit.rs | 8 +-- .../src/poly/commitment/commitment_scheme.rs | 29 ++++++++-- .../poly/commitment/dory/commitment_scheme.rs | 51 +++++++++++++---- jolt-core/src/poly/commitment/dory/mod.rs | 2 +- jolt-core/src/poly/commitment/dory/tests.rs | 56 +++++++++---------- jolt-core/src/poly/commitment/hyperkzg.rs | 33 ++++++++++- jolt-core/src/poly/commitment/mock.rs | 35 +++++++++++- jolt-core/src/zkvm/prover.rs | 10 ++-- jolt-core/src/zkvm/verifier.rs | 2 +- jolt-core/src/zkvm/witness.rs | 11 ++-- jolt-sdk/macros/src/lib.rs | 2 +- 11 files changed, 173 insertions(+), 66 deletions(-) diff --git a/jolt-core/benches/commit.rs b/jolt-core/benches/commit.rs index 046935f9f5..0e45bac140 100644 --- a/jolt-core/benches/commit.rs +++ b/jolt-core/benches/commit.rs @@ -20,7 +20,7 @@ fn benchmark_dory_dense(c: &mut Criterion, name: &str, k: usize, t: usize) { c.bench_function(&format!("{name} Dory commit_rows"), |b| { b.iter(|| { let _ = globals; - DoryCommitmentScheme::commit(&poly, &setup); + DoryCommitmentScheme::default().commit(&poly, &setup); }); }); } @@ -43,9 +43,9 @@ fn benchmark_dory_one_hot_batch(c: &mut Criterion, name: &str, k: usize, t: usiz c.bench_function(&format!("{name} Dory one-hot commit"), |b| { b.iter(|| { let _ = globals; - DoryCommitmentScheme::batch_commit(&polys, &setup); + DoryCommitmentScheme::default().batch_commit(&polys, &setup); // polys.par_iter().for_each(|poly| { - // DoryCommitmentScheme::commit(&poly, &setup); + // DoryCommitmentScheme::default().commit(&poly, &setup); // }); }); }); @@ -75,7 +75,7 @@ fn benchmark_dory_mixed_batch(c: &mut Criterion, name: &str, k: usize, t: usize) c.bench_function(&format!("{name} Dory mixed batch commit"), |b| { b.iter(|| { let _ = globals; - DoryCommitmentScheme::batch_commit(&polys, &setup); + DoryCommitmentScheme::default().batch_commit(&polys, &setup); }); }); } diff --git a/jolt-core/src/poly/commitment/commitment_scheme.rs 
b/jolt-core/src/poly/commitment/commitment_scheme.rs index 4a022e87c4..6771d62207 100644 --- a/jolt-core/src/poly/commitment/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/commitment_scheme.rs @@ -10,8 +10,10 @@ use crate::{ utils::{errors::ProofVerifyError, small_scalar::SmallScalar}, }; -pub trait CommitmentScheme: Clone + Sync + Send + 'static { +pub trait CommitmentScheme: Clone + Sync + Send + Default + 'static { type Field: JoltField + Sized; + /// PCS-specific configuration carried by the instance. Opaque to generic code. + type Config: Clone + Sync + Send + CanonicalSerialize + CanonicalDeserialize; type ProverSetup: Clone + Sync + Send + Debug + CanonicalSerialize + CanonicalDeserialize; type VerifierSetup: Clone + Sync + Send + Debug + CanonicalSerialize + CanonicalDeserialize; type Commitment: Default @@ -33,12 +35,20 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { fn setup_verifier(setup: &Self::ProverSetup) -> Self::VerifierSetup; + /// Reconstruct a PCS instance from a batched proof (e.g. for the verifier to + /// recover PCS-specific configuration serialized during proving). 
+ fn from_proof(proof: &Self::BatchedProof) -> Self; + + fn config(&self) -> &Self::Config; + fn commit( + &self, poly: &MultilinearPolynomial, setup: &Self::ProverSetup, ) -> (Self::Commitment, Self::OpeningProofHint); fn batch_commit( + &self, polys: &[U], gens: &Self::ProverSetup, ) -> Vec<(Self::Commitment, Self::OpeningProofHint)> @@ -46,6 +56,7 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { U: Borrow> + Sync; fn prove( + &self, setup: &Self::ProverSetup, poly: &MultilinearPolynomial, opening_point: &[::Challenge], @@ -55,6 +66,7 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { ) -> Self::Proof; fn verify( + &self, proof: &Self::Proof, setup: &Self::VerifierSetup, transcript: &mut ProofTranscript, @@ -65,6 +77,7 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { #[allow(clippy::too_many_arguments)] fn batch_prove>( + &self, setup: &Self::ProverSetup, poly_source: &S, hints: Vec, @@ -75,8 +88,9 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { transcript: &mut ProofTranscript, ) -> Self::BatchedProof; - /// Verifies a batch opening proof for multiple polynomials evaluated at a single point. 
+ #[allow(clippy::too_many_arguments)] fn batch_verify( + &self, proof: &Self::BatchedProof, setup: &Self::VerifierSetup, transcript: &mut ProofTranscript, @@ -93,18 +107,21 @@ pub trait StreamingCommitmentScheme: CommitmentScheme { /// The type representing chunk state (tier 1 commitments) type ChunkState: Send + Sync + Clone + PartialEq + Debug; - /// Compute tier 1 commitment for a chunk of small scalar values - fn process_chunk(setup: &Self::ProverSetup, chunk: &[T]) -> Self::ChunkState; + fn process_chunk( + &self, + setup: &Self::ProverSetup, + chunk: &[T], + ) -> Self::ChunkState; - /// Compute tier 1 commitment for a chunk of one-hot values fn process_chunk_onehot( + &self, setup: &Self::ProverSetup, onehot_k: usize, chunk: &[Option], ) -> Self::ChunkState; - /// Compute tier 2 commitment from accumulated tier 1 commitments fn aggregate_chunks( + &self, setup: &Self::ProverSetup, onehot_k: Option, tier1_commitments: &[Self::ChunkState], diff --git a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs index 6727d91840..8e2c132dbb 100644 --- a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs @@ -6,6 +6,7 @@ use super::wrappers::{ jolt_to_ark, ArkDoryProof, ArkFr, ArkG1, ArkGT, ArkworksProverSetup, ArkworksVerifierSetup, JoltToDoryTranscript, BN254, }; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use crate::{ field::JoltField, poly::commitment::commitment_scheme::{CommitmentScheme, StreamingCommitmentScheme}, @@ -27,16 +28,25 @@ use rayon::prelude::*; use sha3::{Digest, Sha3_256}; use tracing::trace_span; -#[derive(Clone)] -pub struct DoryCommitmentScheme; +#[derive(Clone, Default)] +pub struct DoryCommitmentScheme { + pub layout: DoryLayout, +} + +#[derive(CanonicalSerialize, CanonicalDeserialize)] +pub struct DoryBatchedProof { + pub proof: ArkDoryProof, + pub layout: DoryLayout, +} impl CommitmentScheme for 
DoryCommitmentScheme { type Field = ark_bn254::Fr; + type Config = DoryLayout; type ProverSetup = ArkworksProverSetup; type VerifierSetup = ArkworksVerifierSetup; type Commitment = ArkGT; type Proof = ArkDoryProof; - type BatchedProof = Vec; + type BatchedProof = DoryBatchedProof; type OpeningProofHint = Vec; fn setup_prover(max_num_vars: usize) -> Self::ProverSetup { @@ -63,7 +73,18 @@ impl CommitmentScheme for DoryCommitmentScheme { setup.to_verifier_setup() } + fn from_proof(proof: &DoryBatchedProof) -> Self { + Self { + layout: proof.layout, + } + } + + fn config(&self) -> &DoryLayout { + &self.layout + } + fn commit( + &self, poly: &MultilinearPolynomial, setup: &Self::ProverSetup, ) -> (Self::Commitment, Self::OpeningProofHint) { @@ -85,6 +106,7 @@ impl CommitmentScheme for DoryCommitmentScheme { } fn batch_commit( + &self, polys: &[U], gens: &Self::ProverSetup, ) -> Vec<(Self::Commitment, Self::OpeningProofHint)> @@ -95,11 +117,12 @@ impl CommitmentScheme for DoryCommitmentScheme { polys .par_iter() - .map(|poly| Self::commit(poly.borrow(), gens)) + .map(|poly| self.commit(poly.borrow(), gens)) .collect() } fn prove( + &self, setup: &Self::ProverSetup, poly: &MultilinearPolynomial, opening_point: &[::Challenge], @@ -110,7 +133,7 @@ impl CommitmentScheme for DoryCommitmentScheme { let _span = trace_span!("DoryCommitmentScheme::prove").entered(); let row_commitments = hint.unwrap_or_else(|| { - let (_commitment, row_commitments) = Self::commit(poly, setup); + let (_commitment, row_commitments) = self.commit(poly, setup); row_commitments }); @@ -146,6 +169,7 @@ impl CommitmentScheme for DoryCommitmentScheme { } fn verify( + &self, proof: &Self::Proof, setup: &Self::VerifierSetup, transcript: &mut ProofTranscript, @@ -184,6 +208,7 @@ impl CommitmentScheme for DoryCommitmentScheme { } fn batch_prove>( + &self, setup: &Self::ProverSetup, poly_source: &S, hints: Vec, @@ -196,7 +221,7 @@ impl CommitmentScheme for DoryCommitmentScheme { let joint_poly = 
poly_source.build_joint_polynomial(coeffs); let combined_hint = Self::combine_hints_internal(hints, coeffs); let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); - let proof = Self::prove( + let proof = self.prove( setup, &joint_poly, opening_point, @@ -204,10 +229,14 @@ impl CommitmentScheme for DoryCommitmentScheme { transcript, &joint_commitment, ); - vec![proof] + DoryBatchedProof { + proof, + layout: DoryGlobals::get_layout(), + } } fn batch_verify( + &self, proof: &Self::BatchedProof, setup: &Self::VerifierSetup, transcript: &mut ProofTranscript, @@ -218,8 +247,8 @@ impl CommitmentScheme for DoryCommitmentScheme { ) -> Result<(), ProofVerifyError> { let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); let joint_claim: ark_bn254::Fr = coeffs.iter().zip(claims).map(|(c, v)| *c * *v).sum(); - Self::verify( - &proof[0], + self.verify( + &proof.proof, setup, transcript, opening_point, @@ -291,7 +320,7 @@ impl StreamingCommitmentScheme for DoryCommitmentScheme { type ChunkState = Vec; // Tier 1 commitment chunks #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::compute_tier1_commitment")] - fn process_chunk(setup: &Self::ProverSetup, chunk: &[T]) -> Self::ChunkState { + fn process_chunk(&self, setup: &Self::ProverSetup, chunk: &[T]) -> Self::ChunkState { debug_assert_eq!(chunk.len(), DoryGlobals::get_num_columns()); let row_len = DoryGlobals::get_num_columns(); @@ -313,6 +342,7 @@ impl StreamingCommitmentScheme for DoryCommitmentScheme { name = "DoryCommitmentScheme::compute_tier1_commitment_onehot" )] fn process_chunk_onehot( + &self, setup: &Self::ProverSetup, onehot_k: usize, chunk: &[Option], @@ -348,6 +378,7 @@ impl StreamingCommitmentScheme for DoryCommitmentScheme { #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::compute_tier2_commitment")] fn aggregate_chunks( + &self, setup: &Self::ProverSetup, onehot_k: Option, chunks: &[Self::ChunkState], diff --git 
a/jolt-core/src/poly/commitment/dory/mod.rs b/jolt-core/src/poly/commitment/dory/mod.rs index a9b785bbb6..11f7f9a58a 100644 --- a/jolt-core/src/poly/commitment/dory/mod.rs +++ b/jolt-core/src/poly/commitment/dory/mod.rs @@ -11,7 +11,7 @@ mod wrappers; #[cfg(test)] mod tests; -pub use commitment_scheme::DoryCommitmentScheme; +pub use commitment_scheme::{DoryBatchedProof, DoryCommitmentScheme}; pub use dory_globals::{DoryContext, DoryGlobals, DoryLayout}; pub use jolt_dory_routines::{JoltG1Routines, JoltG2Routines}; pub use wrappers::{ diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs index aa8c151811..d11289b20b 100644 --- a/jolt-core/src/poly/commitment/dory/tests.rs +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -27,7 +27,7 @@ mod tests { .map(|_| ::Challenge::random(&mut rng)) .collect(); - let (commitment, row_commitments) = DoryCommitmentScheme::commit(&poly, prover_setup); + let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, prover_setup); let evaluation = as PolynomialEvaluation>::evaluate( &poly, @@ -35,7 +35,7 @@ mod tests { ); let mut prove_transcript = Blake2bTranscript::new(b"dory_test"); - let proof = DoryCommitmentScheme::prove( + let proof = DoryCommitmentScheme::default().prove( prover_setup, &poly, &opening_point, @@ -45,7 +45,7 @@ mod tests { ); let mut verify_transcript = Blake2bTranscript::new(b"dory_test"); - let verification_result = DoryCommitmentScheme::verify( + let verification_result = DoryCommitmentScheme::default().verify( &proof, verifier_setup, &mut verify_transcript, @@ -256,13 +256,13 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup); - let (commitment, row_commitments) = DoryCommitmentScheme::commit(&poly, &prover_setup); + let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, &prover_setup); let mut 
prove_transcript = Blake2bTranscript::new(DoryCommitmentScheme::protocol_name()); let correct_evaluation = poly.evaluate(&opening_point); - let proof = DoryCommitmentScheme::prove( + let proof = DoryCommitmentScheme::default().prove( &prover_setup, &poly, &opening_point, @@ -277,7 +277,7 @@ mod tests { let mut verify_transcript = Blake2bTranscript::new(DoryCommitmentScheme::protocol_name()); - let result = DoryCommitmentScheme::verify( + let result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, @@ -300,7 +300,7 @@ mod tests { let mut verify_transcript = Blake2bTranscript::new(DoryCommitmentScheme::protocol_name()); - let result = DoryCommitmentScheme::verify( + let result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, @@ -321,11 +321,11 @@ mod tests { let wrong_coeffs: Vec = (0..num_coeffs).map(|_| Fr::rand(&mut rng)).collect(); let wrong_poly = MultilinearPolynomial::LargeScalars(DensePolynomial::new(wrong_coeffs)); - let (wrong_commitment, _) = DoryCommitmentScheme::commit(&wrong_poly, &prover_setup); + let (wrong_commitment, _) = DoryCommitmentScheme::default().commit(&wrong_poly, &prover_setup); let mut verify_transcript = Blake2bTranscript::new(DoryCommitmentScheme::protocol_name()); - let result = DoryCommitmentScheme::verify( + let result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, @@ -343,7 +343,7 @@ mod tests { // Test 4: Use wrong domain in transcript { let mut verify_transcript = Blake2bTranscript::new(b"wrong_domain"); - let result = DoryCommitmentScheme::verify( + let result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, @@ -362,7 +362,7 @@ mod tests { { let mut verify_transcript = Blake2bTranscript::new(DoryCommitmentScheme::protocol_name()); - let result = DoryCommitmentScheme::verify( + let result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut 
verify_transcript, @@ -414,7 +414,7 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup); - let (commitment, row_commitments) = DoryCommitmentScheme::commit(&poly, &prover_setup); + let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, &prover_setup); let evaluation = as PolynomialEvaluation>::evaluate( &poly, @@ -422,7 +422,7 @@ mod tests { ); let mut prove_transcript = Blake2bTranscript::new(b"dory_test"); - let proof = DoryCommitmentScheme::prove( + let proof = DoryCommitmentScheme::default().prove( &prover_setup, &poly, &opening_point, @@ -432,7 +432,7 @@ mod tests { ); let mut verify_transcript = Blake2bTranscript::new(b"dory_test"); - let verification_result = DoryCommitmentScheme::verify( + let verification_result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, @@ -474,7 +474,7 @@ mod tests { // Step 2: Commit to each polynomial let commitments_and_hints: Vec<_> = polys .iter() - .map(|poly| DoryCommitmentScheme::commit(poly, &prover_setup)) + .map(|poly| DoryCommitmentScheme::default().commit(poly, &prover_setup)) .collect(); let commitments: Vec<_> = commitments_and_hints.iter().map(|(c, _)| *c).collect(); @@ -503,7 +503,7 @@ mod tests { let combined_poly = MultilinearPolynomial::from(combined_poly.Z); let mut prove_transcript = Blake2bTranscript::new(b"dory_homomorphic_test"); - let proof = DoryCommitmentScheme::prove( + let proof = DoryCommitmentScheme::default().prove( &prover_setup, &combined_poly, &opening_point, @@ -513,7 +513,7 @@ mod tests { ); let mut verify_transcript = Blake2bTranscript::new(b"dory_homomorphic_test"); - let result = DoryCommitmentScheme::verify( + let result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, @@ -551,7 +551,7 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup 
= DoryCommitmentScheme::setup_verifier(&prover_setup); - let commitments_and_hints = DoryCommitmentScheme::batch_commit(&polys, &prover_setup); + let commitments_and_hints = DoryCommitmentScheme::default().batch_commit(&polys, &prover_setup); let commitments: Vec<_> = commitments_and_hints.iter().map(|(c, _)| *c).collect(); let hints: Vec<_> = commitments_and_hints.into_iter().map(|(_, h)| h).collect(); @@ -578,7 +578,7 @@ mod tests { let combined_poly = MultilinearPolynomial::from(combined_poly.Z); let (direct_commitment, direct_hint) = - DoryCommitmentScheme::commit(&combined_poly, &prover_setup); + DoryCommitmentScheme::default().commit(&combined_poly, &prover_setup); assert_eq!( combined_commitment, direct_commitment, @@ -586,7 +586,7 @@ mod tests { ); let mut prove_transcript = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); - let proof = DoryCommitmentScheme::prove( + let proof = DoryCommitmentScheme::default().prove( &prover_setup, &combined_poly, &opening_point, @@ -596,7 +596,7 @@ mod tests { ); let mut verify_transcript = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); - let result = DoryCommitmentScheme::verify( + let result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, @@ -611,7 +611,7 @@ mod tests { ); let mut prove_transcript2 = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); - let proof2 = DoryCommitmentScheme::prove( + let proof2 = DoryCommitmentScheme::default().prove( &prover_setup, &combined_poly, &opening_point, @@ -621,7 +621,7 @@ mod tests { ); let mut verify_transcript2 = Blake2bTranscript::new(b"dory_batch_commit_e2e_test"); - let result2 = DoryCommitmentScheme::verify( + let result2 = DoryCommitmentScheme::default().verify( &proof2, &verifier_setup, &mut verify_transcript2, @@ -724,11 +724,11 @@ mod tests { DoryGlobals::set_layout(DoryLayout::CycleMajor); let poly1 = MultilinearPolynomial::LargeScalars(DensePolynomial::new(coeffs.clone())); - let (commitment_cycle_major, _) = 
DoryCommitmentScheme::commit(&poly1, &prover_setup); + let (commitment_cycle_major, _) = DoryCommitmentScheme::default().commit(&poly1, &prover_setup); DoryGlobals::set_layout(DoryLayout::AddressMajor); let poly2 = MultilinearPolynomial::LargeScalars(DensePolynomial::new(coeffs)); - let (commitment_addr_major, _) = DoryCommitmentScheme::commit(&poly2, &prover_setup); + let (commitment_addr_major, _) = DoryCommitmentScheme::default().commit(&poly2, &prover_setup); assert_eq!( commitment_cycle_major, commitment_addr_major, @@ -807,7 +807,7 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup); - let (commitment, row_commitments) = DoryCommitmentScheme::commit(&poly, &prover_setup); + let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, &prover_setup); let evaluation = as PolynomialEvaluation>::evaluate( &poly, @@ -815,7 +815,7 @@ mod tests { ); let mut prove_transcript = Blake2bTranscript::new(b"dory_test"); - let proof = DoryCommitmentScheme::prove( + let proof = DoryCommitmentScheme::default().prove( &prover_setup, &poly, &opening_point, @@ -825,7 +825,7 @@ mod tests { ); let mut verify_transcript = Blake2bTranscript::new(b"dory_test"); - let verification_result = DoryCommitmentScheme::verify( + let verification_result = DoryCommitmentScheme::default().verify( &proof, &verifier_setup, &mut verify_transcript, diff --git a/jolt-core/src/poly/commitment/hyperkzg.rs b/jolt-core/src/poly/commitment/hyperkzg.rs index 1d30c39822..c9ff72166a 100644 --- a/jolt-core/src/poly/commitment/hyperkzg.rs +++ b/jolt-core/src/poly/commitment/hyperkzg.rs @@ -276,6 +276,14 @@ pub struct HyperKZG { _phantom: PhantomData
<P>
, } +impl Default for HyperKZG
<P>
{ + fn default() -> Self { + Self { + _phantom: PhantomData, + } + } +} + impl HyperKZG
<P>
where
<P as Pairing>
::ScalarField: JoltField, @@ -416,6 +424,7 @@ where
<P as Pairing>
::ScalarField: JoltField, { type Field = P::ScalarField; + type Config = (); type ProverSetup = HyperKZGProverKey
<P>
; type VerifierSetup = HyperKZGVerifierKey
<P>
; @@ -440,8 +449,17 @@ where } } + fn from_proof(_proof: &Self::BatchedProof) -> Self { + Self::default() + } + + fn config(&self) -> &() { + &() + } + #[tracing::instrument(skip_all, name = "HyperKZG::commit")] fn commit( + &self, poly: &MultilinearPolynomial, setup: &Self::ProverSetup, ) -> (Self::Commitment, Self::OpeningProofHint) { @@ -458,6 +476,7 @@ where #[tracing::instrument(skip_all, name = "HyperKZG::batch_commit")] fn batch_commit( + &self, polys: &[U], gens: &Self::ProverSetup, ) -> Vec<(Self::Commitment, Self::OpeningProofHint)> @@ -472,6 +491,7 @@ where } fn prove( + &self, setup: &Self::ProverSetup, poly: &MultilinearPolynomial, opening_point: &[::Challenge], @@ -484,6 +504,7 @@ where } fn verify( + &self, proof: &Self::Proof, setup: &Self::VerifierSetup, transcript: &mut ProofTranscript, @@ -495,6 +516,7 @@ where } fn batch_prove>( + &self, setup: &Self::ProverSetup, poly_source: &S, _hints: Vec, @@ -506,7 +528,7 @@ where ) -> Self::BatchedProof { let joint_poly = poly_source.build_joint_polynomial(coeffs); let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); - Self::prove( + self.prove( setup, &joint_poly, opening_point, @@ -517,6 +539,7 @@ where } fn batch_verify( + &self, proof: &Self::BatchedProof, setup: &Self::VerifierSetup, transcript: &mut ProofTranscript, @@ -565,10 +588,15 @@ where { type ChunkState = (); - fn process_chunk(_setup: &Self::ProverSetup, _chunk: &[T]) -> Self::ChunkState { + fn process_chunk( + &self, + _setup: &Self::ProverSetup, + _chunk: &[T], + ) -> Self::ChunkState { } fn process_chunk_onehot( + &self, _setup: &Self::ProverSetup, _onehot_k: usize, _chunk: &[Option], @@ -576,6 +604,7 @@ where } fn aggregate_chunks( + &self, _setup: &Self::ProverSetup, _onehot_k: Option, _tier1_commitments: &[Self::ChunkState], diff --git a/jolt-core/src/poly/commitment/mock.rs b/jolt-core/src/poly/commitment/mock.rs index e14e8425cd..a44d14dc61 100644 --- a/jolt-core/src/poly/commitment/mock.rs +++ 
b/jolt-core/src/poly/commitment/mock.rs @@ -18,6 +18,14 @@ pub struct MockCommitScheme { _marker: PhantomData, } +impl Default for MockCommitScheme { + fn default() -> Self { + Self { + _marker: PhantomData, + } + } +} + #[derive(Default, Debug, PartialEq, Clone, CanonicalDeserialize, CanonicalSerialize)] pub struct MockCommitment { _field: PhantomData, @@ -33,6 +41,7 @@ where F: JoltField, { type Field = F; + type Config = (); type ProverSetup = (); type VerifierSetup = (); type Commitment = MockCommitment; @@ -44,7 +53,16 @@ where fn setup_verifier(_setup: &Self::ProverSetup) -> Self::VerifierSetup {} + fn from_proof(_proof: &Self::BatchedProof) -> Self { + Self::default() + } + + fn config(&self) -> &() { + &() + } + fn commit( + &self, _poly: &MultilinearPolynomial, _setup: &Self::ProverSetup, ) -> (Self::Commitment, Self::OpeningProofHint) { @@ -52,19 +70,21 @@ where } fn batch_commit
<P>
( + &self, polys: &[P], gens: &Self::ProverSetup, ) -> Vec<(Self::Commitment, Self::OpeningProofHint)> where - P: Borrow>, + P: Borrow> + Sync, { polys .iter() - .map(|poly| (Self::commit(poly.borrow(), gens).0, ())) + .map(|poly| (self.commit(poly.borrow(), gens).0, ())) .collect() } fn prove( + &self, _setup: &Self::ProverSetup, _poly: &MultilinearPolynomial, opening_point: &[::Challenge], @@ -78,6 +98,7 @@ where } fn verify( + &self, proof: &Self::Proof, _setup: &Self::VerifierSetup, _transcript: &mut ProofTranscript, @@ -90,6 +111,7 @@ where } fn batch_prove>( + &self, _setup: &Self::ProverSetup, _poly_source: &S, _hints: Vec, @@ -105,6 +127,7 @@ where } fn batch_verify( + &self, proof: &Self::BatchedProof, _setup: &Self::VerifierSetup, _transcript: &mut ProofTranscript, @@ -128,10 +151,15 @@ where { type ChunkState = (); - fn process_chunk(_setup: &Self::ProverSetup, _chunk: &[T]) -> Self::ChunkState { + fn process_chunk( + &self, + _setup: &Self::ProverSetup, + _chunk: &[T], + ) -> Self::ChunkState { } fn process_chunk_onehot( + &self, _setup: &Self::ProverSetup, _onehot_k: usize, _chunk: &[Option], @@ -139,6 +167,7 @@ where } fn aggregate_chunks( + &self, _setup: &Self::ProverSetup, _onehot_k: Option, _tier1_commitments: &[Self::ChunkState], diff --git a/jolt-core/src/zkvm/prover.rs b/jolt-core/src/zkvm/prover.rs index 72c3b7ffdc..64d425cfac 100644 --- a/jolt-core/src/zkvm/prover.rs +++ b/jolt-core/src/zkvm/prover.rs @@ -572,7 +572,7 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip &trace, Some(&self.one_hot_params), ); - PCS::commit(&witness, &self.preprocessing.generators) + PCS::default().commit(&witness, &self.preprocessing.generators) }) .unzip(); @@ -632,7 +632,7 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip .zip(&polys) .map(|(tier1_commitments, poly)| { let onehot_k = poly.get_onehot_k(&self.one_hot_params); - PCS::aggregate_chunks( + PCS::default().aggregate_chunks( &self.preprocessing.generators, 
onehot_k, &tier1_commitments, @@ -677,7 +677,7 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip let _guard = DoryGlobals::initialize_context(1, advice_len, DoryContext::UntrustedAdvice, None); let _ctx = DoryGlobals::with_context(DoryContext::UntrustedAdvice); - let (commitment, hint) = PCS::commit(&poly, &self.preprocessing.generators); + let (commitment, hint) = PCS::default().commit(&poly, &self.preprocessing.generators); self.transcript .append_serializable(b"untrusted_advice", &commitment); @@ -1482,7 +1482,7 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip poly_ids, }; - PCS::batch_prove( + PCS::default().batch_prove( &self.preprocessing.generators, &poly_source, hints, @@ -1629,7 +1629,7 @@ mod tests { DoryGlobals::initialize_context(1, advice_len, DoryContext::TrustedAdvice, None); let (commitment, hint) = { let _ctx = DoryGlobals::with_context(DoryContext::TrustedAdvice); - DoryCommitmentScheme::commit(&poly, &preprocessing.generators) + DoryCommitmentScheme::default().commit(&poly, &preprocessing.generators) }; (commitment, hint) } diff --git a/jolt-core/src/zkvm/verifier.rs b/jolt-core/src/zkvm/verifier.rs index 18c1d60895..895556c56f 100644 --- a/jolt-core/src/zkvm/verifier.rs +++ b/jolt-core/src/zkvm/verifier.rs @@ -672,7 +672,7 @@ impl<'a, F: JoltField, PCS: CommitmentScheme, ProofTranscript: Transc .collect(); let commitment_ref_slice: Vec<&PCS::Commitment> = commitment_refs.iter().collect(); - PCS::batch_verify( + PCS::default().batch_verify( &self.proof.joint_opening_proof, &self.preprocessing.generators, &mut self.transcript, diff --git a/jolt-core/src/zkvm/witness.rs b/jolt-core/src/zkvm/witness.rs index efcef73652..d017cd978e 100644 --- a/jolt-core/src/zkvm/witness.rs +++ b/jolt-core/src/zkvm/witness.rs @@ -71,6 +71,7 @@ impl CommittedPolynomial { F: JoltField, PCS: StreamingCommitmentScheme, { + let pcs = PCS::default(); match self { CommittedPolynomial::RdInc => { let row: Vec = row_cycles @@ 
-80,7 +81,7 @@ impl CommittedPolynomial { post_value as i128 - pre_value as i128 }) .collect(); - PCS::process_chunk(setup, &row) + pcs.process_chunk(setup, &row) } CommittedPolynomial::RamInc => { let row: Vec = row_cycles @@ -92,7 +93,7 @@ impl CommittedPolynomial { _ => 0, }) .collect(); - PCS::process_chunk(setup, &row) + pcs.process_chunk(setup, &row) } CommittedPolynomial::InstructionRa(idx) => { let row: Vec> = row_cycles @@ -102,7 +103,7 @@ impl CommittedPolynomial { Some(one_hot_params.lookup_index_chunk(lookup_index, *idx) as usize) }) .collect(); - PCS::process_chunk_onehot(setup, one_hot_params.k_chunk, &row) + pcs.process_chunk_onehot(setup, one_hot_params.k_chunk, &row) } CommittedPolynomial::BytecodeRa(idx) => { let row: Vec> = row_cycles @@ -112,7 +113,7 @@ impl CommittedPolynomial { Some(one_hot_params.bytecode_pc_chunk(pc, *idx) as usize) }) .collect(); - PCS::process_chunk_onehot(setup, one_hot_params.k_chunk, &row) + pcs.process_chunk_onehot(setup, one_hot_params.k_chunk, &row) } CommittedPolynomial::RamRa(idx) => { let row: Vec> = row_cycles @@ -125,7 +126,7 @@ impl CommittedPolynomial { .map(|address| one_hot_params.ram_address_chunk(address, *idx) as usize) }) .collect(); - PCS::process_chunk_onehot(setup, one_hot_params.k_chunk, &row) + pcs.process_chunk_onehot(setup, one_hot_params.k_chunk, &row) } CommittedPolynomial::TrustedAdvice | CommittedPolynomial::UntrustedAdvice => { panic!("Advice polynomials should not use streaming witness generation") diff --git a/jolt-sdk/macros/src/lib.rs b/jolt-sdk/macros/src/lib.rs index be4b4cce06..d4a1aa1db4 100644 --- a/jolt-sdk/macros/src/lib.rs +++ b/jolt-sdk/macros/src/lib.rs @@ -607,7 +607,7 @@ impl MacroBuilder { let _ctx = jolt::DoryGlobals::with_context(jolt::DoryContext::TrustedAdvice); let poly = MultilinearPolynomial::::from(trusted_advice_vec); - let (commitment, hint) = jolt::PCS::commit(&poly, &preprocessing.generators); + let (commitment, hint) = jolt::PCS::default().commit(&poly, 
&preprocessing.generators); (Some(commitment), Some(hint)) } From 901194b80f2237f1ff6e9e201f9121370795130c Mon Sep 17 00:00:00 2001 From: Quang Dao Date: Sat, 28 Feb 2026 14:50:00 -0800 Subject: [PATCH 3/5] refactor: replace DoryGlobals reads in commitment_scheme.rs with local computation Extract balanced_sigma_nu as a standalone function. Replace DoryGlobals::get_num_columns/get_max_num_rows reads in commit, prove, process_chunk, process_chunk_onehot, aggregate_chunks, and combine_hints_internal with values derived from polynomial/chunk sizes. Thread layout through reorder_opening_point_for_layout via self.layout. Made-with: Cursor --- .../poly/commitment/dory/commitment_scheme.rs | 63 +++++++++++-------- jolt-core/src/poly/commitment/dory/mod.rs | 2 +- jolt-core/src/poly/commitment/dory/tests.rs | 8 ++- 3 files changed, 43 insertions(+), 30 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs index 8e2c132dbb..2ec7e2cee0 100644 --- a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs @@ -39,6 +39,16 @@ pub struct DoryBatchedProof { pub layout: DoryLayout, } +/// Split `total_vars` into balanced `(sigma, nu)` where sigma = ceil(total_vars / 2) +/// and nu = total_vars - sigma. sigma is the number of column variables, +/// nu is the number of row variables. 
+#[inline] +pub fn balanced_sigma_nu(total_vars: usize) -> (usize, usize) { + let sigma = total_vars.div_ceil(2); + let nu = total_vars - sigma; + (sigma, nu) +} + impl CommitmentScheme for DoryCommitmentScheme { type Field = ark_bn254::Fr; type Config = DoryLayout; @@ -90,10 +100,8 @@ impl CommitmentScheme for DoryCommitmentScheme { ) -> (Self::Commitment, Self::OpeningProofHint) { let _span = trace_span!("DoryCommitmentScheme::commit").entered(); - let num_cols = DoryGlobals::get_num_columns(); - let num_rows = DoryGlobals::get_max_num_rows(); - let sigma = num_cols.log_2(); - let nu = num_rows.log_2(); + let total_vars = poly.len().log_2(); + let (sigma, nu) = balanced_sigma_nu(total_vars); let (tier_2, row_commitments) = as Polynomial< ArkFr, @@ -137,17 +145,16 @@ impl CommitmentScheme for DoryCommitmentScheme { row_commitments }); - let num_cols = DoryGlobals::get_num_columns(); - let num_rows = DoryGlobals::get_max_num_rows(); - let sigma = num_cols.log_2(); - let nu = num_rows.log_2(); + let total_vars = poly.len().log_2(); + let (sigma, nu) = balanced_sigma_nu(total_vars); - let reordered_point = reorder_opening_point_for_layout::(opening_point); + let reordered_point = + reorder_opening_point_for_layout::(self.layout, opening_point); // Dory uses the opposite endian-ness as Jolt let ark_point: Vec = reordered_point .iter() - .rev() // Reverse the order for Dory + .rev() .map(|p| { let f_val: ark_bn254::Fr = (*p).into(); jolt_to_ark(&f_val) @@ -179,7 +186,8 @@ impl CommitmentScheme for DoryCommitmentScheme { ) -> Result<(), ProofVerifyError> { let _span = trace_span!("DoryCommitmentScheme::verify").entered(); - let reordered_point = reorder_opening_point_for_layout::(opening_point); + let reordered_point = + reorder_opening_point_for_layout::(self.layout, opening_point); // Dory uses the opposite endian-ness as Jolt let ark_point: Vec = reordered_point @@ -219,7 +227,10 @@ impl CommitmentScheme for DoryCommitmentScheme { transcript: &mut ProofTranscript, ) 
-> Self::BatchedProof { let joint_poly = poly_source.build_joint_polynomial(coeffs); - let combined_hint = Self::combine_hints_internal(hints, coeffs); + let total_vars = joint_poly.len().log_2(); + let (_sigma, nu) = balanced_sigma_nu(total_vars); + let max_num_rows = 1 << nu; + let combined_hint = Self::combine_hints_internal(hints, coeffs, max_num_rows); let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); let proof = self.prove( setup, @@ -231,7 +242,7 @@ impl CommitmentScheme for DoryCommitmentScheme { ); DoryBatchedProof { proof, - layout: DoryGlobals::get_layout(), + layout: self.layout, } } @@ -268,12 +279,11 @@ impl DoryCommitmentScheme { pub(crate) fn combine_hints_internal( hints: Vec>, coeffs: &[ark_bn254::Fr], + max_num_rows: usize, ) -> Vec { - let num_rows = DoryGlobals::get_max_num_rows(); - - let mut rlc_hint = vec![ArkG1(G1Projective::zero()); num_rows]; + let mut rlc_hint = vec![ArkG1(G1Projective::zero()); max_num_rows]; for (coeff, mut hint) in coeffs.iter().zip(hints.into_iter()) { - hint.resize(num_rows, ArkG1(G1Projective::zero())); + hint.resize(max_num_rows, ArkG1(G1Projective::zero())); // SAFETY: ArkG1 is repr(transparent) over G1Projective let row_commitments: &mut [G1Projective] = unsafe { @@ -317,13 +327,11 @@ impl DoryCommitmentScheme { } impl StreamingCommitmentScheme for DoryCommitmentScheme { - type ChunkState = Vec; // Tier 1 commitment chunks + type ChunkState = Vec; #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::compute_tier1_commitment")] fn process_chunk(&self, setup: &Self::ProverSetup, chunk: &[T]) -> Self::ChunkState { - debug_assert_eq!(chunk.len(), DoryGlobals::get_num_columns()); - - let row_len = DoryGlobals::get_num_columns(); + let row_len = chunk.len(); let g1_slice = unsafe { std::slice::from_raw_parts(setup.g1_vec.as_ptr(), setup.g1_vec.len()) }; @@ -348,8 +356,8 @@ impl StreamingCommitmentScheme for DoryCommitmentScheme { chunk: &[Option], ) -> Self::ChunkState { let K = 
onehot_k; + let row_len = chunk.len(); - let row_len = DoryGlobals::get_num_columns(); let g1_slice = unsafe { std::slice::from_raw_parts(setup.g1_vec.as_ptr(), setup.g1_vec.len()) }; @@ -384,10 +392,8 @@ impl StreamingCommitmentScheme for DoryCommitmentScheme { chunks: &[Self::ChunkState], ) -> (Self::Commitment, Self::OpeningProofHint) { if let Some(K) = onehot_k { - let row_len = DoryGlobals::get_num_columns(); - let T = DoryGlobals::get_T(); - let rows_per_k = T / row_len; - let num_rows = K * T / row_len; + let rows_per_k = chunks.len(); + let num_rows = K * rows_per_k; let mut row_commitments = vec![ArkG1(G1Projective::zero()); num_rows]; for (chunk_index, commitments) in chunks.iter().enumerate() { @@ -424,9 +430,12 @@ impl StreamingCommitmentScheme for DoryCommitmentScheme { /// /// For CycleMajor layout, returns the point unchanged. fn reorder_opening_point_for_layout( + layout: DoryLayout, opening_point: &[F::Challenge], ) -> Vec { - if DoryGlobals::get_layout() == DoryLayout::AddressMajor { + if layout == DoryLayout::AddressMajor { + // For AddressMajor, T is needed to split the point. + // Fall back to DoryGlobals for now; will be eliminated in Phase 2d. 
let log_T = DoryGlobals::get_T().log_2(); let log_K = opening_point.len().saturating_sub(log_T); let (r_address, r_cycle) = opening_point.split_at(log_K); diff --git a/jolt-core/src/poly/commitment/dory/mod.rs b/jolt-core/src/poly/commitment/dory/mod.rs index 11f7f9a58a..a8956eadc0 100644 --- a/jolt-core/src/poly/commitment/dory/mod.rs +++ b/jolt-core/src/poly/commitment/dory/mod.rs @@ -11,7 +11,7 @@ mod wrappers; #[cfg(test)] mod tests; -pub use commitment_scheme::{DoryBatchedProof, DoryCommitmentScheme}; +pub use commitment_scheme::{balanced_sigma_nu, DoryBatchedProof, DoryCommitmentScheme}; pub use dory_globals::{DoryContext, DoryGlobals, DoryLayout}; pub use jolt_dory_routines::{JoltG1Routines, JoltG2Routines}; pub use wrappers::{ diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs index d11289b20b..ddf74f4875 100644 --- a/jolt-core/src/poly/commitment/dory/tests.rs +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -486,7 +486,9 @@ mod tests { let commitment_refs: Vec<&ArkGT> = commitments.iter().collect(); let combined_commitment = DoryCommitmentScheme::combine_commitments_internal(&commitment_refs, &coeffs); - let combined_hint = DoryCommitmentScheme::combine_hints_internal(hints, &coeffs); + let (_, nu) = balanced_sigma_nu(num_vars); + let combined_hint = + DoryCommitmentScheme::combine_hints_internal(hints, &coeffs, 1 << nu); let opening_point: Vec<::Challenge> = (0..num_vars) .map(|_| ::Challenge::random(&mut rng)) @@ -561,7 +563,9 @@ mod tests { let commitment_refs: Vec<&ArkGT> = commitments.iter().collect(); let combined_commitment = DoryCommitmentScheme::combine_commitments_internal(&commitment_refs, &coeffs); - let combined_hint = DoryCommitmentScheme::combine_hints_internal(hints, &coeffs); + let (_, nu) = balanced_sigma_nu(num_vars); + let combined_hint = + DoryCommitmentScheme::combine_hints_internal(hints, &coeffs, 1 << nu); let opening_point: Vec<::Challenge> = (0..num_vars) .map(|_| 
::Challenge::random(&mut rng)) From a8e5f54ab1792ff4fb380d467df14a702df55b08 Mon Sep 17 00:00:00 2001 From: Quang Dao Date: Sat, 28 Feb 2026 16:33:02 -0800 Subject: [PATCH 4/5] fix: handle OneHot/RLC variants in MultilinearPolynomial::len() and lint cleanup Add missing match arms for OneHot and RLC variants in len()/original_len() which were panicking at runtime. Fix test_dory_one_hot_address_major to use AddressMajor layout on the DoryCommitmentScheme instance. Hoist FQ paths, remove commented-out code, move mid-function imports to top. Made-with: Cursor --- jolt-core/benches/commit.rs | 4 - .../poly/commitment/dory/commitment_scheme.rs | 8 +- jolt-core/src/poly/commitment/dory/tests.rs | 37 +++--- jolt-core/src/poly/commitment/hyperkzg.rs | 108 ------------------ jolt-core/src/poly/multilinear_polynomial.rs | 6 +- jolt-core/src/poly/rlc_polynomial.rs | 5 + jolt-core/src/zkvm/proof_serialization.rs | 2 +- jolt-core/src/zkvm/prover.rs | 4 +- jolt-core/src/zkvm/verifier.rs | 25 ++-- 9 files changed, 54 insertions(+), 145 deletions(-) diff --git a/jolt-core/benches/commit.rs b/jolt-core/benches/commit.rs index 0e45bac140..cf5ca05910 100644 --- a/jolt-core/benches/commit.rs +++ b/jolt-core/benches/commit.rs @@ -6,7 +6,6 @@ use jolt_core::utils::math::Math; use rand::Rng; use rand_chacha::ChaCha20Rng; use rand_core::{RngCore, SeedableRng}; -// use rayon::prelude::*; fn benchmark_dory_dense(c: &mut Criterion, name: &str, k: usize, t: usize) { let globals = DoryGlobals::initialize_context(k, t, DoryContext::Main, None); @@ -44,9 +43,6 @@ fn benchmark_dory_one_hot_batch(c: &mut Criterion, name: &str, k: usize, t: usiz b.iter(|| { let _ = globals; DoryCommitmentScheme::default().batch_commit(&polys, &setup); - // polys.par_iter().for_each(|poly| { - // DoryCommitmentScheme::default().commit(&poly, &setup); - // }); }); }); } diff --git a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs index 
2ec7e2cee0..7bfc45c67e 100644 --- a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs @@ -6,7 +6,6 @@ use super::wrappers::{ jolt_to_ark, ArkDoryProof, ArkFr, ArkG1, ArkGT, ArkworksProverSetup, ArkworksVerifierSetup, JoltToDoryTranscript, BN254, }; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use crate::{ field::JoltField, poly::commitment::commitment_scheme::{CommitmentScheme, StreamingCommitmentScheme}, @@ -18,6 +17,7 @@ use crate::{ use ark_bn254::{G1Affine, G1Projective}; use ark_ec::CurveGroup; use ark_ff::Zero; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use dory::primitives::{ arithmetic::{Group, PairingCurve}, poly::Polynomial, @@ -330,7 +330,11 @@ impl StreamingCommitmentScheme for DoryCommitmentScheme { type ChunkState = Vec; #[tracing::instrument(skip_all, name = "DoryCommitmentScheme::compute_tier1_commitment")] - fn process_chunk(&self, setup: &Self::ProverSetup, chunk: &[T]) -> Self::ChunkState { + fn process_chunk( + &self, + setup: &Self::ProverSetup, + chunk: &[T], + ) -> Self::ChunkState { let row_len = chunk.len(); let g1_slice = unsafe { std::slice::from_raw_parts(setup.g1_vec.as_ptr(), setup.g1_vec.len()) }; diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs index ddf74f4875..36279d536a 100644 --- a/jolt-core/src/poly/commitment/dory/tests.rs +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -27,7 +27,8 @@ mod tests { .map(|_| ::Challenge::random(&mut rng)) .collect(); - let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, prover_setup); + let (commitment, row_commitments) = + DoryCommitmentScheme::default().commit(&poly, prover_setup); let evaluation = as PolynomialEvaluation>::evaluate( &poly, @@ -256,7 +257,8 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = 
DoryCommitmentScheme::setup_verifier(&prover_setup); - let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, &prover_setup); + let (commitment, row_commitments) = + DoryCommitmentScheme::default().commit(&poly, &prover_setup); let mut prove_transcript = Blake2bTranscript::new(DoryCommitmentScheme::protocol_name()); @@ -321,7 +323,8 @@ mod tests { let wrong_coeffs: Vec = (0..num_coeffs).map(|_| Fr::rand(&mut rng)).collect(); let wrong_poly = MultilinearPolynomial::LargeScalars(DensePolynomial::new(wrong_coeffs)); - let (wrong_commitment, _) = DoryCommitmentScheme::default().commit(&wrong_poly, &prover_setup); + let (wrong_commitment, _) = + DoryCommitmentScheme::default().commit(&wrong_poly, &prover_setup); let mut verify_transcript = Blake2bTranscript::new(DoryCommitmentScheme::protocol_name()); @@ -414,7 +417,8 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup); - let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, &prover_setup); + let (commitment, row_commitments) = + DoryCommitmentScheme::default().commit(&poly, &prover_setup); let evaluation = as PolynomialEvaluation>::evaluate( &poly, @@ -487,8 +491,7 @@ mod tests { let combined_commitment = DoryCommitmentScheme::combine_commitments_internal(&commitment_refs, &coeffs); let (_, nu) = balanced_sigma_nu(num_vars); - let combined_hint = - DoryCommitmentScheme::combine_hints_internal(hints, &coeffs, 1 << nu); + let combined_hint = DoryCommitmentScheme::combine_hints_internal(hints, &coeffs, 1 << nu); let opening_point: Vec<::Challenge> = (0..num_vars) .map(|_| ::Challenge::random(&mut rng)) @@ -553,7 +556,8 @@ mod tests { let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup); - let commitments_and_hints = DoryCommitmentScheme::default().batch_commit(&polys, &prover_setup); + 
let commitments_and_hints = + DoryCommitmentScheme::default().batch_commit(&polys, &prover_setup); let commitments: Vec<_> = commitments_and_hints.iter().map(|(c, _)| *c).collect(); let hints: Vec<_> = commitments_and_hints.into_iter().map(|(_, h)| h).collect(); @@ -564,8 +568,7 @@ mod tests { let combined_commitment = DoryCommitmentScheme::combine_commitments_internal(&commitment_refs, &coeffs); let (_, nu) = balanced_sigma_nu(num_vars); - let combined_hint = - DoryCommitmentScheme::combine_hints_internal(hints, &coeffs, 1 << nu); + let combined_hint = DoryCommitmentScheme::combine_hints_internal(hints, &coeffs, 1 << nu); let opening_point: Vec<::Challenge> = (0..num_vars) .map(|_| ::Challenge::random(&mut rng)) @@ -728,11 +731,13 @@ mod tests { DoryGlobals::set_layout(DoryLayout::CycleMajor); let poly1 = MultilinearPolynomial::LargeScalars(DensePolynomial::new(coeffs.clone())); - let (commitment_cycle_major, _) = DoryCommitmentScheme::default().commit(&poly1, &prover_setup); + let (commitment_cycle_major, _) = + DoryCommitmentScheme::default().commit(&poly1, &prover_setup); DoryGlobals::set_layout(DoryLayout::AddressMajor); let poly2 = MultilinearPolynomial::LargeScalars(DensePolynomial::new(coeffs)); - let (commitment_addr_major, _) = DoryCommitmentScheme::default().commit(&poly2, &prover_setup); + let (commitment_addr_major, _) = + DoryCommitmentScheme::default().commit(&poly2, &prover_setup); assert_eq!( commitment_cycle_major, commitment_addr_major, @@ -808,10 +813,14 @@ mod tests { .map(|_| ::Challenge::random(&mut rng)) .collect(); + let pcs = DoryCommitmentScheme { + layout: DoryLayout::AddressMajor, + }; + let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup); - let (commitment, row_commitments) = DoryCommitmentScheme::default().commit(&poly, &prover_setup); + let (commitment, row_commitments) = pcs.commit(&poly, &prover_setup); let evaluation = as 
PolynomialEvaluation>::evaluate( &poly, @@ -819,7 +828,7 @@ mod tests { ); let mut prove_transcript = Blake2bTranscript::new(b"dory_test"); - let proof = DoryCommitmentScheme::default().prove( + let proof = pcs.prove( &prover_setup, &poly, &opening_point, @@ -829,7 +838,7 @@ mod tests { ); let mut verify_transcript = Blake2bTranscript::new(b"dory_test"); - let verification_result = DoryCommitmentScheme::default().verify( + let verification_result = pcs.verify( &proof, &verifier_setup, &mut verify_transcript, diff --git a/jolt-core/src/poly/commitment/hyperkzg.rs b/jolt-core/src/poly/commitment/hyperkzg.rs index c9ff72166a..bac38417a7 100644 --- a/jolt-core/src/poly/commitment/hyperkzg.rs +++ b/jolt-core/src/poly/commitment/hyperkzg.rs @@ -622,114 +622,6 @@ mod tests { use rand::Rng; use rand_core::SeedableRng; - //#[test] - //fn test_hyperkzg_eval() { - // // Test with poly(X1, X2) = 1 + X1 + X2 + X1*X2 - // let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(0); - // let srs = HyperKZGSRS::setup(&mut rng, 3); - // let (pk, vk): (HyperKZGProverKey, HyperKZGVerifierKey) = srs.trim(3); - // - // // poly is in eval. representation; evaluated at [(0,0), (0,1), (1,0), (1,1)] - // let poly = - // MultilinearPolynomial::from(vec![Fr::from(1), Fr::from(2), Fr::from(2), Fr::from(4)]); - // - // let C = HyperKZG::commit(&pk, &poly).unwrap(); - // - // let test_inner = - // |point: Vec>, eval: Fr| -> Result<(), ProofVerifyError> { - // let mut tr = Blake2bTranscript::new(b"TestEval"); - // let proof = HyperKZG::open(&pk, &poly, &point, &eval, &mut tr).unwrap(); - // let mut tr = Blake2bTranscript::new(b"TestEval"); - // HyperKZG::verify(&vk, &C, &point, &eval, &proof, &mut tr) - // }; - // - // // Call the prover with a (point, eval) pair. 
- // // The prover does not recompute so it may produce a proof, but it should not verify - // let point = vec![Fr::from(0), Fr::from(0)]; - // let eval = Fr::from(1); - // assert!(test_inner(point, eval).is_ok()); - // - // let point = vec![Fr::from(0), Fr::from(1)]; - // let eval = Fr::from(2); - // assert!(test_inner(point, eval).is_ok()); - // - // let point = vec![Fr::from(1), Fr::from(1)]; - // let eval = Fr::from(4); - // assert!(test_inner(point, eval).is_ok()); - // - // let point = vec![Fr::from(0), Fr::from(2)]; - // let eval = Fr::from(3); - // assert!(test_inner(point, eval).is_ok()); - // - // let point = vec![Fr::from(2), Fr::from(2)]; - // let eval = Fr::from(9); - // assert!(test_inner(point, eval).is_ok()); - // - // // Try a couple incorrect evaluations and expect failure - // let point = vec![Fr::from(2), Fr::from(2)]; - // let eval = Fr::from(50); - // assert!(test_inner(point, eval).is_err()); - // - // let point = vec![Fr::from(0), Fr::from(2)]; - // let eval = Fr::from(4); - // assert!(test_inner(point, eval).is_err()); - //} - - // THIS test does not make sense for MontU128Challenge - //#[test] - //fn test_hyperkzg_small() { - // let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(0); - // - // // poly = [1, 2, 1, 4] - // let poly = - // MultilinearPolynomial::from(vec![Fr::from(1), Fr::from(2), Fr::from(1), Fr::from(4)]); - // - // // point = [4,3] - // let point = vec![Fr::from(4), Fr::from(3)]; - // - // // eval = 28 - // let eval = Fr::from(28); - // - // let srs = HyperKZGSRS::setup(&mut rng, 3); - // let (pk, vk): (HyperKZGProverKey, HyperKZGVerifierKey) = srs.trim(3); - // - // // make a commitment - // let C = HyperKZG::commit(&pk, &poly).unwrap(); - // - // // prove an evaluation - // let mut tr = Blake2bTranscript::new(b"TestEval"); - // let proof = HyperKZG::open(&pk, &poly, &point, &eval, &mut tr).unwrap(); - // let post_c_p = tr.challenge_scalar::(); - // - // // verify the evaluation - // let mut verifier_transcript = 
Blake2bTranscript::new(b"TestEval"); - // assert!( - // HyperKZG::verify(&vk, &C, &point, &eval, &proof, &mut verifier_transcript,).is_ok() - // ); - // let post_c_v = verifier_transcript.challenge_scalar::(); - // - // // check if the prover transcript and verifier transcript are kept in the same state - // assert_eq!(post_c_p, post_c_v); - // - // let mut proof_bytes = Vec::new(); - // proof.serialize_compressed(&mut proof_bytes).unwrap(); - // assert_eq!(proof_bytes.len(), 368); - // - // // Change the proof and expect verification to fail - // let mut bad_proof = proof.clone(); - // let v1 = bad_proof.v[1].clone(); - // bad_proof.v[0].clone_from(&v1); - // let mut verifier_transcript2 = Blake2bTranscript::new(b"TestEval"); - // assert!(HyperKZG::verify( - // &vk, - // &C, - // &point, - // &eval, - // &bad_proof, - // &mut verifier_transcript2 - // ) - // .is_err()); - //} #[test] fn test_hyperkzg_large() { // test the hyperkzg prover and verifier with random instances (derived from a seed) diff --git a/jolt-core/src/poly/multilinear_polynomial.rs b/jolt-core/src/poly/multilinear_polynomial.rs index ce49b7693b..b0b6a6a73f 100644 --- a/jolt-core/src/poly/multilinear_polynomial.rs +++ b/jolt-core/src/poly/multilinear_polynomial.rs @@ -90,7 +90,8 @@ impl MultilinearPolynomial { MultilinearPolynomial::I128Scalars(poly) => poly.coeffs.len(), MultilinearPolynomial::U128Scalars(poly) => poly.coeffs.len(), MultilinearPolynomial::S128Scalars(poly) => poly.coeffs.len(), - _ => unimplemented!("Unexpected MultilinearPolynomial variant"), + MultilinearPolynomial::OneHot(poly) => poly.K * poly.nonzero_indices.len(), + MultilinearPolynomial::RLC(poly) => poly.len(), } } @@ -123,7 +124,8 @@ impl MultilinearPolynomial { MultilinearPolynomial::I128Scalars(poly) => poly.len(), MultilinearPolynomial::U128Scalars(poly) => poly.len(), MultilinearPolynomial::S128Scalars(poly) => poly.len(), - _ => unimplemented!("Unexpected MultilinearPolynomial variant"), + 
MultilinearPolynomial::OneHot(poly) => poly.K * poly.nonzero_indices.len(), + MultilinearPolynomial::RLC(poly) => poly.len(), } } diff --git a/jolt-core/src/poly/rlc_polynomial.rs b/jolt-core/src/poly/rlc_polynomial.rs index 47a68c231e..7bf56ccd2e 100644 --- a/jolt-core/src/poly/rlc_polynomial.rs +++ b/jolt-core/src/poly/rlc_polynomial.rs @@ -91,6 +91,11 @@ impl PartialEq for RLCPolynomial { } impl RLCPolynomial { + /// Total number of coefficients in the Dory matrix for this RLC polynomial. + pub fn len(&self) -> usize { + DoryGlobals::get_num_columns() * DoryGlobals::get_max_num_rows() + } + pub fn new() -> Self { Self { dense_rlc: unsafe_allocate_zero_vec(DoryGlobals::get_T()), diff --git a/jolt-core/src/zkvm/proof_serialization.rs b/jolt-core/src/zkvm/proof_serialization.rs index d9a82ec04e..b292c4be12 100644 --- a/jolt-core/src/zkvm/proof_serialization.rs +++ b/jolt-core/src/zkvm/proof_serialization.rs @@ -1,5 +1,6 @@ use std::{ collections::BTreeMap, + fs::File, io::{Read, Write}, }; @@ -498,7 +499,6 @@ pub fn serialize_and_print_size( file_name: &str, item: &impl CanonicalSerialize, ) -> Result<(), SerializationError> { - use std::fs::File; let mut file = File::create(file_name)?; item.serialize_compressed(&mut file)?; let file_size_bytes = file.metadata()?.len(); diff --git a/jolt-core/src/zkvm/prover.rs b/jolt-core/src/zkvm/prover.rs index 64d425cfac..37e162fbe2 100644 --- a/jolt-core/src/zkvm/prover.rs +++ b/jolt-core/src/zkvm/prover.rs @@ -5,7 +5,7 @@ use crate::{ zkvm::{claim_reductions::advice::ReductionPhase, config::OneHotConfig}, }; use std::{ - collections::HashMap, + collections::{BTreeMap, HashMap}, fs::File, io::{Read, Write}, path::Path, @@ -1437,7 +1437,7 @@ impl<'a, F: JoltField, PCS: StreamingCommitmentScheme, ProofTranscrip let gamma_powers: Vec = self.transcript.challenge_scalar_powers(claims.len()); // Accumulate gamma coefficients per unique polynomial (BTreeMap orders by CommittedPolynomial) - let mut rlc_map = 
std::collections::BTreeMap::new(); + let mut rlc_map = BTreeMap::new(); for (gamma, (poly, claim)) in gamma_powers.iter().zip(polynomial_claims.iter()) { let entry = rlc_map.entry(*poly).or_insert((F::zero(), F::zero())); entry.0 += *gamma; diff --git a/jolt-core/src/zkvm/verifier.rs b/jolt-core/src/zkvm/verifier.rs index 895556c56f..fb6a78f999 100644 --- a/jolt-core/src/zkvm/verifier.rs +++ b/jolt-core/src/zkvm/verifier.rs @@ -1,4 +1,4 @@ -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use std::fs::File; use std::io::{Read, Write}; use std::path::Path; @@ -640,7 +640,7 @@ impl<'a, F: JoltField, PCS: CommitmentScheme, ProofTranscript: Transc let gamma_powers: Vec = self.transcript.challenge_scalar_powers(claims.len()); // Accumulate gamma coefficients per unique polynomial (BTreeMap for deterministic ordering) - let mut rlc_map = std::collections::BTreeMap::new(); + let mut rlc_map = BTreeMap::new(); for (gamma, (poly, claim)) in gamma_powers.iter().zip(polynomial_claims.iter()) { let entry = rlc_map.entry(*poly).or_insert((F::zero(), F::zero())); entry.0 += *gamma; @@ -672,16 +672,17 @@ impl<'a, F: JoltField, PCS: CommitmentScheme, ProofTranscript: Transc .collect(); let commitment_ref_slice: Vec<&PCS::Commitment> = commitment_refs.iter().collect(); - PCS::default().batch_verify( - &self.proof.joint_opening_proof, - &self.preprocessing.generators, - &mut self.transcript, - &opening_point.r, - &commitment_ref_slice, - &sorted_claims, - &coeffs, - ) - .context("Stage 8") + PCS::default() + .batch_verify( + &self.proof.joint_opening_proof, + &self.preprocessing.generators, + &mut self.transcript, + &opening_point.r, + &commitment_ref_slice, + &sorted_claims, + &coeffs, + ) + .context("Stage 8") } } From 0da325a4e31b60406357ac165cee236a41784247 Mon Sep 17 00:00:00 2001 From: Quang Dao Date: Sat, 28 Feb 2026 17:51:54 -0800 Subject: [PATCH 5/5] fix: use DoryGlobals at PCS construction, not in commit/prove bodies Move DoryGlobals reads 
(layout, sigma, nu) from commit()/prove() into Default::default() and from_proof(), so the operational methods use self.sigma/self.nu/self.layout instead. Fixes AddressMajor Stage 8 verification failure caused by derive(Default) always using CycleMajor. Made-with: Cursor --- .../poly/commitment/dory/commitment_scheme.rs | 31 +++++++++++++------ jolt-core/src/poly/commitment/dory/tests.rs | 4 +-- 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs index 7bfc45c67e..5847567d72 100644 --- a/jolt-core/src/poly/commitment/dory/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/dory/commitment_scheme.rs @@ -28,9 +28,21 @@ use rayon::prelude::*; use sha3::{Digest, Sha3_256}; use tracing::trace_span; -#[derive(Clone, Default)] +#[derive(Clone)] pub struct DoryCommitmentScheme { pub layout: DoryLayout, + pub sigma: usize, + pub nu: usize, +} + +impl Default for DoryCommitmentScheme { + fn default() -> Self { + Self { + layout: DoryGlobals::get_layout(), + sigma: DoryGlobals::get_num_columns().log_2(), + nu: DoryGlobals::get_max_num_rows().log_2(), + } + } } #[derive(CanonicalSerialize, CanonicalDeserialize)] @@ -86,6 +98,8 @@ impl CommitmentScheme for DoryCommitmentScheme { fn from_proof(proof: &DoryBatchedProof) -> Self { Self { layout: proof.layout, + sigma: DoryGlobals::get_num_columns().log_2(), + nu: DoryGlobals::get_max_num_rows().log_2(), } } @@ -100,8 +114,8 @@ impl CommitmentScheme for DoryCommitmentScheme { ) -> (Self::Commitment, Self::OpeningProofHint) { let _span = trace_span!("DoryCommitmentScheme::commit").entered(); - let total_vars = poly.len().log_2(); - let (sigma, nu) = balanced_sigma_nu(total_vars); + let sigma = self.sigma; + let nu = self.nu; let (tier_2, row_commitments) = as Polynomial< ArkFr, @@ -145,8 +159,8 @@ impl CommitmentScheme for DoryCommitmentScheme { row_commitments }); - let total_vars = poly.len().log_2(); 
- let (sigma, nu) = balanced_sigma_nu(total_vars); + let sigma = self.sigma; + let nu = self.nu; let reordered_point = reorder_opening_point_for_layout::(self.layout, opening_point); @@ -227,9 +241,7 @@ impl CommitmentScheme for DoryCommitmentScheme { transcript: &mut ProofTranscript, ) -> Self::BatchedProof { let joint_poly = poly_source.build_joint_polynomial(coeffs); - let total_vars = joint_poly.len().log_2(); - let (_sigma, nu) = balanced_sigma_nu(total_vars); - let max_num_rows = 1 << nu; + let max_num_rows = 1 << self.nu; let combined_hint = Self::combine_hints_internal(hints, coeffs, max_num_rows); let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); let proof = self.prove( @@ -258,7 +270,8 @@ impl CommitmentScheme for DoryCommitmentScheme { ) -> Result<(), ProofVerifyError> { let joint_commitment = Self::combine_commitments_internal(commitments, coeffs); let joint_claim: ark_bn254::Fr = coeffs.iter().zip(claims).map(|(c, v)| *c * *v).sum(); - self.verify( + let pcs = Self::from_proof(proof); + pcs.verify( &proof.proof, setup, transcript, diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs index 36279d536a..883035880f 100644 --- a/jolt-core/src/poly/commitment/dory/tests.rs +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -813,9 +813,7 @@ mod tests { .map(|_| ::Challenge::random(&mut rng)) .collect(); - let pcs = DoryCommitmentScheme { - layout: DoryLayout::AddressMajor, - }; + let pcs = DoryCommitmentScheme::default(); let prover_setup = DoryCommitmentScheme::setup_prover(num_vars); let verifier_setup = DoryCommitmentScheme::setup_verifier(&prover_setup);