Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ keywords = ["SNARK", "cryptography", "proofs"]

[workspace]
members = [
"crates/jolt-crypto",
"crates/jolt-poly",
"crates/jolt-instructions",
"crates/jolt-transcript",
Expand Down
48 changes: 48 additions & 0 deletions crates/jolt-crypto/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
[package]
name = "jolt-crypto"
version = "0.1.0"
authors = ["Jolt Contributors"]
edition = "2021"
license = "MIT OR Apache-2.0"
repository = "https://github.com/a16z/jolt"
description = "Backend-agnostic cryptographic group and commitment primitives for Jolt"
keywords = ["cryptography", "zero-knowledge", "commitment", "elliptic-curve", "zkvm"]
categories = ["cryptography"]

[lints]
workspace = true

[features]
default = ["bn254"]
bn254 = ["dep:ark-bn254", "dep:ark-ec", "dep:ark-ff", "dep:ark-serialize", "dep:ark-std", "dep:num-bigint", "dep:num-integer", "dep:num-traits", "dep:rayon"]
dory-pcs = ["dep:dory", "bn254", "jolt-field/dory-pcs"]

[dependencies]
jolt-field = { path = "../jolt-field" }
jolt-transcript = { path = "../jolt-transcript" }
tracing.workspace = true
serde = { workspace = true, features = ["derive", "alloc"] }
rand_core = { workspace = true }
dory = { workspace = true, optional = true }

# Arkworks — BN254 backend (internal, gated behind `bn254` feature)
ark-bn254 = { workspace = true, features = ["curve"], optional = true }
ark-ec = { workspace = true, optional = true }
ark-ff = { workspace = true, optional = true }
ark-serialize = { workspace = true, optional = true }
ark-std = { workspace = true, optional = true }
num-bigint = { workspace = true, optional = true }
num-integer = { workspace = true, optional = true }
num-traits = { workspace = true, optional = true }
rayon = { workspace = true, optional = true }

[dev-dependencies]
ark-std = { workspace = true }
rand_chacha = { workspace = true }
serde_json = { workspace = true, features = ["std"] }
bincode = { workspace = true }
criterion = { workspace = true }

[[bench]]
name = "crypto"
harness = false
66 changes: 66 additions & 0 deletions crates/jolt-crypto/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# jolt-crypto

Backend-agnostic cryptographic group and commitment primitives for the Jolt zkVM.

Part of the [Jolt](https://github.com/a16z/jolt) zkVM.

## Overview

This crate defines the core group abstractions (`JoltGroup`, `PairingGroup`) and commitment trait hierarchy (`Commitment`, `JoltCommitment`, `HomomorphicCommitment`) used by the Jolt proving system. It provides a backend-agnostic interface -- the BN254 implementation wraps arkworks internally, but no arkworks types appear in the public API.

## Commitment Hierarchy

```
Commitment (base: just the Output type)
        │
        ▼
JoltCommitment (Setup, Commitment types; commit, verify)
        │
        ▼
HomomorphicCommitment<F> (linear_combine for Nova folding)
```

| Scheme | Message | Output |
|--------|---------|--------|
| Pedersen | `F` (field) | `G` (group) |
| Dory tier-1 | `F` (field) | `G1` |
| Dory tier-2 | `G1` (commitments) | `GT` |

`Commitment` is the base trait shared with `jolt-openings::CommitmentScheme` -- both extend it, sharing the `Output` associated type.

## Public API

### Core Traits

- **`Commitment`** -- Base trait defining `type Output` with standard bounds.
- **`JoltCommitment`** -- Backend-agnostic vector commitment with `Setup`, `Commitment` associated types. Methods: `capacity()`, `commit()`, `verify()`. Commit takes a blinding factor.
- **`HomomorphicCommitment<F>`** -- Additive homomorphism: `linear_combine(c1, c2, scalar) = c1 + scalar * c2`. Blanket-implemented for `JoltGroup`.
- **`JoltGroup`** -- Cryptographic group with additive notation. Provides `identity()`, `is_identity()`, `double()`, `scalar_mul()`, `msm()`.
- **`PairingGroup`** -- Pairing-friendly group. Associates `ScalarField`, `G1`, `G2`, `GT` (all `JoltGroup`), provides `pairing()` and `multi_pairing()`.

### Pedersen Commitment

- **`Pedersen<G: JoltGroup>`** -- Generic Pedersen vector commitment. Implements `JoltCommitment`.
- **`PedersenSetup<G>`** -- Setup parameters (generators + blinding generator).

### BN254 Concrete Types

- **`Bn254`** -- BN254 pairing curve implementing `PairingGroup`.
- **`Bn254G1`** / **`Bn254G2`** -- G1/G2 group elements implementing `JoltGroup`.
- **`Bn254GT`** -- Target group element (additive notation over `Fq12`).

## Feature Flags

| Flag | Default | Description |
|------|---------|-------------|
| `bn254` | **Yes** | Enable BN254 backend via arkworks |
| `dory-pcs` | No | Enable Dory PCS interop (implies `bn254`) |

## Dependency Position

```
jolt-field      ──┐
                  ├──> jolt-crypto ──> jolt-openings, jolt-sumcheck, jolt-dory, jolt-blindfold
jolt-transcript ──┘
```

## License

Licensed under either of MIT or Apache-2.0, at your option (see `Cargo.toml`).
94 changes: 94 additions & 0 deletions crates/jolt-crypto/REVIEW.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
# jolt-crypto Review

**Crate:** jolt-crypto (Level 2)
**LOC:** 4,969 (was ~5,070 — reduced via G1/G2 macro dedup)
**Baseline:** 0 clippy warnings, 128 tests passing
**Rating:** 8.5/10

## Overview

Elliptic curve and commitment abstractions for Jolt. Provides the `JoltGroup`,
`PairingGroup`, `JoltCommitment`, and `HomomorphicCommitment` trait hierarchy
with a BN254 backend. Includes GLV scalar multiplication, batch affine addition,
Pedersen commitments, and Dory interop bridge. Zero arkworks leakage in the
public API — all arkworks types are behind `#[repr(transparent)]` newtypes.

**Verdict:** Clean trait design with strong encapsulation. The `#[repr(transparent)]`
newtypes effectively hide arkworks internals. Performance-critical paths (GLV, batch
addition, MSM) are well-optimized. The G1/G2 macro dedup eliminated 300+ lines of
identical boilerplate. Well-tested with 128 tests, 14 benchmarks, and 3 fuzz targets.

---

## Findings

### [CQ-1.1] G1/G2 ~300 LOC identical boilerplate
**File:** `src/arkworks/bn254/g1.rs`, `g2.rs`
**Severity:** MEDIUM
**Finding:** Both files had 157 LOC of nearly identical operator impls, serde, JoltGroup impl.
**Status:** RESOLVED — Created `impl_jolt_group_wrapper!` macro in `mod.rs`. Both files reduced to 8 LOC.

### [CQ-2.1] Missing compile-time size assertions for repr(transparent)
**File:** `src/arkworks/bn254/g1.rs`, `g2.rs`
**Severity:** HIGH
**Finding:** Unsafe pointer casts between wrapper and inner types rely on `#[repr(transparent)]` layout guarantee but had no compile-time verification.
**Status:** RESOLVED — Added `const _: () = assert!(size_of::<Wrapper>() == size_of::<Inner>());` inside the macro.

### [CQ-2.2] No safe into_inner() accessor
**File:** `src/arkworks/bn254/g1.rs`, `g2.rs`
**Severity:** LOW
**Finding:** Downstream crates (jolt-dory) use unsafe transmute to extract the inner arkworks type. A safe accessor would reduce unsafe surface area.
**Status:** RESOLVED — Added `pub fn into_inner(self)` to the macro. Downstream migration (jolt-dory scheme.rs ~15 transmutes) deferred.

### [CQ-3.1] Clippy warnings in dory_interop
**File:** `src/dory_interop.rs`
**Severity:** LOW
**Finding:** 3 redundant closures, 1 needless borrow.
**Status:** RESOLVED — Fixed all 4 warnings.

### [CQ-4.1] Missing #[must_use] on pure trait methods
**File:** `src/groups.rs`, `src/commitment.rs`
**Severity:** LOW
**Finding:** Pure methods returning values without side effects should be `#[must_use]`.
**Status:** RESOLVED — Added to JoltGroup, JoltCommitment, HomomorphicCommitment, PairingGroup.

### [CD-1.1] Internal modules exposed in public API
**File:** `src/arkworks/bn254/mod.rs`
**Severity:** LOW
**Finding:** `glv` and `batch_addition` modules are `pub` but are implementation details.
**Status:** RESOLVED — Added `#[doc(hidden)]`.

### [CD-2.1] batch_addition precondition undocumented
**File:** `src/arkworks/bn254/batch_addition.rs`
**Severity:** LOW
**Finding:** Entry point has subtle preconditions (in-bounds indices, no equal/inverse pairs).
**Status:** RESOLVED — Added doc comment.

### [CD-3.1] bincode v1 API usage
**File:** `tests/serialization.rs`, `benches/crypto.rs`, `fuzz/fuzz_targets/*.rs`
**Severity:** MEDIUM
**Finding:** Workspace uses bincode v2 but these files used v1 API.
**Status:** RESOLVED — Migrated all files + fuzz Cargo.toml to bincode v2.

### [CD-4.1] Cargo.toml metadata incomplete
**File:** `Cargo.toml`
**Severity:** LOW
**Finding:** Missing repository, keywords, categories; license was MIT-only.
**Status:** RESOLVED — Updated to dual MIT OR Apache-2.0, added all metadata.

### [NIT-1.1] append_to_transcript Vec allocation
**File:** `src/arkworks/bn254/g1.rs` (now in macro)
**Severity:** LOW
**Finding:** `ark_serialize::CanonicalSerialize` allocates a fresh Vec on each call.
**Status:** WONTFIX — Acceptable for now; not on hot path.

---

## Summary

| Category | Pass | Resolved | Wontfix | Total |
|----------|------|----------|---------|-------|
| CQ | 0 | 5 | 0 | 5 |
| CD | 0 | 4 | 0 | 4 |
| NIT | 0 | 0 | 1 | 1 |
| **Total**| **0**| **9** | **1** | **10** |
172 changes: 172 additions & 0 deletions crates/jolt-crypto/benches/crypto.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
#![allow(unused_results)]

use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};

use jolt_crypto::{
Bn254, Bn254G1, Bn254G2, JoltCommitment, JoltGroup, PairingGroup, Pedersen, PedersenSetup,
};
use jolt_field::{Field, Fr};
use rand_chacha::ChaCha20Rng;
use rand_core::SeedableRng;

/// Benchmark a single G1 scalar multiplication with a random base point and scalar.
fn bench_g1_scalar_mul(c: &mut Criterion) {
    let mut rng = ChaCha20Rng::seed_from_u64(0);
    let base = Bn254::random_g1(&mut rng);
    let scalar = Fr::random(&mut rng);

    c.bench_function("g1_scalar_mul", |bencher| {
        bencher.iter(|| base.scalar_mul(&scalar));
    });
}

/// Benchmark a single G2 scalar multiplication using the fixed G2 generator.
fn bench_g2_scalar_mul(c: &mut Criterion) {
    let mut rng = ChaCha20Rng::seed_from_u64(1);
    let base = Bn254::g2_generator();
    let scalar = Fr::random(&mut rng);

    c.bench_function("g2_scalar_mul", |bencher| {
        bencher.iter(|| base.scalar_mul(&scalar));
    });
}

/// Benchmark G1 point addition on two random points.
fn bench_g1_add(c: &mut Criterion) {
    let mut rng = ChaCha20Rng::seed_from_u64(2);
    // Name the operands `lhs`/`rhs` so the closure can use the conventional `b`.
    let lhs = Bn254::random_g1(&mut rng);
    let rhs = Bn254::random_g1(&mut rng);

    c.bench_function("g1_add", |b| {
        b.iter(|| lhs + rhs);
    });
}

/// Benchmark G1 point doubling on a random point.
fn bench_g1_double(c: &mut Criterion) {
    let mut rng = ChaCha20Rng::seed_from_u64(3);
    let point = Bn254::random_g1(&mut rng);

    c.bench_function("g1_double", |bencher| {
        bencher.iter(|| point.double());
    });
}

/// Benchmark G1 multi-scalar multiplication across a range of input sizes.
fn bench_g1_msm(c: &mut Criterion) {
    let mut group = c.benchmark_group("g1_msm");

    for &size in &[4usize, 16, 64, 256, 1024] {
        // Fresh, fixed-seed RNG per size keeps inputs reproducible.
        // Draw order matters: all bases first, then all scalars.
        let mut rng = ChaCha20Rng::seed_from_u64(10);
        let mut bases: Vec<Bn254G1> = Vec::with_capacity(size);
        for _ in 0..size {
            bases.push(Bn254::random_g1(&mut rng));
        }
        let mut scalars: Vec<Fr> = Vec::with_capacity(size);
        for _ in 0..size {
            scalars.push(Fr::random(&mut rng));
        }

        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, _| {
            b.iter(|| Bn254G1::msm(&bases, &scalars));
        });
    }
    group.finish();
}

/// Benchmark G2 multi-scalar multiplication across a range of input sizes.
///
/// Bases are distinct small multiples of the G2 generator (random G2 sampling
/// is not exposed); scalars are drawn from a fixed-seed RNG for reproducibility.
fn bench_g2_msm(c: &mut Criterion) {
    let mut group = c.benchmark_group("g2_msm");

    // The generator is loop-invariant — fetch it once instead of per size.
    let g = Bn254::g2_generator();

    for size in [4, 16, 64, 256] {
        // Fresh, fixed-seed RNG per size keeps scalar inputs reproducible.
        let mut rng = ChaCha20Rng::seed_from_u64(20);
        let bases: Vec<Bn254G2> = (0..size)
            .map(|i| g.scalar_mul(&Fr::from_u64(i as u64 + 1)))
            .collect();
        let scalars: Vec<Fr> = (0..size).map(|_| Fr::random(&mut rng)).collect();

        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, _| {
            b.iter(|| Bn254G2::msm(&bases, &scalars));
        });
    }
    group.finish();
}

/// Benchmark a single BN254 pairing of the two group generators.
fn bench_pairing(c: &mut Criterion) {
    let (g1, g2) = (Bn254::g1_generator(), Bn254::g2_generator());

    c.bench_function("pairing", |bencher| {
        bencher.iter(|| Bn254::pairing(&g1, &g2));
    });
}

/// Benchmark multi-pairing (product of pairings) across a range of input sizes.
///
/// G1 inputs are random; G2 inputs are distinct small multiples of the G2
/// generator (random G2 sampling is not exposed).
fn bench_multi_pairing(c: &mut Criterion) {
    let mut group = c.benchmark_group("multi_pairing");

    // The generator is loop-invariant — fetch it once instead of per size.
    let g2 = Bn254::g2_generator();

    for size in [2, 4, 8, 16] {
        // Fresh, fixed-seed RNG per size keeps G1 inputs reproducible.
        let mut rng = ChaCha20Rng::seed_from_u64(30);
        let g1s: Vec<Bn254G1> = (0..size).map(|_| Bn254::random_g1(&mut rng)).collect();
        let g2s: Vec<Bn254G2> = (0..size)
            .map(|i| g2.scalar_mul(&Fr::from_u64(i as u64 + 1)))
            .collect();

        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, _| {
            b.iter(|| Bn254::multi_pairing(&g1s, &g2s));
        });
    }
    group.finish();
}

/// Benchmark Pedersen vector commitment over G1 across a range of vector sizes.
fn bench_pedersen_commit(c: &mut Criterion) {
    let mut group = c.benchmark_group("pedersen_commit");

    for &size in &[4usize, 16, 64, 256, 1024] {
        // Fresh, fixed-seed RNG per size keeps inputs reproducible.
        // Draw order matters: generators, blinding generator, values, blinding.
        let mut rng = ChaCha20Rng::seed_from_u64(40);
        let gens: Vec<Bn254G1> = (0..size).map(|_| Bn254::random_g1(&mut rng)).collect();
        let blinding_gen = Bn254::random_g1(&mut rng);
        let setup = PedersenSetup::new(gens, blinding_gen);
        let values: Vec<Fr> = (0..size).map(|_| Fr::random(&mut rng)).collect();
        let blinding = Fr::random(&mut rng);

        group.bench_with_input(BenchmarkId::from_parameter(size), &size, |bencher, _| {
            bencher.iter(|| Pedersen::<Bn254G1>::commit(&setup, &values, &blinding));
        });
    }
    group.finish();
}

/// Benchmark scalar multiplication in the target group GT.
///
/// The GT element is obtained by pairing the two generators.
fn bench_gt_scalar_mul(c: &mut Criterion) {
    let gt = Bn254::pairing(&Bn254::g1_generator(), &Bn254::g2_generator());
    let mut rng = ChaCha20Rng::seed_from_u64(50);
    let scalar = Fr::random(&mut rng);

    c.bench_function("gt_scalar_mul", |bencher| {
        bencher.iter(|| gt.scalar_mul(&scalar));
    });
}

/// Benchmark bincode (serde, standard config) round-trip of a G1 element:
/// one benchmark for serialization, one for deserialization.
fn bench_g1_serde(c: &mut Criterion) {
    let mut rng = ChaCha20Rng::seed_from_u64(60);
    let point = Bn254::random_g1(&mut rng);
    let config = bincode::config::standard();
    // Pre-encode once so the decode benchmark has a fixed input buffer.
    let encoded = bincode::serde::encode_to_vec(point, config).unwrap();

    c.bench_function("g1_serialize_bincode", |bencher| {
        bencher.iter(|| bincode::serde::encode_to_vec(point, config).unwrap());
    });

    c.bench_function("g1_deserialize_bincode", |bencher| {
        bencher.iter(|| {
            bincode::serde::decode_from_slice::<Bn254G1, _>(&encoded, config).unwrap();
        });
    });
}

// Register every benchmark function with criterion; `criterion_main!`
// generates the binary's `main` (the Cargo.toml sets `harness = false`).
criterion_group!(
benches,
bench_g1_scalar_mul,
bench_g2_scalar_mul,
bench_g1_add,
bench_g1_double,
bench_g1_msm,
bench_g2_msm,
bench_pairing,
bench_multi_pairing,
bench_pedersen_commit,
bench_gt_scalar_mul,
bench_g1_serde,
);
criterion_main!(benches);
Loading
Loading