4 changes: 3 additions & 1 deletion Cargo.toml
@@ -13,5 +13,7 @@ ark-ff = { version = "0.5", default-features = false }
ark-ec = { version = "0.5", default-features = false }
ark-poly = { version = "0.5", default-features = false }
ark-serialize = { version = "0.5", default-features = false, features = ["derive"] }
w3f-pcs = { git = "https://github.com/w3f/fflonk", default-features = false }
# TODO: restore w3f once https://github.com/w3f/fflonk/pull/46 gets merged
# w3f-pcs = { git = "https://github.com/w3f/fflonk", default-features = false }
w3f-pcs = { git = "https://github.com/davxy/fflonk", default-features = false }
rayon = { version = "1", default-features = false }
3 changes: 1 addition & 2 deletions w3f-plonk-common/src/kzg_acc.rs
@@ -4,11 +4,10 @@ use crate::{ColumnsCommited, ColumnsEvaluated, Proof};
use ark_ec::pairing::Pairing;
use ark_ec::{CurveGroup, VariableBaseMSM};
use ark_ff::{PrimeField, Zero};
use ark_std::iterable::Iterable;
use ark_std::rand::Rng;
use w3f_pcs::pcs::kzg::params::KzgVerifierKey;
use w3f_pcs::pcs::kzg::{AccumulatedOpening, KZG};
use w3f_pcs::pcs::{Commitment, PCS};
use w3f_pcs::pcs::PCS;

// Aggregates openings for KZG commitments.
// Somewhat similar to https://eprint.iacr.org/2020/499.pdf, section 8.
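
For context, the aggregation this comment refers to is the standard random-linear-combination batching of KZG openings; the following is a sketch under that assumption, not a statement taken from this crate. Each opening proof π_i for commitment C_i at point z_i with claimed value v_i satisfies

  e(C_i − v_i·G, H) = e(π_i, (τ − z_i)·H)

which, with verifier-sampled random scalars r_i, folds into the single accumulated check

  e(Σ_i r_i·(C_i − v_i·G + z_i·π_i), H) = e(Σ_i r_i·π_i, τ·H)

so the accumulator only needs to carry two group elements and defer one pairing equation for the whole batch.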
2 changes: 1 addition & 1 deletion w3f-plonk-common/src/verifier.rs
@@ -14,7 +14,7 @@ pub struct PlonkVerifier<F: PrimeField, CS: PCS<F>, T: PlonkTranscript<F, CS>> {
pub pcs_vk: CS::VK,
// Transcript,
// initialized with the public parameters and the commitments to the precommitted columns.
transcript_prelude: T,
pub transcript_prelude: T,
}

impl<F: PrimeField, CS: PCS<F>, T: PlonkTranscript<F, CS>> PlonkVerifier<F, CS, T> {
63 changes: 58 additions & 5 deletions w3f-ring-proof/src/lib.rs
@@ -51,7 +51,7 @@ impl ArkTranscript {
#[cfg(test)]
mod tests {
use ark_bls12_381::Bls12_381;
use ark_ec::CurveGroup;
use ark_ec::{AffineRepr, CurveGroup};
use ark_ed_on_bls12_381_bandersnatch::{BandersnatchConfig, EdwardsAffine, Fq, Fr};
use ark_std::ops::Mul;
use ark_std::rand::Rng;
@@ -67,7 +67,26 @@ mod tests {

use super::*;

fn _test_ring_proof<CS: PCS<Fq>>(
impl<F: PrimeField, CS: PCS<F>> Clone for VerifierKey<F, CS> {
fn clone(&self) -> Self {
Self {
pcs_raw_vk: self.pcs_raw_vk.clone(),
fixed_columns_committed: self.fixed_columns_committed.clone(),
}
}
}

impl<F: PrimeField, CS: PCS<F>, G: AffineRepr<BaseField = F>> Clone for ProverKey<F, CS, G> {
fn clone(&self) -> Self {
Self {
pcs_ck: self.pcs_ck.clone(),
fixed_columns: self.fixed_columns.clone(),
verifier_key: self.verifier_key.clone(),
}
}
}

fn _test_ring_proof<CS: PCS<Fq> + Clone>(
domain_size: usize,
batch_size: usize,
) -> (
@@ -153,11 +172,45 @@ mod tests {
(pcs_params, piop_params)
}

#[test]
// cargo test test_ring_proof_kzg --release --features="print-trace" -- --show-output
//
// ## Parallel feature off
//
// Batch vs sequential verification times (ms):
//
// | proofs | sequential | batch | speedup |
// |--------|------------|--------|---------|
// | 1 | 3.032 | 2.790 | 1.09x |
// | 2 | 6.425 | 3.218 | 2.00x |
// | 4 | 11.968 | 5.122 | 2.34x |
// | 8 | 23.922 | 6.487 | 3.69x |
// | 16 | 47.773 | 10.002 | 4.78x |
// | 32 | 95.570 | 16.601 | 5.76x |
// | 64 | 210.959 | 29.484 | 7.15x |
// | 128 | 422.217 | 52.170 | 8.09x |
// | 256 | 762.874 | 85.164 | 8.96x |
//
// Sequential verification scales linearly with proof count.
// Batch verification scales sub-linearly.
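// As a worked example from the table above: going from 128 to 256 proofs adds
// roughly 762.874 - 422.217 ≈ 341 ms sequentially (≈ 2.7 ms per extra proof),
// but only 85.164 - 52.170 ≈ 33 ms in batch mode (≈ 0.26 ms per extra proof).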
//
// ## Parallel feature on
//
// | proofs | sequential | batch | speedup |
// |--------|------------|--------|---------|
// | 1 | 3.548 | 2.678 | 1.32x |
// | 2 | 7.160 | 3.108 | 2.30x |
// | 4 | 14.323 | 3.115 | 4.60x |
// | 8 | 28.528 | 3.189 | 8.95x |
// | 16 | 57.961 | 3.818 | 15.18x |
// | 32 | 108.132 | 4.741 | 22.81x |
// | 64 | 218.614 | 6.042 | 36.18x |
// | 128 | 466.069 | 8.324 | 55.99x |
// | 256 | 895.605 | 11.869 | 75.46x |
#[test]
fn test_ring_proof_kzg() {
let (verifier, claims) = _test_ring_proof::<KZG<Bls12_381>>(2usize.pow(10), 10);
let t_verify_batch = start_timer!(|| "Verify Batch KZG");
let batch_size: usize = 16;
let (verifier, claims) = _test_ring_proof::<KZG<Bls12_381>>(2usize.pow(10), batch_size);
let t_verify_batch = start_timer!(|| format!("Verify Batch KZG (batch={batch_size})"));
let (blinded_pks, proofs) = claims.into_iter().unzip();
assert!(verifier.verify_batch_kzg(proofs, blinded_pks));
end_timer!(t_verify_batch);
3 changes: 1 addition & 2 deletions w3f-ring-proof/src/piop/mod.rs
@@ -127,14 +127,13 @@ impl<F: PrimeField, G: AffineRepr<BaseField = F>> FixedColumns<F, G> {
}

// #[derive(CanonicalSerialize, CanonicalDeserialize)]
#[derive(Clone)]
pub struct ProverKey<F: PrimeField, CS: PCS<F>, G: AffineRepr<BaseField = F>> {
pub(crate) pcs_ck: CS::CK,
pub(crate) fixed_columns: FixedColumns<F, G>,
pub(crate) verifier_key: VerifierKey<F, CS>, // used in the Fiat-Shamir transform
}

#[derive(Clone, Debug, Eq, PartialEq, CanonicalSerialize, CanonicalDeserialize)]
#[derive(Debug, Eq, PartialEq, CanonicalSerialize, CanonicalDeserialize)]
pub struct VerifierKey<F: PrimeField, CS: PCS<F>> {
pub(crate) pcs_raw_vk: <CS::Params as PcsParams>::RVK,
pub(crate) fixed_columns_committed: FixedColumnsCommitted<F, CS::C>,
161 changes: 134 additions & 27 deletions w3f-ring-proof/src/ring_verifier.rs
@@ -2,12 +2,13 @@ use ark_ec::pairing::Pairing;
use ark_ec::twisted_edwards::{Affine, TECurveConfig};
use ark_ec::CurveGroup;
use ark_ff::PrimeField;
use ark_std::rand::RngCore;
use w3f_pcs::pcs::kzg::KZG;
use w3f_pcs::pcs::{RawVerifierKey, PCS};
use w3f_plonk_common::kzg_acc::KzgAccumulator;
use w3f_plonk_common::piop::VerifierPiop;
use w3f_plonk_common::transcript::PlonkTranscript;
use w3f_plonk_common::verifier::PlonkVerifier;
use w3f_plonk_common::verifier::{Challenges, PlonkVerifier};

use crate::piop::params::PiopParams;
use crate::piop::{FixedColumnsCommitted, PiopVerifier, VerifierKey};
@@ -89,40 +90,146 @@
}
}

impl<E, Jubjub, T> RingVerifier<E::ScalarField, KZG<E>, Jubjub, T>
/// Accumulating batch verifier for ring proofs using the KZG polynomial commitment scheme.
pub struct KzgBatchVerifier<E, J, T = ArkTranscript>
where
E: Pairing,
Jubjub: TECurveConfig<BaseField = E::ScalarField>,
J: TECurveConfig<BaseField = E::ScalarField>,
T: PlonkTranscript<E::ScalarField, KZG<E>>,
{
// Verifies a batch of proofs against the same ring.
pub fn verify_batch_kzg(
pub acc: KzgAccumulator<E>,
pub verifier: RingVerifier<E::ScalarField, KZG<E>, J, T>,
}

/// A ring proof that has been preprocessed for batch verification.
pub struct PreparedBatchItem<E, J>
where
E: Pairing,
J: TECurveConfig<BaseField = E::ScalarField>,
{
piop: PiopVerifier<E::ScalarField, <KZG<E> as PCS<E::ScalarField>>::C, Affine<J>>,
proof: RingProof<E::ScalarField, KZG<E>>,
challenges: Challenges<E::ScalarField>,
entropy: [u8; 32],
}

impl<E, J, T> KzgBatchVerifier<E, J, T>
where
E: Pairing,
J: TECurveConfig<BaseField = E::ScalarField>,
T: PlonkTranscript<E::ScalarField, KZG<E>>,
{
/// Prepares a ring proof for batch verification: restores the Fiat-Shamir challenges,
/// initializes the PIOP verifier, and draws entropy to be used later by `push_prepared`.
pub fn prepare(
&self,
proof: RingProof<E::ScalarField, KZG<E>>,
result: Affine<J>,
) -> PreparedBatchItem<E, J> {
let (challenges, mut rng) = self.verifier.plonk_verifier.restore_challenges(
&result,
&proof,
// '1' accounts for the quotient polynomial that is aggregated together with the columns
PiopVerifier::<E::ScalarField, <KZG<E> as PCS<_>>::C, Affine<J>>::N_COLUMNS + 1,
PiopVerifier::<E::ScalarField, <KZG<E> as PCS<_>>::C, Affine<J>>::N_CONSTRAINTS,
);
let seed = self.verifier.piop_params.seed;
let seed_plus_result = (seed + result).into_affine();
let domain_at_zeta = self.verifier.piop_params.domain.evaluate(challenges.zeta);
let piop = PiopVerifier::<_, _, Affine<J>>::init(
domain_at_zeta,
self.verifier.fixed_columns_committed.clone(),
proof.column_commitments.clone(),
proof.columns_at_zeta.clone(),
(seed.x, seed.y),
(seed_plus_result.x, seed_plus_result.y),
);

// Pick some entropy from the plonk verifier's rng for later use in `push_prepared`
let mut entropy = [0_u8; 32];
rng.fill_bytes(&mut entropy);

PreparedBatchItem {
piop,
proof,
challenges,
entropy,
}
}
Comment on lines +154 to +164
@davxy (Member, Author) on Jan 29, 2026:
@swasilyev @burdges @drskalman Need some extra attention here.
In practice, instead of immediately using the returned rng, we pick some randomness from it to be used later in the push_prepared

pub fn push_prepared(&mut self, item: PreparedBatchItem<E, J>) {
let mut ts = self.verifier.plonk_verifier.transcript_prelude.clone();
ts._add_serializable(b"batch-entropy", &item.entropy);
self.acc
.accumulate(item.piop, item.proof, item.challenges, &mut ts.to_rng());
}
Comment on lines +173 to +178
@davxy (Member, Author) on Jan 29, 2026:
@swasilyev @burdges @drskalman here I pick the randomness back to:
  • extend verifier transcript
  • and use the derived rng in accumulate


/// Adds a ring proof to the batch, preparing and accumulating it immediately.
///
/// The proof's pairing equation is aggregated into the internal accumulator.
/// Call `verify` after pushing all proofs to perform the batched verification.
pub fn push(&mut self, proof: RingProof<E::ScalarField, KZG<E>>, result: Affine<J>) {
let item = self.prepare(proof, result);
self.push_prepared(item);
}

/// Verifies all accumulated proofs at once.
pub fn verify(&self) -> bool {
self.acc.verify()
}
}
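
A minimal usage sketch of the batch API added in this file; bindings such as `ring_verifier`, `proofs`, and `results` are placeholders standing in for the values built in the test above, not part of the PR:

// Sketch only: `ring_verifier` is a RingVerifier over KZG, and `proofs`/`results`
// are the ring proofs with their corresponding results.
let mut batch = ring_verifier.kzg_batch_verifier();
for (proof, result) in proofs.into_iter().zip(results) {
    // Each push restores the proof's challenges and folds its pairing
    // equation into the accumulator.
    batch.push(proof, result);
}
// A single final check covers every pushed proof.
assert!(batch.verify());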

impl<E, J, T> RingVerifier<E::ScalarField, KZG<E>, J, T>
where
E: Pairing,
J: TECurveConfig<BaseField = E::ScalarField>,
T: PlonkTranscript<E::ScalarField, KZG<E>>,
{
/// Builds a new batch verifier.
pub fn kzg_batch_verifier(self) -> KzgBatchVerifier<E, J, T> {
KzgBatchVerifier {
acc: KzgAccumulator::<E>::new(self.plonk_verifier.pcs_vk.clone()),
verifier: self,
}
}

/// Verifies a batch of proofs against the same ring.
#[cfg(not(feature = "parallel"))]
pub fn verify_batch_kzg(
self,
proofs: Vec<RingProof<E::ScalarField, KZG<E>>>,
results: Vec<Affine<Jubjub>>,
results: Vec<Affine<J>>,
) -> bool {
let mut acc = KzgAccumulator::<E>::new(self.plonk_verifier.pcs_vk.clone());
let mut batch = self.kzg_batch_verifier();
for (proof, result) in proofs.into_iter().zip(results) {
let (challenges, mut rng) = self.plonk_verifier.restore_challenges(
&result,
&proof,
// '1' accounts for the quotient polynomial that is aggregated together with the columns
PiopVerifier::<E::ScalarField, <KZG<E> as PCS<_>>::C, Affine<Jubjub>>::N_COLUMNS + 1,
PiopVerifier::<E::ScalarField, <KZG<E> as PCS<_>>::C, Affine<Jubjub>>::N_CONSTRAINTS,
);
let seed = self.piop_params.seed;
let seed_plus_result = (seed + result).into_affine();
let domain_at_zeta = self.piop_params.domain.evaluate(challenges.zeta);
let piop = PiopVerifier::<_, _, Affine<Jubjub>>::init(
domain_at_zeta,
self.fixed_columns_committed.clone(),
proof.column_commitments.clone(),
proof.columns_at_zeta.clone(),
(seed.x, seed.y),
(seed_plus_result.x, seed_plus_result.y),
);
acc.accumulate(piop, proof, challenges, &mut rng);
batch.push(proof, result);
}
batch.verify()
}
}

#[cfg(feature = "parallel")]
impl<E, J, T> RingVerifier<E::ScalarField, KZG<E>, J, T>
where
E: Pairing,
J: TECurveConfig<BaseField = E::ScalarField>,
T: PlonkTranscript<E::ScalarField, KZG<E>> + Sync,
{
/// Verifies a batch of proofs against the same ring.
pub fn verify_batch_kzg(
self,
proofs: Vec<RingProof<E::ScalarField, KZG<E>>>,
results: Vec<Affine<J>>,
) -> bool {
use rayon::prelude::*;
let mut batch = self.kzg_batch_verifier();
let prepared: Vec<_> = proofs
.into_par_iter()
.zip(results)
.map(|(proof, result)| batch.prepare(proof, result))
.collect();
for item in prepared {
batch.push_prepared(item);
}
acc.verify()
batch.verify()
}
}