18 changes: 12 additions & 6 deletions crates/stark-backend-v2/src/keygen/mod.rs
@@ -218,12 +218,8 @@ impl AirKeygenBuilderV2 {
let air_name = self.air.name();

let symbolic_builder = self.get_symbolic_builder();
let vparams = StarkVerifyingParamsV2 {
width: symbolic_builder.width(),
num_public_values: symbolic_builder.num_public_values(),
};
// Deprecated in v2:
assert!(vparams.width.after_challenge.is_empty());
let width = symbolic_builder.width();
let num_public_values = symbolic_builder.num_public_values();

let symbolic_constraints = symbolic_builder.constraints();
let constraint_degree = symbolic_constraints.max_constraint_degree();
@@ -245,6 +241,16 @@ impl AirKeygenBuilderV2 {
} = self;

let dag = SymbolicConstraintsDag::from(symbolic_constraints);
let max_rotation = dag.constraints.max_rotation(); // TODO: exclude unused vars?
debug_assert!(max_rotation <= 1);
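// A maximum rotation of 1 means some constraint references the next row, so the proof
// must also carry the rotational-convolution openings for this AIR.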
let vparams = StarkVerifyingParamsV2 {
width,
num_public_values,
need_rot: max_rotation == 1,
};
// Deprecated in v2:
assert!(vparams.width.after_challenge.is_empty());

let unused_variables = find_unused_vars(&dag, &vparams.width);
let vk = StarkVerifyingKeyV2 {
preprocessed_data: preprocessed_vdata,
2 changes: 2 additions & 0 deletions crates/stark-backend-v2/src/keygen/types.rs
@@ -31,6 +31,8 @@ pub struct StarkVerifyingParamsV2 {
pub width: TraceWidth,
/// Number of public values for this STARK only
pub num_public_values: usize,
/// A flag indicating whether the rotation (next-row) openings are needed
pub need_rot: bool,
}

/// Verifier data for preprocessed trace for a single AIR.
26 changes: 20 additions & 6 deletions crates/stark-backend-v2/src/proof.rs
@@ -7,6 +7,8 @@ use crate::{
Digest, EF, F,
};

use p3_field::FieldAlgebra;

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Proof {
/// The commitment to the data in common_main.
@@ -82,11 +84,23 @@ pub struct BatchConstraintProof {
/// For rounds `1, ..., n_max`; evaluations on `{1, ..., vk.d + 1}`.
pub sumcheck_round_polys: Vec<Vec<EF>>,

/// Per AIR **in sorted AIR order**, per AIR part, per column index in that part, opening of
/// the prismalinear column polynomial and its rotational convolution.
/// The trace parts are ordered: [CommonMain (part
/// 0), Preprocessed (if any), Cached(0), Cached(1), ...]
pub column_openings: Vec<Vec<Vec<(EF, EF)>>>,
/// Per AIR **in sorted AIR order**, per AIR part, per column index in that part, openings for
/// the prismalinear column polynomial and (optionally) its rotational convolution. Each part's
/// openings are stored as a flat vector: either the column openings alone, or the column
/// openings interleaved with their rotations when the AIR needs them. The trace parts are
/// ordered: [CommonMain (part 0), Preprocessed (if any), Cached(0), Cached(1), ...]
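/// For example, with rotations a part with columns `c0, c1` is stored as
/// `[c0, rot(c0), c1, rot(c1)]`; without rotations it is stored as `[c0, c1]`.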
pub column_openings: Vec<Vec<Vec<EF>>>,
}

pub fn column_openings_by_rot<'a>(
openings: &'a [EF],
need_rot: bool,
) -> Box<dyn Iterator<Item = (EF, EF)> + 'a> {
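// With rotations, the flat slice interleaves each claim with its rotational claim; without
// them, every claim stands alone and the rotation slot is padded with zero.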
if need_rot {
Box::new(openings.chunks_exact(2).map(|chunk| (chunk[0], chunk[1])))
} else {
Box::new(openings.iter().map(|&claim| (claim, EF::ZERO)))
}
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
@@ -384,7 +398,7 @@ impl Decode for BatchConstraintProof {

let mut column_openings = Vec::with_capacity(num_present_airs);
for _ in 0..num_present_airs {
column_openings.push(Vec::<Vec<(EF, EF)>>::decode(reader)?);
column_openings.push(Vec::<Vec<EF>>::decode(reader)?);
}

Ok(Self {
17 changes: 17 additions & 0 deletions crates/stark-backend-v2/src/prover/cpu_backend.rs
@@ -92,6 +92,12 @@ impl<TS: FiatShamirTranscript> OpeningProverV2<CpuBackendV2, TS> for CpuDeviceV2
) -> (StackingProof, WhirProof) {
let params = &self.config;

let need_rot_per_trace = ctx
.per_trace
.iter()
.map(|(air_idx, _)| mpk.per_air[*air_idx].vk.params.need_rot)
.collect_vec();

// Currently alternates between preprocessed and cached pcs data
let pre_cached_pcs_data_per_commit: Vec<_> = ctx
.per_trace
@@ -109,12 +115,23 @@ impl<TS: FiatShamirTranscript> OpeningProverV2<CpuBackendV2, TS> for CpuDeviceV2
for data in &pre_cached_pcs_data_per_commit {
stacked_per_commit.push(data);
}
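// Mirror the commitment order above: the common-main commitment gets one `need_rot` flag per
// present trace, while each preprocessed/cached commitment gets a single flag for its AIR.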
let mut need_rot_per_commit = vec![need_rot_per_trace];
for (air_idx, air_ctx) in &ctx.per_trace {
let need_rot = mpk.per_air[*air_idx].vk.params.need_rot;
if mpk.per_air[*air_idx].preprocessed_data.is_some() {
need_rot_per_commit.push(vec![need_rot]);
}
for _ in &air_ctx.cached_mains {
need_rot_per_commit.push(vec![need_rot]);
}
}
let (stacking_proof, u_prisma) =
prove_stacked_opening_reduction::<_, _, _, StackedReductionCpu>(
self,
transcript,
self.config.n_stack,
stacked_per_commit,
need_rot_per_commit,
&r,
);

64 changes: 39 additions & 25 deletions crates/stark-backend-v2/src/prover/logup_zerocheck/cpu.rs
@@ -152,7 +152,8 @@ impl<'a> LogupZerocheckCpu<'a> {
}
}
}
let needs_next = rotation > 0;
let needs_next = pk.vk.params.need_rot;
debug_assert_eq!(needs_next, rotation > 0);
let symbolic_constraints = SymbolicConstraints::from(&pk.vk.symbolic_constraints);
EvalHelper {
constraints_dag: &pk.vk.symbolic_constraints.constraints,
@@ -598,34 +599,47 @@ impl<'a> LogupZerocheckCpu<'a> {
}
}

pub fn into_column_openings(mut self) -> Vec<Vec<Vec<(EF, EF)>>> {
pub fn into_column_openings(&mut self) -> Vec<Vec<Vec<EF>>> {
let num_airs_present = self.mat_evals_per_trace.len();
let mut column_openings = Vec::with_capacity(num_airs_present);
// At the end, we've folded all MLEs so they only have one row equal to evaluation at `\vec
// r`.
for mut mat_evals in take(&mut self.mat_evals_per_trace) {
// Order of mats is:
// - preprocessed (if has_preprocessed),
// - preprocessed_rot (if has_preprocessed),
// - cached(0), cached(0)_rot, ...
// - common_main
// - common_main_rot
// For column openings, we pop common_main, common_main_rot and put it at the front
assert_eq!(mat_evals.len() % 2, 0); // always include rot for now
let common_main_rot = mat_evals.pop().unwrap();
let common_main = mat_evals.pop().unwrap();
let openings_of_air = iter::once(&[common_main, common_main_rot] as &[_])
.chain(mat_evals.chunks_exact(2))
.map(|pair| {
zip(pair[0].columns(), pair[1].columns())
.map(|(claim, claim_rot)| {
assert_eq!(claim.len(), 1);
assert_eq!(claim_rot.len(), 1);
(claim[0], claim_rot[0])
})
.collect_vec()
})
.collect_vec();
for (helper, mut mat_evals) in self
.eval_helpers
.iter()
.zip(take(&mut self.mat_evals_per_trace))
{
// For column openings, we pop common_main (and common_main_rot when present) and put it
// at the front.
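// When rotations are needed, `mat_evals` interleaves each matrix with its rotated copy, so
// two entries are popped; otherwise only the matrix itself is popped.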
let openings_of_air: Vec<Vec<EF>> = if helper.needs_next {
let common_main_rot = mat_evals.pop().unwrap();
let common_main = mat_evals.pop().unwrap();
iter::once(&[common_main, common_main_rot] as &[_])
.chain(mat_evals.chunks_exact(2))
.map(|pair| {
zip(pair[0].columns(), pair[1].columns())
.flat_map(|(claim, claim_rot)| {
assert_eq!(claim.len(), 1);
assert_eq!(claim_rot.len(), 1);
[claim[0], claim_rot[0]]
})
.collect_vec()
})
.collect_vec()
} else {
let common_main = mat_evals.pop().unwrap();
iter::once(common_main)
.chain(mat_evals.into_iter())
.map(|mat| {
mat.columns()
.map(|claim| {
assert_eq!(claim.len(), 1);
claim[0]
})
.collect_vec()
})
.collect_vec()
};
column_openings.push(openings_of_air);
}
column_openings
18 changes: 9 additions & 9 deletions crates/stark-backend-v2/src/prover/logup_zerocheck/mod.rs
@@ -16,7 +16,7 @@ use crate::{
dft::Radix2BowersSerial,
poly_common::{eq_sharp_uni_poly, eq_uni_poly, UnivariatePoly},
poseidon2::sponge::FiatShamirTranscript,
proof::{BatchConstraintProof, GkrProof},
proof::{column_openings_by_rot, BatchConstraintProof, GkrProof},
prover::{
fractional_sumcheck_gkr::{fractional_sumcheck, Frac},
stacked_pcs::StackedLayout,
@@ -395,17 +395,17 @@
let column_openings = prover.into_column_openings();

// Observe common main openings first, and then preprocessed/cached
for openings in &column_openings {
for (claim, claim_rot) in &openings[0] {
transcript.observe_ext(*claim);
transcript.observe_ext(*claim_rot);
for (helper, openings) in prover.eval_helpers.iter().zip(column_openings.iter()) {
for (claim, claim_rot) in column_openings_by_rot(&openings[0], helper.needs_next) {
transcript.observe_ext(claim);
transcript.observe_ext(claim_rot);
}
}
for openings in &column_openings {
for (helper, openings) in prover.eval_helpers.iter().zip(column_openings.iter()) {
for part in openings.iter().skip(1) {
for (claim, claim_rot) in part {
transcript.observe_ext(*claim);
transcript.observe_ext(*claim_rot);
for (claim, claim_rot) in column_openings_by_rot(part, helper.needs_next) {
transcript.observe_ext(claim);
transcript.observe_ext(claim_rot);
}
}
}
39 changes: 29 additions & 10 deletions crates/stark-backend-v2/src/prover/logup_zerocheck/single.rs
@@ -39,20 +39,29 @@ impl<'a> EvalHelper<'a, crate::F> {
&self,
ctx: &'a AirProvingContextV2<CpuBackendV2>,
) -> Vec<(StridedColMajorMatrixView<'a, crate::F>, bool)> {
let mut mats = Vec::with_capacity(
2 * (usize::from(self.has_preprocessed()) + 1 + ctx.cached_mains.len()),
);
let base_mats = usize::from(self.has_preprocessed()) + 1 + ctx.cached_mains.len();
let mut mats = Vec::with_capacity(if self.needs_next {
2 * base_mats
} else {
base_mats
});
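// Each trace part contributes its column-major view once, plus a rotated (next-row) view
// only when the constraints reference the next row.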
if let Some(mat) = self.preprocessed_trace {
mats.push((mat, false));
mats.push((mat, true));
if self.needs_next {
mats.push((mat, true));
}
}
for cd in ctx.cached_mains.iter() {
let trace_view = cd.data.mat_view(0);
mats.push((trace_view, false));
mats.push((trace_view, true));
if self.needs_next {
mats.push((trace_view, true));
}
}
mats.push((ctx.common_main.as_view().into(), false));
mats.push((ctx.common_main.as_view().into(), true));
if self.needs_next {
mats.push((ctx.common_main.as_view().into(), true));
}
mats
}
}
@@ -148,10 +157,20 @@ impl<F: TwoAdicField> EvalHelper<'_, F> {
row_parts: &[Vec<FF>],
) -> ProverConstraintEvaluator<'_, F, FF> {
let sels = &row_parts[0];
let mut view_pairs = row_parts[1..]
.chunks_exact(2)
.map(|pair| ViewPair::new(&pair[0], self.needs_next.then(|| &pair[1][..])))
.collect_vec();
let mut view_pairs = if self.needs_next {
let mut chunks = row_parts[1..].chunks_exact(2);
let pairs = chunks
.by_ref()
.map(|pair| ViewPair::new(&pair[0], Some(&pair[1][..])))
.collect_vec();
debug_assert!(chunks.remainder().is_empty());
pairs
} else {
row_parts[1..]
.iter()
.map(|part| ViewPair::new(part, None))
.collect_vec()
};
let mut preprocessed = None;
if self.has_preprocessed() {
preprocessed = Some(view_pairs.remove(0));
Expand Down