Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 12 additions & 6 deletions crates/stark-backend-v2/src/keygen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -218,12 +218,8 @@ impl AirKeygenBuilderV2 {
let air_name = self.air.name();

let symbolic_builder = self.get_symbolic_builder();
let vparams = StarkVerifyingParamsV2 {
width: symbolic_builder.width(),
num_public_values: symbolic_builder.num_public_values(),
};
// Deprecated in v2:
assert!(vparams.width.after_challenge.is_empty());
let width = symbolic_builder.width();
let num_public_values = symbolic_builder.num_public_values();

let symbolic_constraints = symbolic_builder.constraints();
let constraint_degree = symbolic_constraints.max_constraint_degree();
Expand All @@ -245,6 +241,16 @@ impl AirKeygenBuilderV2 {
} = self;

let dag = SymbolicConstraintsDag::from(symbolic_constraints);
let max_rotation = dag.constraints.max_rotation(); // TODO: exclude unused vars?
debug_assert!(max_rotation <= 1);
let vparams = StarkVerifyingParamsV2 {
width,
num_public_values,
need_rot: max_rotation == 1,
};
// Deprecated in v2:
assert!(vparams.width.after_challenge.is_empty());

let unused_variables = find_unused_vars(&dag, &vparams.width);
let vk = StarkVerifyingKeyV2 {
preprocessed_data: preprocessed_vdata,
Expand Down
2 changes: 2 additions & 0 deletions crates/stark-backend-v2/src/keygen/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ pub struct StarkVerifyingParamsV2 {
pub width: TraceWidth,
/// Number of public values for this STARK only
pub num_public_values: usize,
/// A flag indicating whether we need the rotations
pub need_rot: bool,
}

/// Verifier data for preprocessed trace for a single AIR.
Expand Down
17 changes: 17 additions & 0 deletions crates/stark-backend-v2/src/prover/cpu_backend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,12 @@ impl<TS: FiatShamirTranscript> OpeningProverV2<CpuBackendV2, TS> for CpuDeviceV2
) -> (StackingProof, WhirProof) {
let params = &self.config;

let need_rot_per_trace = ctx
.per_trace
.iter()
.map(|(air_idx, _)| mpk.per_air[*air_idx].vk.params.need_rot)
.collect_vec();

// Currently alternates between preprocessed and cached pcs data
let pre_cached_pcs_data_per_commit: Vec<_> = ctx
.per_trace
Expand All @@ -109,12 +115,23 @@ impl<TS: FiatShamirTranscript> OpeningProverV2<CpuBackendV2, TS> for CpuDeviceV2
for data in &pre_cached_pcs_data_per_commit {
stacked_per_commit.push(data);
}
let mut need_rot_per_commit = vec![need_rot_per_trace];
for (air_idx, air_ctx) in &ctx.per_trace {
let need_rot = mpk.per_air[*air_idx].vk.params.need_rot;
if mpk.per_air[*air_idx].preprocessed_data.is_some() {
need_rot_per_commit.push(vec![need_rot]);
}
for _ in &air_ctx.cached_mains {
need_rot_per_commit.push(vec![need_rot]);
}
}
let (stacking_proof, u_prisma) =
prove_stacked_opening_reduction::<_, _, _, StackedReductionCpu>(
self,
transcript,
self.config.n_stack,
stacked_per_commit,
need_rot_per_commit,
&r,
);

Expand Down
62 changes: 38 additions & 24 deletions crates/stark-backend-v2/src/prover/logup_zerocheck/cpu.rs
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,8 @@ impl<'a> LogupZerocheckCpu<'a> {
}
}
}
let needs_next = rotation > 0;
let needs_next = pk.vk.params.need_rot;
debug_assert_eq!(needs_next, rotation > 0);
let symbolic_constraints = SymbolicConstraints::from(&pk.vk.symbolic_constraints);
EvalHelper {
constraints_dag: &pk.vk.symbolic_constraints.constraints,
Expand Down Expand Up @@ -603,29 +604,42 @@ impl<'a> LogupZerocheckCpu<'a> {
let mut column_openings = Vec::with_capacity(num_airs_present);
// At the end, we've folded all MLEs so they only have one row equal to evaluation at `\vec
// r`.
for mut mat_evals in take(&mut self.mat_evals_per_trace) {
// Order of mats is:
// - preprocessed (if has_preprocessed),
// - preprocessed_rot (if has_preprocessed),
// - cached(0), cached(0)_rot, ...
// - common_main
// - common_main_rot
// For column openings, we pop common_main, common_main_rot and put it at the front
assert_eq!(mat_evals.len() % 2, 0); // always include rot for now
let common_main_rot = mat_evals.pop().unwrap();
let common_main = mat_evals.pop().unwrap();
let openings_of_air = iter::once(&[common_main, common_main_rot] as &[_])
.chain(mat_evals.chunks_exact(2))
.map(|pair| {
zip(pair[0].columns(), pair[1].columns())
.map(|(claim, claim_rot)| {
assert_eq!(claim.len(), 1);
assert_eq!(claim_rot.len(), 1);
(claim[0], claim_rot[0])
})
.collect_vec()
})
.collect_vec();
for (helper, mut mat_evals) in self
.eval_helpers
.iter()
.zip(take(&mut self.mat_evals_per_trace))
{
// For column openings, we pop common_main (and common_main_rot when present) and put it
// at the front.
let openings_of_air = if helper.needs_next {
let common_main_rot = mat_evals.pop().unwrap();
let common_main = mat_evals.pop().unwrap();
iter::once(&[common_main, common_main_rot] as &[_])
.chain(mat_evals.chunks_exact(2))
.map(|pair| {
zip(pair[0].columns(), pair[1].columns())
.map(|(claim, claim_rot)| {
assert_eq!(claim.len(), 1);
assert_eq!(claim_rot.len(), 1);
(claim[0], claim_rot[0])
})
.collect_vec()
})
.collect_vec()
} else {
let common_main = mat_evals.pop().unwrap();
iter::once(common_main)
.chain(mat_evals.into_iter())
.map(|mat| {
mat.columns()
.map(|claim| {
assert_eq!(claim.len(), 1);
(claim[0], EF::ZERO)
})
.collect_vec()
})
.collect_vec()
};
column_openings.push(openings_of_air);
}
column_openings
Expand Down
39 changes: 29 additions & 10 deletions crates/stark-backend-v2/src/prover/logup_zerocheck/single.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,20 +39,29 @@ impl<'a> EvalHelper<'a, crate::F> {
&self,
ctx: &'a AirProvingContextV2<CpuBackendV2>,
) -> Vec<(StridedColMajorMatrixView<'a, crate::F>, bool)> {
let mut mats = Vec::with_capacity(
2 * (usize::from(self.has_preprocessed()) + 1 + ctx.cached_mains.len()),
);
let base_mats = usize::from(self.has_preprocessed()) + 1 + ctx.cached_mains.len();
let mut mats = Vec::with_capacity(if self.needs_next {
2 * base_mats
} else {
base_mats
});
if let Some(mat) = self.preprocessed_trace {
mats.push((mat, false));
mats.push((mat, true));
if self.needs_next {
mats.push((mat, true));
}
}
for cd in ctx.cached_mains.iter() {
let trace_view = cd.data.mat_view(0);
mats.push((trace_view, false));
mats.push((trace_view, true));
if self.needs_next {
mats.push((trace_view, true));
}
}
mats.push((ctx.common_main.as_view().into(), false));
mats.push((ctx.common_main.as_view().into(), true));
if self.needs_next {
mats.push((ctx.common_main.as_view().into(), true));
}
mats
}
}
Expand Down Expand Up @@ -148,10 +157,20 @@ impl<F: TwoAdicField> EvalHelper<'_, F> {
row_parts: &[Vec<FF>],
) -> ProverConstraintEvaluator<'_, F, FF> {
let sels = &row_parts[0];
let mut view_pairs = row_parts[1..]
.chunks_exact(2)
.map(|pair| ViewPair::new(&pair[0], self.needs_next.then(|| &pair[1][..])))
.collect_vec();
let mut view_pairs = if self.needs_next {
let mut chunks = row_parts[1..].chunks_exact(2);
let pairs = chunks
.by_ref()
.map(|pair| ViewPair::new(&pair[0], Some(&pair[1][..])))
.collect_vec();
debug_assert!(chunks.remainder().is_empty());
pairs
} else {
row_parts[1..]
.iter()
.map(|part| ViewPair::new(part, None))
.collect_vec()
};
let mut preprocessed = None;
if self.has_preprocessed() {
preprocessed = Some(view_pairs.remove(0));
Expand Down
Loading