poly-commitment: rename elems field into chunks
The new name is more meaningful because the field actually contains chunks.
In subsequent pull requests, the abstraction over PolyComm will be changed to
avoid erroneous usage of PolyComm.
dannywillems committed Oct 4, 2024
1 parent ac4e914 commit 86aeee7
Showing 22 changed files with 174 additions and 165 deletions.
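
For context, the change is purely a field rename on the commitment type. The sketch below shows the before/after shape; it is a simplified stand-in for the real PolyComm in the poly-commitment crate (generics, visibility and derives omitted), not its exact definition.

// Before this commit, a chunked polynomial commitment exposed its parts as
// `elems`:
//
//     struct PolyComm<G> {
//         elems: Vec<G>,
//     }
//
// After this commit, the field is named `chunks`: one group element per chunk
// of the committed polynomial.
struct PolyComm<G> {
    chunks: Vec<G>,
}

impl<G: Copy> PolyComm<G> {
    // Hypothetical helper, only for illustration: where call sites previously
    // read `comm.elems[0]`, they now read `comm.chunks[0]`, as the diffs below
    // show.
    fn first_chunk(&self) -> G {
        self.chunks[0]
    }
}
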
16 changes: 8 additions & 8 deletions arrabiata/src/witness.rs
@@ -496,7 +496,7 @@ where
let idx_col = idx / 2;
debug!("Absorbing the accumulator for the column index {idx_col}. After this, there will still be {} elements to absorb", NUMBER_OF_VALUES_TO_ABSORB_PUBLIC_IO - idx - 1);
if self.current_iteration % 2 == 0 {
let (pt_x, pt_y) = self.ivc_accumulator_e2[idx_col].elems[0]
let (pt_x, pt_y) = self.ivc_accumulator_e2[idx_col].chunks[0]
.to_coordinates()
.unwrap();
if idx % 2 == 0 {
@@ -505,7 +505,7 @@ where
self.write_public_input(pos, pt_y.to_biguint().into())
}
} else {
let (pt_x, pt_y) = self.ivc_accumulator_e1[idx_col].elems[0]
let (pt_x, pt_y) = self.ivc_accumulator_e1[idx_col].chunks[0]
.to_coordinates()
.unwrap();
if idx % 2 == 0 {
@@ -544,7 +544,7 @@ where
if self.current_iteration % 2 == 0 {
match side {
Side::Left => {
let pt = self.previous_commitments_e2[i_comm].elems[0];
let pt = self.previous_commitments_e2[i_comm].chunks[0];
// We suppose we never have a commitment equals to the
// point at infinity
let (pt_x, pt_y) = pt.to_coordinates().unwrap();
@@ -570,7 +570,7 @@ where
} else {
match side {
Side::Left => {
let pt = self.previous_commitments_e1[i_comm].elems[0];
let pt = self.previous_commitments_e1[i_comm].chunks[0];
// We suppose we never have a commitment equals to the
// point at infinity
let (pt_x, pt_y) = pt.to_coordinates().unwrap();
@@ -603,22 +603,22 @@ where
let (pt_x, pt_y): (BigInt, BigInt) = match side {
Side::Left => {
if self.current_iteration % 2 == 0 {
let pt = self.ivc_accumulator_e2[i_comm].elems[0];
let pt = self.ivc_accumulator_e2[i_comm].chunks[0];
let (x, y) = pt.to_coordinates().unwrap();
(x.to_biguint().into(), y.to_biguint().into())
} else {
let pt = self.ivc_accumulator_e1[i_comm].elems[0];
let pt = self.ivc_accumulator_e1[i_comm].chunks[0];
let (x, y) = pt.to_coordinates().unwrap();
(x.to_biguint().into(), y.to_biguint().into())
}
}
Side::Right => {
if self.current_iteration % 2 == 0 {
let pt = self.previous_commitments_e2[i_comm].elems[0];
let pt = self.previous_commitments_e2[i_comm].chunks[0];
let (x, y) = pt.to_coordinates().unwrap();
(x.to_biguint().into(), y.to_biguint().into())
} else {
let pt = self.previous_commitments_e1[i_comm].elems[0];
let pt = self.previous_commitments_e1[i_comm].chunks[0];
let (x, y) = pt.to_coordinates().unwrap();
(x.to_biguint().into(), y.to_biguint().into())
}
6 changes: 3 additions & 3 deletions arrabiata/tests/witness.rs
@@ -162,7 +162,7 @@ fn test_unit_witness_elliptic_curve_addition() {
assert_eq!(env.current_iteration, 0);
let (exp_x3, exp_y3) = {
let res: Pallas =
(env.ivc_accumulator_e2[0].elems[0] + env.previous_commitments_e2[0].elems[0]).into();
(env.ivc_accumulator_e2[0].chunks[0] + env.previous_commitments_e2[0].chunks[0]).into();
let (x3, y3) = res.to_coordinates().unwrap();
(
x3.to_biguint().to_bigint().unwrap(),
@@ -181,7 +181,7 @@ fn test_unit_witness_elliptic_curve_addition() {
assert_eq!(env.current_iteration, 1);
let (exp_x3, exp_y3) = {
let res: Vesta =
(env.ivc_accumulator_e1[0].elems[0] + env.previous_commitments_e1[0].elems[0]).into();
(env.ivc_accumulator_e1[0].chunks[0] + env.previous_commitments_e1[0].chunks[0]).into();
let (x3, y3) = res.to_coordinates().unwrap();
(
x3.to_biguint().to_bigint().unwrap(),
@@ -200,7 +200,7 @@ fn test_unit_witness_elliptic_curve_addition() {
assert_eq!(env.current_iteration, 2);
let (exp_x3, exp_y3) = {
let res: Pallas =
(env.ivc_accumulator_e2[0].elems[0] + env.previous_commitments_e2[0].elems[0]).into();
(env.ivc_accumulator_e2[0].chunks[0] + env.previous_commitments_e2[0].chunks[0]).into();
let (x3, y3) = res.to_coordinates().unwrap();
(
x3.to_biguint().to_bigint().unwrap(),
18 changes: 10 additions & 8 deletions folding/src/decomposable_folding.rs
@@ -112,11 +112,11 @@ impl<'a, CF: FoldingConfig> DecomposableFoldingScheme<'a, CF> {

// sanity check to verify that we only have one commitment in polycomm
// (i.e. domain = poly size)
assert_eq!(error_commitments[0].elems.len(), 1);
assert_eq!(error_commitments[1].elems.len(), 1);
assert_eq!(error_commitments[0].chunks.len(), 1);
assert_eq!(error_commitments[1].chunks.len(), 1);

let t0 = &error_commitments[0].elems[0];
let t1 = &error_commitments[1].elems[0];
let t0 = &error_commitments[0].chunks[0];
let t1 = &error_commitments[1].chunks[0];

let to_absorb = env.to_absorb(t0, t1);
fq_sponge.absorb_fr(&to_absorb.0);
@@ -175,16 +175,18 @@ impl<'a, CF: FoldingConfig> DecomposableFoldingScheme<'a, CF> {

// sanity check to verify that we only have one commitment in polycomm
// (i.e. domain = poly size)
assert_eq!(error_commitments[0].elems.len(), 1);
assert_eq!(error_commitments[1].elems.len(), 1);
assert_eq!(error_commitments[0].chunks.len(), 1);
assert_eq!(error_commitments[1].chunks.len(), 1);

let to_absorb = {
let mut left = a.to_absorb();
let right = b.to_absorb();
left.0.extend(right.0);
left.1.extend(right.1);
left.1
.extend([error_commitments[0].elems[0], error_commitments[1].elems[0]]);
left.1.extend([
error_commitments[0].chunks[0],
error_commitments[1].chunks[0],
]);
left
};

8 changes: 4 additions & 4 deletions folding/src/instance_witness.rs
@@ -209,8 +209,8 @@ impl<G: CommitmentCurve, I: Instance<G>> Instance<G> for ExtendedInstance<G, I>
fn to_absorb(&self) -> (Vec<G::ScalarField>, Vec<G>) {
let mut elements = self.instance.to_absorb();
let extended_commitments = self.extended.iter().map(|commit| {
assert_eq!(commit.elems.len(), 1);
commit.elems[0]
assert_eq!(commit.chunks.len(), 1);
commit.chunks[0]
});
elements.1.extend(extended_commitments);
elements
@@ -256,8 +256,8 @@ impl<G: CommitmentCurve, I: Instance<G>> RelaxedInstance<G, I> {
pub fn to_absorb(&self) -> (Vec<G::ScalarField>, Vec<G>) {
let mut elements = self.extended_instance.to_absorb();
elements.0.push(self.u);
assert_eq!(self.error_commitment.elems.len(), 1);
elements.1.push(self.error_commitment.elems[0]);
assert_eq!(self.error_commitment.chunks.len(), 1);
elements.1.push(self.error_commitment.chunks[0]);
elements
}

20 changes: 11 additions & 9 deletions folding/src/lib.rs
@@ -235,11 +235,11 @@ impl<'a, CF: FoldingConfig> FoldingScheme<'a, CF> {

// sanity check to verify that we only have one commitment in polycomm
// (i.e. domain = poly size)
assert_eq!(error_commitments[0].elems.len(), 1);
assert_eq!(error_commitments[1].elems.len(), 1);
assert_eq!(error_commitments[0].chunks.len(), 1);
assert_eq!(error_commitments[1].chunks.len(), 1);

let t_0 = &error_commitments[0].elems[0];
let t_1 = &error_commitments[1].elems[0];
let t_0 = &error_commitments[0].chunks[0];
let t_1 = &error_commitments[1].chunks[0];

// Absorbing the commitments into the sponge
let to_absorb = env.to_absorb(t_0, t_1);
@@ -300,16 +300,18 @@ impl<'a, CF: FoldingConfig> FoldingScheme<'a, CF> {

// sanity check to verify that we only have one commitment in polycomm
// (i.e. domain = poly size)
assert_eq!(error_commitments[0].elems.len(), 1);
assert_eq!(error_commitments[1].elems.len(), 1);
assert_eq!(error_commitments[0].chunks.len(), 1);
assert_eq!(error_commitments[1].chunks.len(), 1);

let to_absorb = {
let mut left = a.to_absorb();
let right = b.to_absorb();
left.0.extend(right.0);
left.1.extend(right.1);
left.1
.extend([error_commitments[0].elems[0], error_commitments[1].elems[0]]);
left.1.extend([
error_commitments[0].chunks[0],
error_commitments[1].chunks[0],
]);
left
};

@@ -341,7 +343,7 @@ impl<'a, CF: FoldingConfig> FoldingScheme<'a, CF> {
let right = right_instance.to_absorb();
left.0.extend(right.0);
left.1.extend(right.1);
left.1.extend([t_0.elems[0], t_1.elems[0]]);
left.1.extend([t_0.chunks[0], t_1.chunks[0]]);
left
};

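
The repeated sanity checks above (chunks.len() == 1) rely on the committed polynomial fitting into a single SRS-sized chunk, which is what the "domain = poly size" comment refers to. The helper below is a rough illustration only, assuming the usual ceiling-division chunking rule rather than quoting the poly-commitment crate:

fn expected_chunks(poly_len: usize, srs_size: usize) -> usize {
    // A polynomial of length `poly_len`, committed with an SRS of size
    // `srs_size`, splits into ceil(poly_len / srs_size) chunks; when the
    // evaluation domain is no larger than the SRS, that is exactly one chunk,
    // which is what the asserts above check.
    poly_len.div_ceil(srs_size).max(1)
}

// Example: expected_chunks(1 << 10, 1 << 10) == 1.
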
6 changes: 3 additions & 3 deletions folding/tests/test_decomposable_folding.rs
@@ -258,7 +258,7 @@ fn instance_from_witness(
.0
.iter()
.map(|w| srs.commit_evaluations_non_hiding(domain, w))
.map(|c| c.elems[0])
.map(|c| c.chunks[0])
.collect_vec();
let commitments: [_; 5] = commitments.try_into().unwrap();

@@ -465,8 +465,8 @@ fn test_decomposable_folding() {
// show that there is some non trivial computation.
assert_eq!(t_0.len(), 1);
assert_eq!(t_1.len(), 1);
assert!(!t_0.elems[0].is_zero());
assert!(!t_1.elems[0].is_zero());
assert!(!t_0.chunks[0].is_zero());
assert!(!t_1.chunks[0].is_zero());

let checker = ExtendedProvider::new(folded_instance, folded_witness);
debug!("exp: \n {:#?}", final_constraint.to_string());
6 changes: 3 additions & 3 deletions folding/tests/test_folding_with_quadriticization.rs
@@ -258,7 +258,7 @@ fn instance_from_witness(
.0
.iter()
.map(|w| srs.commit_evaluations_non_hiding(domain, w))
.map(|c| c.elems[0])
.map(|c| c.chunks[0])
.collect_vec();
let commitments: [_; 5] = commitments.try_into().unwrap();

@@ -503,8 +503,8 @@ fn test_quadriticization() {
// show that there is some non trivial computation.
assert_eq!(t_0.len(), 1);
assert_eq!(t_1.len(), 1);
assert!(!t_0.elems[0].is_zero());
assert!(!t_1.elems[0].is_zero());
assert!(!t_0.chunks[0].is_zero());
assert!(!t_1.chunks[0].is_zero());

let checker = ExtendedProvider::new(folded_instance, folded_witness);

6 changes: 3 additions & 3 deletions folding/tests/test_vanilla_folding.rs
@@ -224,7 +224,7 @@ fn instance_from_witness(
.0
.iter()
.map(|w| srs.commit_evaluations_non_hiding(domain, w))
.map(|c| c.elems[0])
.map(|c| c.chunks[0])
.collect_vec();
let commitments: [_; 3] = commitments.try_into().unwrap();

@@ -493,8 +493,8 @@ fn test_folding_instance() {
// show that there is some non trivial computation.
assert_eq!(t_0.len(), 1);
assert_eq!(t_1.len(), 1);
assert!(!t_0.elems[0].is_zero());
assert!(!t_1.elems[0].is_zero());
assert!(!t_0.chunks[0].is_zero());
assert!(!t_1.chunks[0].is_zero());

// checking that we have the expected number of elements to absorb
// 3+2 from each instance + 1 from u, times 2 instances
6 changes: 3 additions & 3 deletions ivc/src/plonkish_lang.rs
@@ -177,13 +177,13 @@ impl<G: CommitmentCurve, const N_COL: usize, const N_ALPHAS: usize>

// Absorbing commitments
(&commitments).into_iter().for_each(|c| {
assert!(c.elems.len() == 1);
assert!(c.chunks.len() == 1);
absorb_commitment(fq_sponge, c)
});

let commitments: [G; N_COL] = commitments
.into_iter()
.map(|c| c.elems[0])
.map(|c| c.chunks[0])
.collect_vec()
.try_into()
.unwrap();
@@ -215,7 +215,7 @@ impl<G: CommitmentCurve, const N_COL: usize, const N_ALPHAS: usize>
// Absorbing commitments
self.commitments
.iter()
.for_each(|c| absorb_commitment(fq_sponge, &PolyComm { elems: vec![*c] }));
.for_each(|c| absorb_commitment(fq_sponge, &PolyComm { chunks: vec![*c] }));

let beta = fq_sponge.challenge();
let gamma = fq_sponge.challenge();
6 changes: 3 additions & 3 deletions ivc/src/prover.rs
@@ -234,7 +234,7 @@ where

let witness_comms: Witness<N_WIT_QUAD, PolyComm<G>> = {
let blinders = PolyComm {
elems: vec![Fp::one()],
chunks: vec![Fp::one()],
};
let comm = {
|poly: &DensePolynomial<Fp>| {
@@ -438,10 +438,10 @@ where

let coefficients_form = DensePolynomialOrEvaluations::DensePolynomial;
let non_hiding = |d1_size| PolyComm {
elems: vec![Fp::zero(); d1_size],
chunks: vec![Fp::zero(); d1_size],
};
let hiding = |d1_size| PolyComm {
elems: vec![Fp::one(); d1_size],
chunks: vec![Fp::one(); d1_size],
};

// Gathering all polynomials_to_open to use in the opening proof
20 changes: 10 additions & 10 deletions ivc/tests/simple.rs
@@ -491,8 +491,8 @@ pub fn heavy_test_simple_add() {

// The polynomial of the computation is linear, therefore, the error terms
// are zero
assert_ne!(folding_output_one.t_0.elems[0], Curve::zero());
assert_ne!(folding_output_one.t_1.elems[0], Curve::zero());
assert_ne!(folding_output_one.t_0.chunks[0], Curve::zero());
assert_ne!(folding_output_one.t_1.chunks[0], Curve::zero());

// Sanity check that the u values are the same. The u value is there to
// homogeneoize the polynomial describing the NP relation.
.relaxed_extended_left_instance
.extended_instance
.extended;
let extended_comms: Vec<_> = extended.iter().map(|x| x.elems[0]).collect();
let extended_comms: Vec<_> = extended.iter().map(|x| x.chunks[0]).collect();
comms_left.extend(extended_comms.clone());
extended_comms.iter().enumerate().for_each(|(i, x)| {
assert_ne!(
@@ -553,7 +553,7 @@ pub fn heavy_test_simple_add() {
.relaxed_extended_right_instance
.extended_instance
.extended;
comms_right.extend(extended.iter().map(|x| x.elems[0]));
comms_right.extend(extended.iter().map(|x| x.chunks[0]));
}
assert_eq!(comms_right.len(), N_COL_TOTAL_QUAD);
// Checking they are all not zero.
comms_out.extend(folded_instance_one.extended_instance.instance.commitments);
{
let extended = folded_instance_one.extended_instance.extended.clone();
comms_out.extend(extended.iter().map(|x| x.elems[0]));
comms_out.extend(extended.iter().map(|x| x.chunks[0]));
}
// Checking they are all not zero.
comms_out.iter().for_each(|c| {
@@ -607,9 +607,9 @@ pub fn heavy_test_simple_add() {
.commitment;

let error_terms = [
left_error_term.elems[0],
right_error_term.elems[0],
folded_instance_one.error_commitment.elems[0],
left_error_term.chunks[0],
right_error_term.chunks[0],
folded_instance_one.error_commitment.chunks[0],
];
error_terms.iter().for_each(|c| {
assert_ne!(c, &Curve::zero());
let error_terms: [(Fq, Fq); 3] = std::array::from_fn(|i| (error_terms[i].x, error_terms[i].y));

let t_terms = [
folding_output_one.t_0.elems[0],
folding_output_one.t_1.elems[0],
folding_output_one.t_0.chunks[0],
folding_output_one.t_1.chunks[0],
];
t_terms.iter().for_each(|c| {
assert_ne!(c, &Curve::zero());