Commits
31 commits
2143ccf
feat: Add Plonky3 basic backend
Leo-Besancon Sep 17, 2025
d83b791
feat: Handle plonky3 target
Leo-Besancon Sep 17, 2025
98495ba
tests: Add E2E test codegen for binary plonky3
Leo-Besancon Sep 17, 2025
f15970d
tests: Add test of generated code in Plonky3 binary E2E tests
Leo-Besancon Sep 17, 2025
254cdcc
fix: Make docs_sync() test work on windows
Leo-Besancon Sep 17, 2025
aaa41ed
fix(codegen): differentiate integrity and transition constraints
Leo-Besancon Sep 19, 2025
8b3ed73
refactor(codegen): avoid public_value Vec allocation
Leo-Besancon Sep 19, 2025
e628f19
chore: changelog, lint fix and removed unused IntegrityConstraintDegr…
Leo-Besancon Sep 19, 2025
df72ff4
feat: add periodic_columns
Leo-Besancon Sep 24, 2025
6c7c777
fix(codegen): Correctly handle boundary constraints domain
Leo-Besancon Sep 24, 2025
96e741c
tests: generate and test all E2E Plonky3 tests
Leo-Besancon Sep 24, 2025
0bd3b79
Merge branch 'next' into add_plonky3_backend
Leo-Besancon Sep 26, 2025
4875e6c
tests: add E2E test for selectors combine with list comprehensions
Leo-Besancon Sep 26, 2025
7a2998a
Merge branch 'next' into add_plonky3_backend
Leo-Besancon Oct 14, 2025
d8c60cc
refactor(plonky3): use AB::Expr::ZERO, ONE, from_u64 and double
Leo-Besancon Oct 14, 2025
ab4c4e8
tests: update plonky3 E2E tests following codegen refactor
Leo-Besancon Oct 14, 2025
dee6af7
refactor(tests): Use macro for plonky3 test boilerplate
Leo-Besancon Oct 14, 2025
7b11b11
docs: Add documentation for Plonky3 backend
Leo-Besancon Oct 14, 2025
3eb73ad
Merge branch 'next' into add_plonky3_backend
Leo-Besancon Oct 15, 2025
4409002
tests: Add Plonky3 E2E tests for computed indices
Leo-Besancon Oct 15, 2025
d0418fe
chore: fix fmt lint
Leo-Besancon Oct 15, 2025
e6aa421
tests(plonky3): use Goldilocks instead of Mersenne32
Leo-Besancon Oct 20, 2025
a2f9883
Merge branch 'next' into add_plonky3_backend
Leo-Besancon Oct 20, 2025
1ec834b
chore: cleanup import
Leo-Besancon Oct 20, 2025
b3a441e
Target 0xMiden Plonky3 repo and use AirScriptAir and AirScriptBuilder…
Leo-Besancon Dec 4, 2025
18a4905
Merge branch 'next' into add_plonky3_backend
Leo-Besancon Dec 8, 2025
805649f
tests: update tests after merge, add plonky3 E2E tests codegen and up…
Leo-Besancon Dec 8, 2025
13d2ae0
fix: cargo fmt
Leo-Besancon Dec 8, 2025
b72ead4
fix: prev cargo fmt failed
Leo-Besancon Dec 8, 2025
f4f6c86
feat: prove/verify and sync plonky3 (#523)
Leo-Besancon Dec 17, 2025
e979548
Merge branch 'next' into add_plonky3_backend
Leo-Besancon Jan 5, 2026
1 change: 1 addition & 0 deletions Cargo.toml
@@ -8,6 +8,7 @@ members = [
"air",
"codegen/winterfell",
"codegen/ace",
"codegen/plonky3",
]
resolver = "2"

13 changes: 13 additions & 0 deletions air-script/Cargo.toml
@@ -18,6 +18,7 @@ path = "src/main.rs"

[dependencies]
air-codegen-winter = { package = "air-codegen-winter", path = "../codegen/winterfell", version = "0.5" }
air-codegen-plonky3 = { package = "air-codegen-plonky3", path = "../codegen/plonky3", version = "0.5" }
air-ir = { package = "air-ir", path = "../air", version = "0.5" }
air-parser = { package = "air-parser", path = "../parser", version = "0.5" }
air-pass = { package = "air-pass", path = "../pass", version = "0.5" }
@@ -29,6 +30,18 @@ mir = { package = "air-mir", path = "../mir", version = "0.5" }

[dev-dependencies]
expect-test = "1.4"
p3-air = { package = "p3-air", version = "0.3", default-features = false }
p3-challenger = { package = "p3-challenger", version = "0.3", default-features = false }
p3-circle = { package = "p3-circle", version = "0.3", default-features = false }
p3-commit = { package = "p3-commit", version = "0.3", default-features = false }
p3-field = { package = "p3-field", version = "0.3", default-features = false }
p3-fri = { package = "p3-fri", version = "0.3", default-features = false }
p3-matrix = { package = "p3-matrix", version = "0.3", default-features = false }
p3-merkle-tree = { package = "p3-merkle-tree", version = "0.3", default-features = false }
p3-mersenne-31 = { package = "p3-mersenne-31", version = "0.3", default-features = false }
p3-sha256 = { package = "p3-sha256", version = "0.3", default-features = false }
p3-symmetric = { package = "p3-symmetric", version = "0.3", default-features = false }
p3-uni-stark = { package = "p3-uni-stark", version = "0.3", default-features = false }
winter-air = { package = "winter-air", version = "0.12", default-features = false }
winter-math = { package = "winter-math", version = "0.12", default-features = false }
winter-utils = { package = "winter-utils", version = "0.12", default-features = false }
10 changes: 10 additions & 0 deletions air-script/src/cli/transpile.rs
@@ -9,11 +9,13 @@ use miden_diagnostics::{
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum Target {
Winterfell,
Plonky3,
}
impl Target {
pub fn extension(&self) -> &'static str {
match self {
Self::Winterfell => "rs",
Self::Plonky3 => "rs",
}
}
}
@@ -55,13 +57,21 @@ impl Transpile {
let target = self.target.unwrap_or(Target::Winterfell);
let backend: Box<dyn CodeGenerator<Output = String>> = match target {
Target::Winterfell => Box::new(air_codegen_winter::CodeGenerator),
Target::Plonky3 => Box::new(air_codegen_plonky3::CodeGenerator),
};

// write transpiled output to the output path
let output_path = match &self.output {
Some(path) => path.clone(),
None => {
let mut path = input_path.clone();
if target == Target::Plonky3 {
path.set_file_name(format!(
"{}_plonky3",
path.file_stem().unwrap().display()
));
path.set_extension("air");
}
path.set_extension(target.extension());
path
},
2 changes: 1 addition & 1 deletion air-script/tests/binary/binary.air
@@ -9,7 +9,7 @@ public_inputs {
}

boundary_constraints {
enf a.first = 0;
enf a.first = stack_inputs[0];
}

integrity_constraints {
2 changes: 1 addition & 1 deletion air-script/tests/binary/binary.rs
@@ -70,7 +70,7 @@ impl Air for BinaryAir {

fn get_assertions(&self) -> Vec<Assertion<Felt>> {
let mut result = Vec::new();
result.push(Assertion::single(0, 0, Felt::ZERO));
result.push(Assertion::single(0, 0, self.stack_inputs[0]));
result
}

35 changes: 35 additions & 0 deletions air-script/tests/binary/binary_plonky3.rs
@@ -0,0 +1,35 @@
use p3_air::{Air, AirBuilder, AirBuilderWithPublicValues, BaseAir, BaseAirWithPublicValues};
use p3_matrix::Matrix;
use p3_field::PrimeCharacteristicRing;

pub const NUM_COLUMNS: usize = 2;

pub const NUM_PUBLIC_VALUES: usize = 16;

pub struct BinaryAir;

impl<F> BaseAir<F> for BinaryAir {
fn width(&self) -> usize {
NUM_COLUMNS
}
}

impl<F> BaseAirWithPublicValues<F> for BinaryAir {
fn num_public_values(&self) -> usize {
NUM_PUBLIC_VALUES
}
}

impl<AB: AirBuilderWithPublicValues> Air<AB> for BinaryAir {
fn eval(&self, builder: &mut AB) {
let main = builder.main();
let public_values = builder.public_values().to_vec();
let (main_current, main_next) = (
main.row_slice(0).unwrap(),
main.row_slice(1).unwrap(),
);
builder.when_first_row().assert_zero::<_>(main_current[0] - public_values[0].into());
builder.when_transition().assert_zero::<_>(main_current[0] * main_current[0] - main_current[0]);
builder.when_transition().assert_zero::<_>(main_current[1] * main_current[1] - main_current[1]);
}
}
8 changes: 7 additions & 1 deletion air-script/tests/binary/mod.rs
@@ -1,4 +1,10 @@
#[rustfmt::skip]
#[allow(clippy::all)]
mod binary;
mod test_air;
mod test_air_winterfell;

mod test_air_plonky3;
#[rustfmt::skip]
#[allow(clippy::all)]
#[allow(unused_imports)]
mod binary_plonky3;
81 changes: 81 additions & 0 deletions air-script/tests/binary/test_air_plonky3.rs
@@ -0,0 +1,81 @@
use std::marker::PhantomData;

use p3_challenger::{HashChallenger, SerializingChallenger32};
use p3_circle::CirclePcs;
use p3_commit::ExtensionMmcs;
use p3_field::{PrimeField64, extension::BinomialExtensionField};
use p3_fri::create_benchmark_fri_params;
use p3_matrix::dense::RowMajorMatrix;
use p3_merkle_tree::MerkleTreeMmcs;
use p3_mersenne_31::Mersenne31;
use p3_sha256::Sha256;
use p3_symmetric::{CompressionFunctionFromHasher, SerializingHasher};
use p3_uni_stark::{StarkConfig, prove, verify};

use crate::binary::binary_plonky3::{BinaryAir, NUM_COLUMNS};

pub fn generate_trace_rows<F: PrimeField64>(inputs: Vec<u32>) -> RowMajorMatrix<F> {
let num_rows = 32;
let trace_length = num_rows * NUM_COLUMNS;

let mut long_trace = F::zero_vec(trace_length);

let mut trace = RowMajorMatrix::new(long_trace, NUM_COLUMNS);

let (prefix, rows, suffix) = unsafe { trace.values.align_to_mut::<[F; NUM_COLUMNS]>() };
assert!(prefix.is_empty(), "Alignment should match");
assert!(suffix.is_empty(), "Alignment should match");
assert_eq!(rows.len(), num_rows);

// Initialize first row
rows[0][0] = F::from_canonical_checked(inputs[0]).unwrap();
rows[0][1] = F::ONE;

// Fill subsequent rows using direct access to the rows array
for i in 1..num_rows {
let a_prev = rows[i - 1][0];
let b_prev = rows[i - 1][1];

// Update current row based on previous values
rows[i][0] = F::ONE - a_prev;
rows[i][1] = F::ONE - b_prev;
}

trace
}

#[test]
fn test_air_plonky3() {
type Val = Mersenne31;
type Challenge = BinomialExtensionField<Val, 3>;

type ByteHash = Sha256;
type FieldHash = SerializingHasher<ByteHash>;
type MyCompress = CompressionFunctionFromHasher<ByteHash, 2, 32>;
type ValMmcs = MerkleTreeMmcs<Val, u8, FieldHash, MyCompress, 32>;
type ChallengeMmcs = ExtensionMmcs<Val, Challenge, ValMmcs>;
type Challenger = SerializingChallenger32<Val, HashChallenger<u8, ByteHash, 32>>;
type Pcs = CirclePcs<Val, ValMmcs, ChallengeMmcs>;
type MyConfig = StarkConfig<Pcs, Challenge, Challenger>;

let byte_hash = ByteHash {};
let field_hash = FieldHash::new(Sha256);
let compress = MyCompress::new(byte_hash);
let val_mmcs = ValMmcs::new(field_hash, compress);
let challenge_mmcs = ChallengeMmcs::new(val_mmcs.clone());
let challenger = Challenger::from_hasher(vec![], byte_hash);
let fri_params = create_benchmark_fri_params(challenge_mmcs);
let pcs = Pcs {
mmcs: val_mmcs,
fri_params,
_phantom: PhantomData,
};
let config = MyConfig::new(pcs, challenger);

let inputs = vec![1; 16];
let inputs_m31: Vec<Val> = inputs.iter().map(|&x| Val::new_checked(x).unwrap()).collect();

let trace = generate_trace_rows::<Val>(inputs);
let proof = prove(&config, &BinaryAir {}, trace, &inputs_m31);
verify(&config, &BinaryAir {}, &proof, &inputs_m31).expect("Verification failed");
}
2 changes: 2 additions & 0 deletions air-script/tests/codegen/helpers.rs
@@ -8,6 +8,7 @@ use miden_diagnostics::{

pub enum Target {
Winterfell,
Plonky3,
}

pub struct Test {
@@ -30,6 +31,7 @@ impl Test {

let backend: Box<dyn CodeGenerator<Output = String>> = match target {
Target::Winterfell => Box::new(air_codegen_winter::CodeGenerator),
Target::Plonky3 => Box::new(air_codegen_plonky3::CodeGenerator),
};

// generate Rust code targeting Winterfell
1 change: 1 addition & 0 deletions air-script/tests/codegen/mod.rs
@@ -1,2 +1,3 @@
mod helpers;
mod plonky3;
mod winterfell;
15 changes: 15 additions & 0 deletions air-script/tests/codegen/plonky3.rs
@@ -0,0 +1,15 @@
use expect_test::expect_file;

use super::helpers::{Target, Test};

#[test]
fn binary() {
let generated_air = Test::new("tests/binary/binary.air".to_string())
.transpile(Target::Plonky3)
.unwrap();

let expected = expect_file!["../binary/binary_plonky3.rs"];
expected.assert_eq(&generated_air);
}

// TODO: add all tests
10 changes: 6 additions & 4 deletions air-script/tests/docs_sync.rs
@@ -3,7 +3,9 @@ use std::{path::Path, process::Command};
#[test]
fn docs_sync() {
let examples_dir = Path::new("../docs/examples");
let airc_path = Path::new("target/release/airc");
// Use CARGO_MANIFEST_DIR to build an absolute path to airc, needed on Windows to correctly use `current_dir`.
let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR not set");
let airc_path = Path::new(&manifest_dir).join("../target/release/airc");

// Build the CLI tool first
let build_output = Command::new("cargo")
@@ -38,11 +40,11 @@
let file_name = air_file.file_name().unwrap().to_string_lossy();
let output_path = air_file.with_extension("rs");

let output = Command::new(airc_path)
let output = Command::new(&airc_path)
.args(["transpile", air_file.to_str().unwrap(), "-o", output_path.to_str().unwrap()])
.current_dir("../")
.output()
.unwrap_or_else(|_| panic!("Failed to transpile {}", file_name));
.unwrap_or_else(|_| panic!("Failed to transpile {file_name}"));

assert!(
output.status.success(),
@@ -51,7 +53,7 @@
String::from_utf8_lossy(&output.stderr)
);

println!("Successfully transpiled: {}", file_name);
println!("Successfully transpiled: {file_name}");

// Clean up generated Rust files
let _ = std::fs::remove_file(output_path);
17 changes: 17 additions & 0 deletions codegen/plonky3/Cargo.toml
@@ -0,0 +1,17 @@
[package]
name = "air-codegen-plonky3"
version = "0.5.0"
description = "Plonky3 code generator for the AirScript language"
authors.workspace = true
readme = "README.md"
license.workspace = true
repository.workspace = true
categories = ["compilers", "cryptography"]
keywords = ["air", "stark", "plonky3", "zero-knowledge", "zkp"]
edition.workspace = true
rust-version.workspace = true

[dependencies]
air-ir = { package = "air-ir", path = "../../air", version = "0.5" }
anyhow = { workspace = true }
codegen = "0.2"
3 changes: 3 additions & 0 deletions codegen/plonky3/README.md
@@ -0,0 +1,3 @@
# Plonky3 Code Generator

This crate contains a code generator targeting the [Plonky3 prover](https://github.com/Plonky3/Plonky3) Rust library.
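For orientation, here is a minimal sketch of how the new backend can be driven programmatically, mirroring the trait-object wiring shown in `transpile.rs` above. The `generate` method name on `air_ir::CodeGenerator` is an assumption based on how the existing Winterfell backend is invoked and is not part of this diff.

// Sketch only: programmatic use of the Plonky3 backend, assuming `generate` is the
// `air_ir::CodeGenerator` trait method (hedged; mirrors the Winterfell backend usage).
use air_ir::{Air, CodeGenerator};

fn emit_plonky3_air(air: &Air) -> anyhow::Result<String> {
    // Same trait object the CLI builds in `transpile.rs` for `Target::Plonky3`.
    let backend: Box<dyn CodeGenerator<Output = String>> =
        Box::new(air_codegen_plonky3::CodeGenerator);
    backend.generate(air)
}

In the CLI, the same backend is selected via `Target::Plonky3`, and when no output path is given the generated file is written next to the input with a `_plonky3.rs` suffix (e.g. `binary.air` becomes `binary_plonky3.rs`).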
17 changes: 17 additions & 0 deletions codegen/plonky3/src/air/boundary_constraints.rs
@@ -0,0 +1,17 @@
use air_ir::{Air, TraceSegmentId};
use codegen::Function;

use super::Codegen;

/// Adds the main boundary constraints to the generated code.
pub(super) fn add_main_boundary_constraints(eval_func: &mut Function, ir: &Air) {
for constraint in ir.boundary_constraints(TraceSegmentId::Main) {
let expr_root = constraint.node_index();

let expr_root_string = expr_root.to_string(ir, TraceSegmentId::Main);

let assertion = format!("builder.when_first_row().assert_zero::<_>({expr_root_string});");

eval_func.line(assertion);
}
}
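For the `binary` example above, this pass emits exactly the boundary assertion seen in `binary_plonky3.rs`:

builder.when_first_row().assert_zero::<_>(main_current[0] - public_values[0].into());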