diff --git a/CHANGELOG.md b/CHANGELOG.md index f06f41cdbf..d3a1cb4b10 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ - [BREAKING] Refactored storage slots to be accessed by names instead of indices ([#1987](https://github.com/0xMiden/miden-base/pull/1987), [#2025](https://github.com/0xMiden/miden-base/pull/2025), [#2149](https://github.com/0xMiden/miden-base/pull/2149), [#2150](https://github.com/0xMiden/miden-base/pull/2150), [#2153](https://github.com/0xMiden/miden-base/pull/2153), [#2154](https://github.com/0xMiden/miden-base/pull/2154), [#2160](https://github.com/0xMiden/miden-base/pull/2160), [#2161](https://github.com/0xMiden/miden-base/pull/2161), [#2170](https://github.com/0xMiden/miden-base/pull/2170)). - [BREAKING] Allowed account components to share identical account code procedures ([#2164](https://github.com/0xMiden/miden-base/pull/2164)). - Add `From<&ExecutedTransaction> for TransactionHeader` implementation ([#2178](https://github.com/0xMiden/miden-base/pull/2178)). +- Implement keccak-based MMR frontier for the newly created `agglayer::collections` module ([#2202](https://github.com/0xMiden/miden-base/pull/2202)). ### Changes @@ -26,6 +27,7 @@ - [BREAKING] Renamed `AccountProcedureInfo` into `AccountProcedureRoot` and remove storage offset and size ([#2162](https://github.com/0xMiden/miden-base/pull/2162)). - [BREAKING] Made `AccountProcedureIndexMap` construction infallible ([#2163](https://github.com/0xMiden/miden-base/pull/2163)). - [BREAKING] Renamed `tracked_procedure_roots_slot` to `trigger_procedure_roots_slot` in ACL auth components for naming consistency ([#2166](https://github.com/0xMiden/miden-base/pull/2166)). +- [BREAKING] Migrated to `miden-vm` v0.20 and `miden-crypto` v0.19 ([#2158](https://github.com/0xMiden/miden-base/pull/2158)). 
- [BREAKING] Refactor `miden-objects` and `miden-lib` into `miden-protocol` and `miden-standards` ([#2184](https://github.com/0xMiden/miden-base/pull/2184), [#2191](https://github.com/0xMiden/miden-base/pull/2191), [#2197](https://github.com/0xMiden/miden-base/pull/2197)). - [BREAKING] Migrated to `miden-vm` v0.20 and `miden-crypto` v0.19 ([#2158](https://github.com/0xMiden/miden-base/pull/2158)). - [BREAKING] Refactored `AccountStorageDelta` to use a new `StorageSlotDelta` type ([#2182](https://github.com/0xMiden/miden-base/pull/2182)). diff --git a/crates/miden-lib/asm/agglayer/collections/mmr_frontier32_keccak.masm b/crates/miden-lib/asm/agglayer/collections/mmr_frontier32_keccak.masm new file mode 100644 index 0000000000..a4b8d7c539 --- /dev/null +++ b/crates/miden-lib/asm/agglayer/collections/mmr_frontier32_keccak.masm @@ -0,0 +1,262 @@ +use miden::core::crypto::hashes::keccak256 + +# Module description: TBD + +# CONSTANTS +# ================================================================================================= + +# The maximum number of leaves which could be added to the MMR. +const MAX_LEAVES_NUM = 32 + +# The total height of the full MMR tree, whose root represents the commitment to the current +frontier. +const TREE_HEIGHT = 5 + +# The number of the stack elements which one node occupies. +const NODE_SIZE = 8 + +# The offset of the number of leaves in the current MMR state. +const NUM_LEAVES_OFFSET = 0 + +# The offset of the MMR root. +# +# Q: do we need to store the root? It seems like we never return it +const MMR_ROOT_OFFSET = 4 + +# The offset of the array of the zero hashes of respective heights. +const ZEROS_OFFSET = 12 # 6 double words, 48 felts in total + +# The offset of the array of the frontier nodes of respective heights. +const FRONTIER_OFFSET = 60 # 6 double words, 48 felts in total + +# PUBLIC API +# ================================================================================================= + +#! 
Sets the zeros in the zero array, sets the root to the ZERO_5 (root of the zero tree of height 5) +#! +#! Inputs: [mmr_frontier_ptr] +#! Outputs: [] +pub proc new + # prepare the pointers to the zero array and to the root + dup add.MMR_ROOT_OFFSET swap add.ZEROS_OFFSET + # => [zero_array_ptr, mmr_root_ptr] + + # store the zero for the height 0 + push.1676014350.378744630.4127735880.2512168523.3362732168.2839872470.2825030484.3656125737 + # => [ZERO_0_LO, ZERO_0_HI, zero_array_ptr, mmr_root_ptr] + + exec.mem_store_double_word dropw dropw + # => [zero_array_ptr, mmr_root_ptr] + + # store the zero for the height 1 + add.NODE_SIZE + push.501060780.3871659214.3009211592.2623812180.1269040150.2402257162.1716941530.3619962211 + # => [ZERO_1_LO, ZERO_1_HI, zero_array_ptr, mmr_root_ptr] + + exec.mem_store_double_word dropw dropw + # => [zero_array_ptr+8, mmr_root_ptr] + + # store the zero for the height 2 + add.NODE_SIZE + push.2113874120.866443917.1466696484.1577749685.3266301349.2378900196.2613970667.2822768521 + # => [ZERO_2_LO, ZERO_2_HI, zero_array_ptr, mmr_root_ptr] + + exec.mem_store_double_word dropw dropw + # => [zero_array_ptr+16, mmr_root_ptr] + + # store the zero for the height 3 + add.NODE_SIZE + push.3624627744.3896701049.3259095475.2541063347.2174359630.3386860883.819752706.2663419451 + # => [ZERO_3_LO, ZERO_3_HI, zero_array_ptr, mmr_root_ptr] + + exec.mem_store_double_word dropw dropw + # => [zero_array_ptr+24, mmr_root_ptr] + + # store the zero for the height 4 + add.NODE_SIZE + push.3672259025.1408703058.2325593427.3764368977.1768984761.3986407010.3179733816.3993949676 + # => [ZERO_4_LO, ZERO_4_HI, zero_array_ptr, mmr_root_ptr] + + exec.mem_store_double_word dropw dropw + # => [zero_array_ptr+32, mmr_root_ptr] + + # store the zero for the height 5 + add.NODE_SIZE + push.2772295100.1943789808.3975148557.2783120662.1954699067.4078189558.55884592.3556179934 + # => [ZERO_5_LO, ZERO_5_HI, zero_array_ptr+40, mmr_root_ptr] + + exec.mem_store_double_word + + # 
update the root + movup.8 drop + # => [ZERO_5_LO, ZERO_5_HI, mmr_root_ptr] + + exec.mem_store_double_word + dropw dropw drop + # => [] +end + +#! Updates the existing frontier with the new leaf, returns a new leaf count and a new MMR root. +#! +#! Inputs: [NEW_LEAF_LO, NEW_LEAF_HI, mmr_frontier_ptr] +#! Outputs: [NEW_ROOT_LO, NEW_ROOT_HI] +@locals(8) +pub proc append_and_update_frontier + # store the new leaf to the local memory + loc_storew.0 dropw + loc_storew.4 dropw + # => [mmr_frontier_ptr] + + # get the leaves number + dup add.NUM_LEAVES_OFFSET mem_load + # => [num_leaves, mmr_frontier_ptr] + + # store the updated leaves number + dup add.1 dup.2 add.NUM_LEAVES_OFFSET + # => [num_leaves_ptr, num_leaves+1, num_leaves, mmr_frontier_ptr] + + mem_store + # => [num_leaves, mmr_frontier_ptr] + + # iterate `TREE_HEIGHT` times to get the root of the tree + # + # iter_counter in that case will show the current tree height + push.0 push.1 + # => [loop_flag=1, iter_counter=0, num_leaves, mmr_frontier_ptr] + + while.true + # => [iter_counter, num_leaves, mmr_frontier_ptr] + + # get the pointer to the frontier node of the current height + dup.2 add.FRONTIER_OFFSET dup.1 mul.NODE_SIZE add + # => [frontier[iter_counter]_ptr, iter_counter, num_leaves, mmr_frontier_ptr] + + # determine whether the last `num_leaves` bit is 1 (is `num_leaves` odd) + dup.2 u32and.1 + # => [is_odd, frontier[iter_counter]_ptr, iter_counter, num_leaves, mmr_frontier_ptr] + + if.true + # => [frontier[iter_counter]_ptr, iter_counter, num_leaves, mmr_frontier_ptr] + # + # this height already had a subtree root stored in frontier[iter_counter], merge into + # parent. 
+ exec.mem_load_double_word + # => [FRONTIER[iter_counter]_LO, FRONTIER[iter_counter]_HI, iter_counter, num_leaves, mmr_frontier_ptr] + + # load the current hash from the local memory back to the stack + padw loc_loadw.4 padw loc_loadw.0 swapdw + # => [FRONTIER[iter_counter]_LO, FRONTIER[iter_counter]_HI, CUR_HASH_LO, CUR_HASH_HI, iter_counter, num_leaves, mmr_frontier_ptr] + + # merge the frontier node of this height with the current hash to get the current hash + # of the next height (merge(frontier[h], cur)) + exec.keccak256::merge + # => [CUR_HASH_LO', CUR_HASH_HI', iter_counter, num_leaves, mmr_frontier_ptr] + + # store the current hash of the next height back to the local memory + loc_storew.0 dropw + loc_storew.4 dropw + # => [iter_counter, num_leaves, mmr_frontier_ptr] + else + # => [frontier[iter_counter]_ptr, iter_counter, num_leaves, mmr_frontier_ptr] + # + # this height wasn't "occupied" yet: store the current hash as the subtree root + # (frontier node) at height `iter_counter` + padw loc_loadw.4 padw loc_loadw.0 + # => [CUR_HASH_LO, CUR_HASH_HI, frontier[iter_counter]_ptr, iter_counter, num_leaves, mmr_frontier_ptr] + + # store the CUR_HASH to the frontier[iter_counter]_ptr + exec.mem_store_double_word movdn.8 drop + # => [CUR_HASH_LO, CUR_HASH_HI, iter_counter, num_leaves, mmr_frontier_ptr] + + # get the pointer to the zero node of the current height + dup.10 add.ZEROS_OFFSET dup.9 mul.NODE_SIZE add + # => [zeros[iter_counter], CUR_HASH_LO, CUR_HASH_HI, iter_counter, num_leaves, mmr_frontier_ptr] + + # load the zero node to the stack + exec.mem_load_double_word swapdw + # => [CUR_HASH_LO, CUR_HASH_HI, ZERO_H_LO, ZERO_H_HI, iter_counter, num_leaves, mmr_frontier_ptr] + + # merge the current hash with the zero node of this height to get the current hash of + # the next height (merge(cur, zeroes[h])) + exec.keccak256::merge + # => [CUR_HASH_LO', CUR_HASH_HI', iter_counter, num_leaves, mmr_frontier_ptr] + + # store the current hash of the next height 
back to the local memory + loc_storew.0 dropw + loc_storew.4 dropw + # => [iter_counter, num_leaves, mmr_frontier_ptr] + end + # => [iter_counter, num_leaves, mmr_frontier_ptr] + + # update the counter + push.1 add + + # update the `num_leaves` (shift it right by 1 bit) + swap u32shr.1 swap + # => [iter_counter+1, num_leaves>>1, mmr_frontier_ptr] + + # compute the cycle flag + dup neq.TREE_HEIGHT + # => [loop_flag, iter_counter+1, num_leaves>>1, mmr_frontier_ptr] + end + # => [iter_counter=5, num_leaves=0, mmr_frontier_ptr] + + # clean the stack + drop drop + # => [mmr_frontier_ptr] + + # update the frontier[5] (frontier[tree_height]) value with the current (final) hash -- in case + # we have a full tree (32 leaves) this node alone will represent the frontier + # + # at the same time we can update the root, since the final hash is the root of the full MMR + + # load the root pointer and the pointer to the 5th frontier node onto the stack + dup add.FRONTIER_OFFSET push.TREE_HEIGHT.NODE_SIZE mul add + swap add.MMR_ROOT_OFFSET + # => [mmr_root_ptr, frontier[tree_height]_ptr] + + # load the final hash + padw loc_loadw.4 padw loc_loadw.0 + # => [FIN_HASH_LO, FIN_HASH_HI, mmr_root_ptr, frontier[tree_height]_ptr] + + # save the final hash as root of the MMR + exec.mem_store_double_word movup.8 drop + # => [FIN_HASH_LO, FIN_HASH_HI, frontier[tree_height]_ptr] + + # store the final hash to the frontier[tree_height]_ptr (frontier[5]) + exec.mem_store_double_word movup.8 drop + # => [FIN_HASH_LO, FIN_HASH_HI] + # since the final hash represents the tree root, the resulting stack state could be represented + # as: + # => [NEW_ROOT_LO, NEW_ROOT_HI] +end + +# HELPER PROCEDURES +# ================================================================================================= + +#! Stores two words to the provided global memory address. +#! +#! Inputs: [WORD_1, WORD_2, ptr] +#! 
Outputs: [WORD_1, WORD_2, ptr] +proc mem_store_double_word + dup.8 mem_storew_be swapw + # => [WORD_2, WORD_1, ptr] + + dup.8 add.4 mem_storew_be swapw + # => [WORD_1, WORD_2, ptr] +end + +#! Loads two words from the provided global memory address. +#! +#! Inputs: [ptr] +#! Outputs: [WORD_1, WORD_2] +proc mem_load_double_word + padw dup.4 mem_loadw_be + # => [WORD_1, ptr] + + padw movup.8 add.4 mem_loadw_be + # => [WORD_2, WORD_1] + + swapw + # => [WORD_1, WORD_2] +end diff --git a/crates/miden-lib/asm/miden/contracts/wallets/basic.masm b/crates/miden-lib/asm/miden/contracts/wallets/basic.masm new file mode 100644 index 0000000000..57c701cb94 --- /dev/null +++ b/crates/miden-lib/asm/miden/contracts/wallets/basic.masm @@ -0,0 +1,61 @@ +use miden::native_account +use miden::output_note + +# CONSTANTS +# ================================================================================================= +const PUBLIC_NOTE=1 + +#! Adds the provided asset to the active account. +#! +#! Inputs: [ASSET, pad(12)] +#! Outputs: [pad(16)] +#! +#! Where: +#! - ASSET is the asset to be received, can be fungible or non-fungible +#! +#! Panics if: +#! - the same non-fungible asset already exists in the account. +#! - adding a fungible asset would result in amount overflow, i.e., +#! the total amount would be greater than 2^63. +#! +#! Invocation: call +pub proc receive_asset + exec.native_account::add_asset + # => [ASSET', pad(12)] + + # drop the final asset + dropw + # => [pad(16)] +end + +#! Removes the specified asset from the account and adds it to the output note with the specified +#! index. +#! +#! This procedure is expected to be invoked using a `call` instruction. It makes no guarantees about +#! the contents of the `PAD` elements shown below. It is the caller's responsibility to make sure +#! these elements do not contain any meaningful data. +#! +#! Inputs: [ASSET, note_idx, pad(11)] +#! Outputs: [ASSET, note_idx, pad(11)] +#! +#! Where: +#! 
- note_idx is the index of the output note. +#! - ASSET is the fungible or non-fungible asset of interest. +#! +#! Panics if: +#! - the fungible asset is not found in the vault. +#! - the amount of the fungible asset in the vault is less than the amount to be removed. +#! - the non-fungible asset is not found in the vault. +#! +#! Invocation: call +pub proc move_asset_to_note + # remove the asset from the account + exec.native_account::remove_asset + # => [ASSET, note_idx, pad(11)] + + dupw dup.8 movdn.4 + # => [ASSET, note_idx, ASSET, note_idx, pad(11)] + + exec.output_note::add_asset + # => [ASSET, note_idx, pad(11)] +end diff --git a/crates/miden-lib/build.rs b/crates/miden-lib/build.rs new file mode 100644 index 0000000000..6843e1e791 --- /dev/null +++ b/crates/miden-lib/build.rs @@ -0,0 +1,898 @@ +use std::collections::{BTreeMap, BTreeSet}; +use std::env; +use std::fmt::Write; +use std::io::{self}; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use fs_err as fs; +use miden_assembly::diagnostics::{IntoDiagnostic, NamedSource, Result, WrapErr, miette}; +use miden_assembly::utils::Serializable; +use miden_assembly::{Assembler, DefaultSourceManager, KernelLibrary, Library, Report}; +use regex::Regex; +use walkdir::WalkDir; + +/// A map where the key is the error name and the value is the error code with the message. +type ErrorCategoryMap = BTreeMap>; + +// CONSTANTS +// ================================================================================================ + +/// Defines whether the build script should generate files in `/src`. +/// The docs.rs build pipeline has a read-only filesystem, so we have to avoid writing to `src`, +/// otherwise the docs will fail to build there. Note that writing to `OUT_DIR` is fine. 
+const BUILD_GENERATED_FILES_IN_SRC: bool = option_env!("BUILD_GENERATED_FILES_IN_SRC").is_some(); + +const ASSETS_DIR: &str = "assets"; +const ASM_DIR: &str = "asm"; +const ASM_MIDEN_DIR: &str = "miden"; +const ASM_NOTE_SCRIPTS_DIR: &str = "note_scripts"; +const ASM_ACCOUNT_COMPONENTS_DIR: &str = "account_components"; +const SHARED_UTILS_DIR: &str = "shared_utils"; +const SHARED_MODULES_DIR: &str = "shared_modules"; +const ASM_TX_KERNEL_DIR: &str = "kernels/transaction"; +const KERNEL_PROCEDURES_RS_FILE: &str = "src/transaction/kernel_procedures.rs"; + +const TX_KERNEL_ERRORS_FILE: &str = "src/errors/tx_kernel_errors.rs"; +const NOTE_SCRIPT_ERRORS_FILE: &str = "src/errors/note_script_errors.rs"; + +const TX_KERNEL_ERRORS_ARRAY_NAME: &str = "TX_KERNEL_ERRORS"; +const NOTE_SCRIPT_ERRORS_ARRAY_NAME: &str = "NOTE_SCRIPT_ERRORS"; + +const TX_KERNEL_ERROR_CATEGORIES: [TxKernelErrorCategory; 14] = [ + TxKernelErrorCategory::Kernel, + TxKernelErrorCategory::Prologue, + TxKernelErrorCategory::Epilogue, + TxKernelErrorCategory::Tx, + TxKernelErrorCategory::Note, + TxKernelErrorCategory::Account, + TxKernelErrorCategory::ForeignAccount, + TxKernelErrorCategory::Faucet, + TxKernelErrorCategory::FungibleAsset, + TxKernelErrorCategory::NonFungibleAsset, + TxKernelErrorCategory::Vault, + TxKernelErrorCategory::LinkMap, + TxKernelErrorCategory::InputNote, + TxKernelErrorCategory::OutputNote, +]; + +// PRE-PROCESSING +// ================================================================================================ + +/// Read and parse the contents from `./asm`. +/// - Compiles contents of asm/miden directory into a Miden library file (.masl) under miden +/// namespace. +/// - Compiles contents of asm/scripts directory into individual .masb files. 
+fn main() -> Result<()> { + // re-build when the MASM code changes + println!("cargo::rerun-if-changed={ASM_DIR}/"); + println!("cargo::rerun-if-env-changed=BUILD_GENERATED_FILES_IN_SRC"); + + // Copies the MASM code to the build directory + let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + let build_dir = env::var("OUT_DIR").unwrap(); + let src = Path::new(&crate_dir).join(ASM_DIR); + let dst = Path::new(&build_dir).to_path_buf(); + copy_directory(src, &dst)?; + + // set source directory to {OUT_DIR}/asm + let source_dir = dst.join(ASM_DIR); + + // copy the shared modules to the kernel and miden library folders + copy_shared_modules(&source_dir)?; + + // set target directory to {OUT_DIR}/assets + let target_dir = Path::new(&build_dir).join(ASSETS_DIR); + + // compile transaction kernel + let mut assembler = + compile_tx_kernel(&source_dir.join(ASM_TX_KERNEL_DIR), &target_dir.join("kernels"))?; + + // compile miden library + let miden_lib = compile_miden_lib(&source_dir, &target_dir, assembler.clone())?; + assembler.link_dynamic_library(miden_lib)?; + + // compile note scripts + compile_note_scripts( + &source_dir.join(ASM_NOTE_SCRIPTS_DIR), + &target_dir.join(ASM_NOTE_SCRIPTS_DIR), + assembler.clone(), + )?; + + // compile account components + compile_account_components( + &source_dir.join(ASM_ACCOUNT_COMPONENTS_DIR), + &target_dir.join(ASM_ACCOUNT_COMPONENTS_DIR), + assembler, + )?; + + generate_error_constants(&source_dir)?; + + generate_event_constants(&source_dir, &target_dir)?; + Ok(()) +} + +// COMPILE TRANSACTION KERNEL +// ================================================================================================ + +/// Reads the transaction kernel MASM source from the `source_dir`, compiles it, saves the results +/// to the `target_dir`, and returns an [Assembler] instantiated with the compiled kernel. +/// +/// Additionally it compiles the transaction script executor program, see the +/// [compile_tx_script_main] procedure for details. 
+/// +/// `source_dir` is expected to have the following structure: +/// +/// - {source_dir}/api.masm -> defines exported procedures from the transaction kernel. +/// - {source_dir}/main.masm -> defines the executable program of the transaction kernel. +/// - {source_dir}/tx_script_main -> defines the executable program of the arbitrary transaction +/// script. +/// - {source_dir}/lib -> contains common modules used by both api.masm and main.masm. +/// +/// The compiled files are written as follows: +/// +/// - {target_dir}/tx_kernel.masl -> contains kernel library compiled from api.masm. +/// - {target_dir}/tx_kernel.masb -> contains the executable compiled from main.masm. +/// - {target_dir}/tx_script_main.masb -> contains the executable compiled from +/// tx_script_main.masm. +/// - src/transaction/procedures/kernel_v0.rs -> contains the kernel procedures table. +fn compile_tx_kernel(source_dir: &Path, target_dir: &Path) -> Result { + let shared_utils_path = std::path::Path::new(ASM_DIR).join(SHARED_UTILS_DIR); + let kernel_path = miden_assembly::Path::kernel_path(); + + let mut assembler = build_assembler(None)?; + // add the shared util modules to the kernel lib under the ::$kernel::util namespace + assembler.compile_and_statically_link_from_dir(&shared_utils_path, kernel_path)?; + + // assemble the kernel library and write it to the "tx_kernel.masl" file + let kernel_lib = assembler + .assemble_kernel_from_dir(source_dir.join("api.masm"), Some(source_dir.join("lib")))?; + + // generate kernel `procedures.rs` file + generate_kernel_proc_hash_file(kernel_lib.clone())?; + + let output_file = target_dir.join("tx_kernel").with_extension(Library::LIBRARY_EXTENSION); + kernel_lib.write_to_file(output_file).into_diagnostic()?; + + let assembler = build_assembler(Some(kernel_lib))?; + + // assemble the kernel program and write it to the "tx_kernel.masb" file + let mut main_assembler = assembler.clone(); + // add the shared util modules to the kernel lib under the 
::$kernel::util namespace + main_assembler.compile_and_statically_link_from_dir(&shared_utils_path, kernel_path)?; + main_assembler.compile_and_statically_link_from_dir(source_dir.join("lib"), kernel_path)?; + + let main_file_path = source_dir.join("main.masm"); + let kernel_main = main_assembler.clone().assemble_program(main_file_path)?; + + let masb_file_path = target_dir.join("tx_kernel.masb"); + kernel_main.write_to_file(masb_file_path).into_diagnostic()?; + + // compile the transaction script main program + compile_tx_script_main(source_dir, target_dir, main_assembler)?; + + #[cfg(any(feature = "testing", test))] + { + let mut kernel_lib_assembler = assembler.clone(); + // Build kernel as a library and save it to file. + // This is needed in test assemblers to access individual procedures which would otherwise + // be hidden when using KernelLibrary (api.masm) + + // add the shared util modules to the kernel lib under the ::$kernel::util namespace + kernel_lib_assembler + .compile_and_statically_link_from_dir(&shared_utils_path, kernel_path)?; + + let test_lib = kernel_lib_assembler + .assemble_library_from_dir(source_dir.join("lib"), kernel_path) + .unwrap(); + + let masb_file_path = + target_dir.join("kernel_library").with_extension(Library::LIBRARY_EXTENSION); + test_lib.write_to_file(masb_file_path).into_diagnostic()?; + } + + Ok(assembler) +} + +/// Reads the transaction script executor MASM source from the `source_dir/tx_script_main.masm`, +/// compiles it and saves the results to the `target_dir` as a `tx_script_main.masb` binary file. +fn compile_tx_script_main( + source_dir: &Path, + target_dir: &Path, + main_assembler: Assembler, +) -> Result<()> { + // assemble the transaction script executor program and write it to the "tx_script_main.masb" + // file. 
+ let tx_script_main_file_path = source_dir.join("tx_script_main.masm"); + let tx_script_main = main_assembler.assemble_program(tx_script_main_file_path)?; + + let masb_file_path = target_dir.join("tx_script_main.masb"); + tx_script_main.write_to_file(masb_file_path).into_diagnostic() +} + +/// Generates kernel `procedures.rs` file based on the kernel library +fn generate_kernel_proc_hash_file(kernel: KernelLibrary) -> Result<()> { + // Because the kernel Rust file will be stored under ./src, this should be a no-op if we can't + // write there + if !BUILD_GENERATED_FILES_IN_SRC { + return Ok(()); + } + + let (_, module_info, _) = kernel.into_parts(); + + let to_exclude = BTreeSet::from_iter(["exec_kernel_proc"]); + let offsets_filename = Path::new(ASM_DIR).join(ASM_MIDEN_DIR).join("kernel_proc_offsets.masm"); + let offsets = parse_proc_offsets(&offsets_filename)?; + + let generated_procs: BTreeMap = module_info + .procedures() + .filter(|(_, proc_info)| !to_exclude.contains::(proc_info.name.as_ref())) + .map(|(_, proc_info)| { + let name = proc_info.name.to_string(); + + let Some(&offset) = offsets.get(&name) else { + panic!("Offset constant for function `{name}` not found in `{offsets_filename:?}`"); + }; + + (offset, format!(" // {name}\n word!(\"{}\"),", proc_info.digest)) + }) + .collect(); + + let proc_count = generated_procs.len(); + let generated_procs: String = generated_procs.into_iter().enumerate().map(|(index, (offset, txt))| { + if index != offset { + panic!("Offset constants in the file `{offsets_filename:?}` are not contiguous (missing offset: {index})"); + } + + txt + }).collect::>().join("\n"); + + fs::write( + KERNEL_PROCEDURES_RS_FILE, + format!( + r#"// This file is generated by build.rs, do not modify + +use miden_objects::{{Word, word}}; + +// KERNEL PROCEDURES +// ================================================================================================ + +/// Hashes of all dynamically executed kernel procedures. 
+pub const KERNEL_PROCEDURES: [Word; {proc_count}] = [ +{generated_procs} +]; +"#, ), ) .into_diagnostic() +} + +fn parse_proc_offsets(filename: impl AsRef) -> Result> { + let regex: Regex = Regex::new(r"^const\s*(?P\w+)_OFFSET\s*=\s*(?P\d+)").unwrap(); + let mut result = BTreeMap::new(); + for line in fs::read_to_string(filename).into_diagnostic()?.lines() { + if let Some(captures) = regex.captures(line) { + result.insert( + captures["name"].to_string().to_lowercase(), + captures["offset"].parse().into_diagnostic()?, + ); + } + } + + Ok(result) +} + +// COMPILE MIDEN LIB +// ================================================================================================ + +/// Reads the MASM files from "{source_dir}/miden" directory, compiles them into a Miden assembly +/// library, saves the library into "{target_dir}/miden.masl", and returns the compiled library. +fn compile_miden_lib( + source_dir: &Path, + target_dir: &Path, + mut assembler: Assembler, +) -> Result { + let source_dir = source_dir.join(ASM_MIDEN_DIR); + let shared_path = Path::new(ASM_DIR).join(SHARED_UTILS_DIR); + + // add the shared modules to the kernel lib under the miden::util namespace + assembler.compile_and_statically_link_from_dir(&shared_path, "miden")?; + + let miden_lib = assembler.assemble_library_from_dir(source_dir, "miden")?; + + let output_file = target_dir.join("miden").with_extension(Library::LIBRARY_EXTENSION); + miden_lib.write_to_file(output_file).into_diagnostic()?; + + Ok(miden_lib) +} + +// COMPILE EXECUTABLE MODULES +// ================================================================================================ + +/// Reads all MASM files from the "{source_dir}", compiles each file individually into a MASB +/// file, and stores the compiled files into the "{target_dir}". +/// +/// The source files are expected to contain executable programs. 
+fn compile_note_scripts(source_dir: &Path, target_dir: &Path, assembler: Assembler) -> Result<()> { + fs::create_dir_all(target_dir) + .into_diagnostic() + .wrap_err("failed to create note_scripts directory")?; + + for masm_file_path in get_masm_files(source_dir).unwrap() { + // read the MASM file, parse it, and serialize the parsed AST to bytes + let code = assembler.clone().assemble_program(masm_file_path.clone())?; + + let bytes = code.to_bytes(); + + let masm_file_name = masm_file_path + .file_name() + .expect("file name should exist") + .to_str() + .ok_or_else(|| Report::msg("failed to convert file name to &str"))?; + let mut masb_file_path = target_dir.join(masm_file_name); + + // write the binary MASB to the output dir + masb_file_path.set_extension("masb"); + fs::write(masb_file_path, bytes).unwrap(); + } + Ok(()) +} + +// COMPILE ACCOUNT COMPONENTS +// ================================================================================================ + +/// Compiles the account components in `source_dir` into MASL libraries and stores the compiled +/// files in `target_dir`. 
+fn compile_account_components( + source_dir: &Path, + target_dir: &Path, + assembler: Assembler, +) -> Result<()> { + if !target_dir.exists() { + fs::create_dir_all(target_dir).unwrap(); + } + + for masm_file_path in get_masm_files(source_dir).unwrap() { + let component_name = masm_file_path + .file_stem() + .expect("masm file should have a file stem") + .to_str() + .expect("file stem should be valid UTF-8") + .to_owned(); + + let component_source_code = fs::read_to_string(masm_file_path) + .expect("reading the component's MASM source code should succeed"); + + let named_source = NamedSource::new(component_name.clone(), component_source_code); + + let component_library = assembler + .clone() + .assemble_library([named_source]) + .expect("library assembly should succeed"); + + let component_file_path = + target_dir.join(component_name).with_extension(Library::LIBRARY_EXTENSION); + component_library.write_to_file(component_file_path).into_diagnostic()?; + } + + Ok(()) +} + +// HELPER FUNCTIONS +// ================================================================================================ + +/// Returns a new [Assembler] loaded with miden-core-lib and the specified kernel, if provided. +fn build_assembler(kernel: Option) -> Result { + kernel + .map(|kernel| Assembler::with_kernel(Arc::new(DefaultSourceManager::default()), kernel)) + .unwrap_or_default() + .with_dynamic_library(miden_core_lib::CoreLibrary::default()) +} + +/// Recursively copies `src` into `dst`. +/// +/// This function will overwrite the existing files if re-executed. +fn copy_directory, R: AsRef>(src: T, dst: R) -> Result<()> { + let mut prefix = src.as_ref().canonicalize().unwrap(); + // keep all the files inside the `asm` folder + prefix.pop(); + + let target_dir = dst.as_ref().join(ASM_DIR); + if target_dir.exists() { + // Clear existing asm files that were copied earlier which may no longer exist. 
+ fs::remove_dir_all(&target_dir) + .into_diagnostic() + .wrap_err("failed to remove ASM directory")?; + } + + // Recreate the directory structure. + fs::create_dir_all(&target_dir) + .into_diagnostic() + .wrap_err("failed to create ASM directory")?; + + let dst = dst.as_ref(); + let mut todo = vec![src.as_ref().to_path_buf()]; + + while let Some(goal) = todo.pop() { + for entry in fs::read_dir(goal).unwrap() { + let path = entry.unwrap().path(); + if path.is_dir() { + let src_dir = path.canonicalize().unwrap(); + let dst_dir = dst.join(src_dir.strip_prefix(&prefix).unwrap()); + if !dst_dir.exists() { + fs::create_dir_all(&dst_dir).unwrap(); + } + todo.push(src_dir); + } else { + let dst_file = dst.join(path.strip_prefix(&prefix).unwrap()); + fs::copy(&path, dst_file).unwrap(); + } + } + } + + Ok(()) +} + +/// Copies the content of the build `shared_modules` folder to the `lib` and `miden` build folders. +/// This is required to include the shared modules as APIs of the `kernel` and `miden` libraries. +/// +/// This is done to make it possible to import the modules in the `shared_modules` folder directly, +/// i.e. "use $kernel::account_id". +fn copy_shared_modules>(source_dir: T) -> Result<()> { + // source is expected to be an `OUT_DIR/asm` folder + let shared_modules_dir = source_dir.as_ref().join(SHARED_MODULES_DIR); + + for module_path in get_masm_files(shared_modules_dir).unwrap() { + let module_name = module_path.file_name().unwrap(); + + // copy to kernel lib + let kernel_lib_folder = source_dir.as_ref().join(ASM_TX_KERNEL_DIR).join("lib"); + fs::copy(&module_path, kernel_lib_folder.join(module_name)).into_diagnostic()?; + + // copy to miden lib + let miden_lib_folder = source_dir.as_ref().join(ASM_MIDEN_DIR); + fs::copy(&module_path, miden_lib_folder.join(module_name)).into_diagnostic()?; + } + + Ok(()) +} + +/// Returns a vector with paths to all MASM files in the specified directory. +/// +/// All non-MASM files are skipped. 
+fn get_masm_files>(dir_path: P) -> Result> { + let mut files = Vec::new(); + + let path = dir_path.as_ref(); + if path.is_dir() { + let entries = fs::read_dir(path) + .into_diagnostic() + .wrap_err_with(|| format!("failed to read directory {}", path.display()))?; + for entry in entries { + let file = entry.into_diagnostic().wrap_err("failed to read directory entry")?; + let file_path = file.path(); + if is_masm_file(&file_path).into_diagnostic()? { + files.push(file_path); + } + } + } else { + println!("cargo:warn=The specified path is not a directory."); + } + + Ok(files) +} + +/// Returns true if the provided path resolves to a file with `.masm` extension. +/// +/// # Errors +/// Returns an error if the path could not be converted to a UTF-8 string. +fn is_masm_file(path: &Path) -> io::Result { + if let Some(extension) = path.extension() { + let extension = extension + .to_str() + .ok_or_else(|| io::Error::other("invalid UTF-8 filename"))? + .to_lowercase(); + Ok(extension == "masm") + } else { + Ok(false) + } +} + +// ERROR CONSTANTS FILE GENERATION +// ================================================================================================ + +/// Reads all MASM files from the `asm_source_dir` and extracts its error constants and their +/// associated error message and generates a Rust file for each category of errors. +/// For example: +/// +/// ```text +/// const ERR_PROLOGUE_NEW_ACCOUNT_VAULT_MUST_BE_EMPTY="new account must have an empty vault" +/// ``` +/// +/// would generate a Rust file for transaction kernel errors (since the error belongs to that +/// category, identified by the category extracted from `ERR_`) with - roughly - the +/// following content: +/// +/// ```rust +/// pub const ERR_PROLOGUE_NEW_ACCOUNT_VAULT_MUST_BE_EMPTY: MasmError = +/// MasmError::from_static_str("new account must have an empty vault"); +/// ``` +/// +/// and add the constant to the error constants array. 
+///
+/// The function ensures that a constant is not defined twice, except if their error message is the
+/// same. This can happen across multiple files.
+///
+/// Because the error files will be written to ./src/errors, this should be a no-op if ./src is
+/// read-only. To enable writing to ./src, set the `BUILD_GENERATED_FILES_IN_SRC` environment
+/// variable.
+fn generate_error_constants(asm_source_dir: &Path) -> Result<()> {
+    if !BUILD_GENERATED_FILES_IN_SRC {
+        return Ok(());
+    }
+
+    let categories =
+        extract_all_masm_errors(asm_source_dir).context("failed to extract all masm errors")?;
+
+    for (category, errors) in categories {
+        // Generate the errors file.
+        let error_file_content = generate_error_file_content(category, errors)?;
+        std::fs::write(category.error_file_name(), error_file_content).into_diagnostic()?;
+    }
+
+    Ok(())
+}
+
+/// Extract all masm errors from the given path and returns a map by error category.
+fn extract_all_masm_errors(asm_source_dir: &Path) -> Result<BTreeMap<ErrorCategory, Vec<NamedError>>> {
+    // We use a BTree here to order the errors by their categories which is the first part after the
+    // ERR_ prefix and to allow for the same error to be defined multiple times in different files
+    // (as long as the constant name and error messages match).
+    let mut errors = BTreeMap::new();
+
+    // Walk all files of the kernel source directory.
+    for entry in WalkDir::new(asm_source_dir) {
+        let entry = entry.into_diagnostic()?;
+        if !is_masm_file(entry.path()).into_diagnostic()?
+        {
+            continue;
+        }
+        let file_contents = std::fs::read_to_string(entry.path()).into_diagnostic()?;
+        extract_masm_errors(&mut errors, &file_contents)?;
+    }
+
+    let mut category_map: BTreeMap<ErrorCategory, Vec<NamedError>> = BTreeMap::new();
+
+    for (error_name, error) in errors.into_iter() {
+        let category = ErrorCategory::match_category(&error_name)?;
+
+        let named_error = NamedError { name: error_name, message: error.message };
+
+        category_map.entry(category).or_default().push(named_error);
+    }
+
+    Ok(category_map)
+}
+
+/// Extracts the errors from a single masm file and inserts them into the provided map.
+fn extract_masm_errors(
+    errors: &mut BTreeMap<ErrorName, ExtractedError>,
+    file_contents: &str,
+) -> Result<()> {
+    let regex = Regex::new(r#"const\s*ERR_(?<name>.*)\s*=\s*"(?<message>.*)""#).unwrap();
+
+    for capture in regex.captures_iter(file_contents) {
+        let error_name = capture
+            .name("name")
+            .expect("error name should be captured")
+            .as_str()
+            .trim()
+            .to_owned();
+        let error_message = capture
+            .name("message")
+            .expect("error code should be captured")
+            .as_str()
+            .trim()
+            .to_owned();
+
+        if let Some(ExtractedError { message: existing_error_message, .. }) =
+            errors.get(&error_name)
+            && existing_error_message != &error_message
+        {
+            return Err(Report::msg(format!(
+                "Transaction kernel error constant ERR_{error_name} is already defined elsewhere but its error message is different"
+            )));
+        }
+
+        // Enforce the "no trailing punctuation" rule from the Rust error guidelines on MASM errors.
+        if error_message.ends_with(".") {
+            return Err(Report::msg(format!(
+                "Error messages should not end with a period: `ERR_{error_name}: {error_message}`"
+            )));
+        }
+
+        errors.insert(error_name, ExtractedError { message: error_message });
+    }
+
+    Ok(())
+}
+
+fn is_new_error_category<'a>(last_error: &mut Option<&'a str>, current_error: &'a str) -> bool {
+    let is_new = match last_error {
+        Some(last_err) => {
+            let last_category =
+                last_err.split("_").next().expect("there should be at least one entry");
+            let new_category =
+                current_error.split("_").next().expect("there should be at least one entry");
+            last_category != new_category
+        },
+        None => false,
+    };
+
+    last_error.replace(current_error);
+
+    is_new
+}
+
+/// Generates the content of an error file for the given category and the set of errors.
+fn generate_error_file_content(category: ErrorCategory, errors: Vec<NamedError>) -> Result<String> {
+    let mut output = String::new();
+
+    writeln!(output, "use crate::errors::MasmError;\n").unwrap();
+
+    writeln!(
+        output,
+        "// This file is generated by build.rs, do not modify manually.
+// It is generated by extracting errors from the masm files in the `miden-lib/asm` directory.
+//
+// To add a new error, define a constant in masm of the pattern `const ERR_<CATEGORY>_...`.
+// Try to fit the error into a pre-existing category if possible (e.g. Account, Prologue,
+// Non-Fungible-Asset, ...).
+"
+    )
+    .unwrap();
+
+    writeln!(
+        output,
+        "// {}
+// ================================================================================================
+",
+        category.array_name().replace("_", " ")
+    )
+    .unwrap();
+
+    let mut last_error = None;
+    for named_error in errors.iter() {
+        let NamedError { name, message } = named_error;
+
+        // Group errors into blocks separate by newlines.
+        if is_new_error_category(&mut last_error, name) {
+            writeln!(output).into_diagnostic()?;
+        }
+
+        writeln!(output, "/// Error Message: \"{message}\"").into_diagnostic()?;
+        writeln!(
+            output,
+            r#"pub const ERR_{name}: MasmError = MasmError::from_static_str("{message}");"#
+        )
+        .into_diagnostic()?;
+    }
+
+    Ok(output)
+}
+
+type ErrorName = String;
+
+#[derive(Debug, Clone)]
+struct ExtractedError {
+    message: String,
+}
+
+#[derive(Debug, Clone)]
+struct NamedError {
+    name: ErrorName,
+    message: String,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+enum ErrorCategory {
+    TxKernel,
+    NoteScript,
+}
+
+impl ErrorCategory {
+    pub const fn error_file_name(&self) -> &'static str {
+        match self {
+            ErrorCategory::TxKernel => TX_KERNEL_ERRORS_FILE,
+            ErrorCategory::NoteScript => NOTE_SCRIPT_ERRORS_FILE,
+        }
+    }
+
+    pub const fn array_name(&self) -> &'static str {
+        match self {
+            ErrorCategory::TxKernel => TX_KERNEL_ERRORS_ARRAY_NAME,
+            ErrorCategory::NoteScript => NOTE_SCRIPT_ERRORS_ARRAY_NAME,
+        }
+    }
+
+    pub fn match_category(error_name: &ErrorName) -> Result<Self> {
+        for kernel_category in TX_KERNEL_ERROR_CATEGORIES {
+            if error_name.starts_with(kernel_category.category_name()) {
+                return Ok(ErrorCategory::TxKernel);
+            }
+        }
+
+        // If the error is not a tx kernel error, consider it a note script error.
+ Ok(ErrorCategory::NoteScript) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +enum TxKernelErrorCategory { + Kernel, + Prologue, + Epilogue, + Tx, + Note, + Account, + ForeignAccount, + Faucet, + FungibleAsset, + NonFungibleAsset, + Vault, + LinkMap, + InputNote, + OutputNote, +} + +impl TxKernelErrorCategory { + pub const fn category_name(&self) -> &'static str { + match self { + TxKernelErrorCategory::Kernel => "KERNEL", + TxKernelErrorCategory::Prologue => "PROLOGUE", + TxKernelErrorCategory::Epilogue => "EPILOGUE", + TxKernelErrorCategory::Tx => "TX", + TxKernelErrorCategory::Note => "NOTE", + TxKernelErrorCategory::Account => "ACCOUNT", + TxKernelErrorCategory::ForeignAccount => "FOREIGN_ACCOUNT", + TxKernelErrorCategory::Faucet => "FAUCET", + TxKernelErrorCategory::FungibleAsset => "FUNGIBLE_ASSET", + TxKernelErrorCategory::NonFungibleAsset => "NON_FUNGIBLE_ASSET", + TxKernelErrorCategory::Vault => "VAULT", + TxKernelErrorCategory::LinkMap => "LINK_MAP", + TxKernelErrorCategory::InputNote => "INPUT_NOTE", + TxKernelErrorCategory::OutputNote => "OUTPUT_NOTE", + } + } +} + +// EVENT CONSTANTS FILE GENERATION +// ================================================================================================ + +/// Reads all MASM files from the `asm_source_dir` and extracts event definitions, +/// then generates the transaction_events.rs file with constants. 
+fn generate_event_constants(asm_source_dir: &Path, target_dir: &Path) -> Result<()> {
+    // Extract all event definitions from MASM files
+    let events = extract_all_event_definitions(asm_source_dir)?;
+
+    // Generate the events file in OUT_DIR
+    let event_file_content = generate_event_file_content(&events).into_diagnostic()?;
+    let event_file_path = target_dir.join("transaction_events.rs");
+    fs::write(event_file_path, event_file_content).into_diagnostic()?;
+
+    Ok(())
+}
+
+/// Extract all `const X=event("x")` definitions from all MASM files
+fn extract_all_event_definitions(asm_source_dir: &Path) -> Result<BTreeMap<String, String>> {
+    // collect mappings event path to const variable name, we want a unique mapping
+    // which we use to generate the constants and enum variant names
+    let mut events = BTreeMap::new();
+
+    // Walk all MASM files
+    for entry in WalkDir::new(asm_source_dir) {
+        let entry = entry.into_diagnostic()?;
+        if !is_masm_file(entry.path()).into_diagnostic()? {
+            continue;
+        }
+        let file_contents = fs::read_to_string(entry.path()).into_diagnostic()?;
+        extract_event_definitions_from_file(&mut events, &file_contents, entry.path())?;
+    }
+
+    Ok(events)
+}
+
+/// Extract event definitions from a single MASM file in form of `const ${X} = event("${x::path}")`.
+fn extract_event_definitions_from_file(
+    events: &mut BTreeMap<String, String>,
+    file_contents: &str,
+    file_path: &Path,
+) -> Result<()> {
+    let regex = Regex::new(r#"const\s*(\w+)\s*=\s*event\("([^"]+)"\)"#).unwrap();
+
+    for capture in regex.captures_iter(file_contents) {
+        let const_name = capture.get(1).expect("const name should be captured");
+        let event_path = capture.get(2).expect("event path should be captured");
+
+        let event_path = event_path.as_str();
+        let const_name = const_name.as_str();
+
+        let const_name_wo_suffix =
+            if let Some((const_name_wo_suffix, _)) = const_name.rsplit_once("_EVENT") {
+                const_name_wo_suffix.to_string()
+            } else {
+                const_name.to_owned()
+            };
+
+        if !event_path.starts_with("miden::") {
+            return Err(miette::miette!("unhandled `event_path={event_path}`"));
+        }
+
+        // Check for duplicates with different definitions
+        if let Some(existing_const_name) = events.get(event_path) {
+            if existing_const_name != &const_name_wo_suffix {
+                println!(
+                    "cargo:warning=Duplicate event definition found {event_path} with different definitions names:
+ '{existing_const_name}' vs '{const_name}' in {}",
+                    file_path.display()
+                );
+            }
+        } else {
+            events.insert(event_path.to_owned(), const_name_wo_suffix.to_owned());
+        }
+    }
+
+    Ok(())
+}
+
+/// Generate the content of the transaction_events.rs file
+fn generate_event_file_content(
+    events: &BTreeMap<String, String>,
+) -> std::result::Result<String, std::fmt::Error> {
+    use std::fmt::Write;
+
+    let mut output = String::new();
+
+    writeln!(&mut output, "// This file is generated by build.rs, do not modify")?;
+    writeln!(&mut output)?;
+
+    // Generate constants
+    //
+    // Note: If we ever encounter two constants `const X`, that are both named `X` we will error
+    // when attempting to generate the rust code.
Currently this is a side-effect, but we
+    // want to error out as early as possible:
+    // TODO: make the error out at build-time to be able to present better error hints
+    for (event_path, event_name) in events {
+        let value = miden_core::EventId::from_name(event_path).as_felt().as_int();
+        debug_assert!(!event_name.is_empty());
+        writeln!(&mut output, "const {}: u64 = {};", event_name, value)?;
+    }
+
+    {
+        writeln!(&mut output)?;
+
+        writeln!(&mut output)?;
+
+        writeln!(
+            &mut output,
+            r###"
+use alloc::collections::BTreeMap;
+
+pub(crate) static EVENT_NAME_LUT: ::miden_objects::utils::sync::LazyLock<BTreeMap<u64, &'static str>> =
+    ::miden_objects::utils::sync::LazyLock::new(|| {{
+        BTreeMap::from_iter([
+"###
+        )?;
+
+        for (event_path, const_name) in events {
+            writeln!(&mut output, "            ({}, \"{}\"),", const_name, event_path)?;
+        }
+
+        writeln!(
+            &mut output,
+            r###"        ])
+}});"###
+        )?;
+    }
+
+    Ok(output)
+}
diff --git a/crates/miden-lib/src/lib.rs b/crates/miden-lib/src/lib.rs
new file mode 100644
index 0000000000..c04aeff6f5
--- /dev/null
+++ b/crates/miden-lib/src/lib.rs
@@ -0,0 +1,96 @@
+#![no_std]
+
+use alloc::sync::Arc;
+
+#[macro_use]
+extern crate alloc;
+
+#[cfg(feature = "std")]
+extern crate std;
+
+use miden_objects::assembly::Library;
+use miden_objects::assembly::mast::MastForest;
+use miden_objects::utils::serde::Deserializable;
+use miden_objects::utils::sync::LazyLock;
+
+mod auth;
+pub use auth::AuthScheme;
+
+pub mod account;
+pub mod block;
+pub mod errors;
+pub mod note;
+pub mod transaction;
+pub mod utils;
+
+#[cfg(any(feature = "testing", test))]
+pub mod testing;
+
+// RE-EXPORTS
+// ================================================================================================
+pub use miden_core_lib::CoreLibrary;
+
+// CONSTANTS
+// ================================================================================================
+
+const MIDEN_LIB_BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/assets/miden.masl"));
+
+// MIDEN LIBRARY
+// 
================================================================================================
+
+#[derive(Clone)]
+pub struct MidenLib(Library);
+
+impl MidenLib {
+    /// Returns a reference to the [`MastForest`] of the inner [`Library`].
+    pub fn mast_forest(&self) -> &Arc<MastForest> {
+        self.0.mast_forest()
+    }
+}
+
+impl AsRef<Library> for MidenLib {
+    fn as_ref(&self) -> &Library {
+        &self.0
+    }
+}
+
+impl From<MidenLib> for Library {
+    fn from(value: MidenLib) -> Self {
+        value.0
+    }
+}
+
+impl Default for MidenLib {
+    fn default() -> Self {
+        static MIDEN_LIB: LazyLock<MidenLib> = LazyLock::new(|| {
+            let contents =
+                Library::read_from_bytes(MIDEN_LIB_BYTES).expect("failed to read miden lib masl!");
+            MidenLib(contents)
+        });
+        MIDEN_LIB.clone()
+    }
+}
+
+// TESTS
+// ================================================================================================
+
+// NOTE: Most kernel-related tests can be found under /miden-tx/kernel_tests
+#[cfg(all(test, feature = "std"))]
+mod tests {
+    use miden_objects::assembly::Path;
+
+    use super::MidenLib;
+
+    #[test]
+    fn test_compile() {
+        let path = Path::new("::miden::active_account::get_id");
+        let miden = MidenLib::default();
+        let exists = miden.0.module_infos().any(|module| {
+            module
+                .procedures()
+                .any(|(_, proc)| module.path().join(&proc.name).as_path() == path)
+        });
+
+        assert!(exists);
+    }
+}