Skip to content
Draft
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions bin/node/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ miden-node-validator = { workspace = true }
miden-objects = { workspace = true }
tokio = { features = ["macros", "net", "rt-multi-thread"], workspace = true }
url = { workspace = true }
miden-remote-prover-client = { workspace = true }

[dev-dependencies]
figment = { features = ["env", "test", "toml"], version = "0.10" }
Expand Down
3 changes: 0 additions & 3 deletions bin/node/src/commands/block_producer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,6 @@ impl BlockProducerCommand {
store_url,
validator_url,
batch_prover_url: block_producer.batch_prover_url,
block_prover_url: block_producer.block_prover_url,
batch_interval: block_producer.batch_interval,
block_interval: block_producer.block_interval,
max_txs_per_batch: block_producer.max_txs_per_batch,
Expand Down Expand Up @@ -128,7 +127,6 @@ mod tests {
validator_url: dummy_url(),
block_producer: BlockProducerConfig {
batch_prover_url: None,
block_prover_url: None,
block_interval: std::time::Duration::from_secs(1),
batch_interval: std::time::Duration::from_secs(1),
max_txs_per_batch: 8,
Expand All @@ -152,7 +150,6 @@ mod tests {
validator_url: dummy_url(),
block_producer: BlockProducerConfig {
batch_prover_url: None,
block_prover_url: None,
block_interval: std::time::Duration::from_secs(1),
batch_interval: std::time::Duration::from_secs(1),
max_txs_per_batch: miden_objects::MAX_ACCOUNTS_PER_BATCH + 1, /* Use protocol
Expand Down
14 changes: 12 additions & 2 deletions bin/node/src/commands/bundled.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use anyhow::Context;
use miden_node_block_producer::BlockProducer;
use miden_node_ntx_builder::NetworkTransactionBuilder;
use miden_node_rpc::Rpc;
use miden_node_store::Store;
use miden_node_store::{BlockProver, Store};
use miden_node_utils::grpc::UrlExt;
use miden_node_validator::Validator;
use miden_objects::block::BlockSigner;
Expand All @@ -22,6 +22,7 @@ use super::{ENV_DATA_DIRECTORY, ENV_RPC_URL};
use crate::commands::{
BlockProducerConfig,
DEFAULT_TIMEOUT,
ENV_BLOCK_PROVER_URL,
ENV_ENABLE_OTEL,
ENV_GENESIS_CONFIG_FILE,
ENV_VALIDATOR_INSECURE_SECRET_KEY,
Expand Down Expand Up @@ -70,6 +71,10 @@ pub enum BundledCommand {
#[arg(long = "rpc.url", env = ENV_RPC_URL, value_name = "URL")]
rpc_url: Url,

/// The remote block prover's gRPC url.
#[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")]
block_prover_url: Url,

/// Directory in which the Store component should store the database and raw block data.
#[arg(long = "data-directory", env = ENV_DATA_DIRECTORY, value_name = "DIR")]
data_directory: PathBuf,
Expand Down Expand Up @@ -130,6 +135,7 @@ impl BundledCommand {
},
BundledCommand::Start {
rpc_url,
block_prover_url,
data_directory,
block_producer,
ntx_builder,
Expand All @@ -142,6 +148,7 @@ impl BundledCommand {
let signer = SecretKey::read_from_bytes(hex::decode(secret_key_hex)?.as_ref())?;
Self::start(
rpc_url,
block_prover_url,
data_directory,
ntx_builder,
block_producer,
Expand All @@ -156,6 +163,7 @@ impl BundledCommand {
#[allow(clippy::too_many_lines)]
async fn start(
rpc_url: Url,
block_prover_url: Url,
data_directory: PathBuf,
ntx_builder: NtxBuilderConfig,
block_producer: BlockProducerConfig,
Expand All @@ -172,6 +180,8 @@ impl BundledCommand {
.await
.context("Failed to bind to RPC gRPC endpoint")?;

let block_prover = Arc::new(BlockProver::new_remote(block_prover_url));

let block_producer_address = TcpListener::bind("127.0.0.1:0")
.await
.context("Failed to bind to block-producer gRPC endpoint")?
Expand Down Expand Up @@ -214,6 +224,7 @@ impl BundledCommand {
block_producer_listener: store_block_producer_listener,
ntx_builder_listener: store_ntx_builder_listener,
data_directory: data_directory_clone,
block_prover,
grpc_timeout,
}
.serve()
Expand Down Expand Up @@ -245,7 +256,6 @@ impl BundledCommand {
store_url,
validator_url,
batch_prover_url: block_producer.batch_prover_url,
block_prover_url: block_producer.block_prover_url,
batch_interval: block_producer.batch_interval,
block_interval: block_producer.block_interval,
max_batches_per_block: block_producer.max_batches_per_block,
Expand Down
5 changes: 0 additions & 5 deletions bin/node/src/commands/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,11 +93,6 @@ pub struct BlockProducerConfig {
#[arg(long = "batch-prover.url", env = ENV_BATCH_PROVER_URL, value_name = "URL")]
pub batch_prover_url: Option<Url>,

/// The remote block prover's gRPC url. If unset, will default to running a prover
/// in-process which is expensive.
#[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")]
pub block_prover_url: Option<Url>,

/// The number of transactions per batch.
#[arg(
long = "max-txs-per-batch",
Expand Down
20 changes: 19 additions & 1 deletion bin/node/src/commands/store.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;

use anyhow::Context;
use miden_node_store::Store;
use miden_node_store::genesis::config::{AccountFileWithName, GenesisConfig};
use miden_node_store::{BlockProver, Store};
use miden_node_utils::grpc::UrlExt;
use miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey;
use miden_objects::utils::Deserializable;
Expand All @@ -17,6 +18,7 @@ use super::{
};
use crate::commands::{
DEFAULT_TIMEOUT,
ENV_BLOCK_PROVER_URL,
ENV_ENABLE_OTEL,
ENV_GENESIS_CONFIG_FILE,
ENV_VALIDATOR_INSECURE_SECRET_KEY,
Expand Down Expand Up @@ -72,6 +74,10 @@ pub enum StoreCommand {
#[arg(long = "block-producer.url", env = ENV_STORE_BLOCK_PRODUCER_URL, value_name = "URL")]
block_producer_url: Url,

/// The remote block prover's gRPC url. If not provided, a local block prover will be used.
#[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")]
block_prover_url: Option<Url>,

/// Directory in which to store the database and raw block data.
#[arg(long, env = ENV_DATA_DIRECTORY, value_name = "DIR")]
data_directory: PathBuf,
Expand Down Expand Up @@ -115,6 +121,7 @@ impl StoreCommand {
rpc_url,
ntx_builder_url,
block_producer_url,
block_prover_url,
data_directory,
enable_otel: _,
grpc_timeout,
Expand All @@ -123,6 +130,7 @@ impl StoreCommand {
rpc_url,
ntx_builder_url,
block_producer_url,
block_prover_url,
data_directory,
grpc_timeout,
)
Expand All @@ -143,6 +151,7 @@ impl StoreCommand {
rpc_url: Url,
ntx_builder_url: Url,
block_producer_url: Url,
block_prover_url: Option<Url>,
data_directory: PathBuf,
grpc_timeout: Duration,
) -> anyhow::Result<()> {
Expand All @@ -167,8 +176,17 @@ impl StoreCommand {
.await
.context("Failed to bind to store's block-producer gRPC URL")?;

let block_prover = {
if let Some(url) = block_prover_url {
Arc::new(BlockProver::new_remote(url))
} else {
Arc::new(BlockProver::new_local(None))
}
};

Store {
rpc_listener,
block_prover,
ntx_builder_listener,
block_producer_listener,
data_directory,
Expand Down
1 change: 1 addition & 0 deletions bin/stress-test/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ miden-node-proto = { workspace = true }
miden-node-store = { workspace = true }
miden-node-utils = { workspace = true }
miden-objects = { workspace = true }
miden-remote-prover-client = { features = ["block-prover"], workspace = true }
rand = { workspace = true }
rayon = { version = "1.10" }
tokio = { workspace = true }
Expand Down
65 changes: 39 additions & 26 deletions bin/stress-test/src/seeding/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ use std::time::{Duration, Instant};

use metrics::SeedingMetrics;
use miden_air::ExecutionProof;
use miden_block_prover::LocalBlockProver;
use miden_lib::account::auth::AuthRpoFalcon512;
use miden_lib::account::faucets::BasicFungibleFaucet;
use miden_lib::account::wallets::BasicWallet;
Expand All @@ -15,7 +14,7 @@ use miden_lib::utils::Serializable;
use miden_node_block_producer::store::StoreClient;
use miden_node_proto::domain::batch::BatchInputs;
use miden_node_proto::generated::store::rpc_client::RpcClient;
use miden_node_store::{DataDirectory, GenesisState, Store};
use miden_node_store::{BlockProver, DataDirectory, GenesisState, Store};
use miden_node_utils::tracing::grpc::OtelInterceptor;
use miden_objects::account::delta::AccountUpdateDetails;
use miden_objects::account::{
Expand Down Expand Up @@ -161,7 +160,7 @@ async fn generate_blocks(
SecretKey::with_rng(&mut *rng)
};

let mut prev_block = genesis_block.clone();
let mut prev_block_header = genesis_block.header().clone();
let mut current_anchor_header = genesis_block.header().clone();

for i in 0..total_blocks {
Expand Down Expand Up @@ -193,7 +192,7 @@ async fn generate_blocks(
note_nullifiers.extend(notes.iter().map(|n| n.nullifier().prefix()));

// create the tx that creates the notes
let emit_note_tx = create_emit_note_tx(prev_block.header(), &mut faucet, notes.clone());
let emit_note_tx = create_emit_note_tx(&prev_block_header, &mut faucet, notes.clone());

// collect all the txs
block_txs.push(emit_note_tx);
Expand All @@ -202,27 +201,23 @@ async fn generate_blocks(
// create the batches with [TRANSACTIONS_PER_BATCH] txs each
let batches: Vec<ProvenBatch> = block_txs
.par_chunks(TRANSACTIONS_PER_BATCH)
.map(|txs| create_batch(txs, prev_block.header()))
.map(|txs| create_batch(txs, &prev_block_header))
.collect();

// create the block and send it to the store
let block_inputs = get_block_inputs(store_client, &batches, &mut metrics).await;

// update blocks
prev_block = apply_block(batches, block_inputs, store_client, &mut metrics).await;
if current_anchor_header.block_epoch() != prev_block.header().block_epoch() {
current_anchor_header = prev_block.header().clone();
prev_block_header = apply_block(batches, block_inputs, store_client, &mut metrics).await;
if current_anchor_header.block_epoch() != prev_block_header.block_epoch() {
current_anchor_header = prev_block_header.clone();
}

// create the consume notes txs to be used in the next block
let batch_inputs =
get_batch_inputs(store_client, prev_block.header(), &notes, &mut metrics).await;
consume_notes_txs = create_consume_note_txs(
prev_block.header(),
accounts,
notes,
&batch_inputs.note_proofs,
);
get_batch_inputs(store_client, &prev_block_header, &notes, &mut metrics).await;
consume_notes_txs =
create_consume_note_txs(&prev_block_header, accounts, notes, &batch_inputs.note_proofs);

// track store size every 50 blocks
if i % 50 == 0 {
Expand All @@ -239,30 +234,30 @@ async fn generate_blocks(
metrics
}

/// Given a list of batches and block inputs, creates a `ProvenBlock` and sends it to the store.
/// Tracks the insertion time on the metrics.
/// Sends block data to the store.
///
/// Returns the inserted block.
/// Returns the inserted block header.
async fn apply_block(
batches: Vec<ProvenBatch>,
block_inputs: BlockInputs,
store_client: &StoreClient,
metrics: &mut SeedingMetrics,
) -> ProvenBlock {
) -> BlockHeader {
let proposed_block = ProposedBlock::new(block_inputs.clone(), batches).unwrap();
let (header, body) = build_block(proposed_block.clone()).unwrap();
let block_proof = LocalBlockProver::new(0)
.prove_dummy(proposed_block.batches().clone(), header.clone(), block_inputs)
.unwrap();
let signature = EcdsaSecretKey::new().sign(header.commitment());
let proven_block = ProvenBlock::new_unchecked(header, body, signature, block_proof);
let block_size: usize = proven_block.to_bytes().len();
let block_size: usize = header.to_bytes().len() + body.to_bytes().len();
let ordered_batches = proposed_block.batches().clone();

let start = Instant::now();
store_client.apply_block(&proven_block).await.unwrap();

store_client
.apply_block(ordered_batches, block_inputs, header.clone(), body, signature)
.await
.unwrap();
metrics.track_block_insertion(start.elapsed(), block_size);

proven_block
header
}

// HELPER FUNCTIONS
Expand Down Expand Up @@ -524,6 +519,15 @@ async fn get_block_inputs(
/// - the URL of the store
pub async fn start_store(
data_directory: PathBuf,
) -> (RpcClient<InterceptedService<Channel, OtelInterceptor>>, Url) {
start_store_with_prover(data_directory, None).await
}

/// Starts the store with an optional remote block prover URL.
/// If `block_prover_url` is None, the store will use a local block prover.
pub async fn start_store_with_prover(
data_directory: PathBuf,
block_prover_url: Option<Url>,
) -> (RpcClient<InterceptedService<Channel, OtelInterceptor>>, Url) {
let rpc_listener = TcpListener::bind("127.0.0.1:0")
.await
Expand All @@ -541,10 +545,19 @@ pub async fn start_store(
let dir = data_directory.clone();

task::spawn(async move {
let block_prover = {
if let Some(url) = block_prover_url {
Arc::new(BlockProver::new_remote(url))
} else {
Arc::new(BlockProver::new_local(None))
}
};

Store {
rpc_listener,
ntx_builder_listener,
block_producer_listener,
block_prover,
data_directory: dir,
grpc_timeout: Duration::from_secs(30),
}
Expand Down
Loading
Loading