From 9127a0b12c48b0a18a3c4f99b3f23632c3706ed6 Mon Sep 17 00:00:00 2001 From: sergerad Date: Thu, 18 Dec 2025 12:25:59 +1300 Subject: [PATCH 01/13] Partial rework --- .../down.sql | 2 ++ .../up.sql | 5 ++++ crates/store/src/db/mod.rs | 17 +++++++---- .../src/db/models/queries/block_headers.rs | 20 ++++++------- crates/store/src/db/models/queries/mod.rs | 4 ++- crates/store/src/db/schema.rs | 29 +++++++------------ crates/store/src/db/tests.rs | 12 ++++++-- crates/store/src/state.rs | 22 +++++++++----- 8 files changed, 65 insertions(+), 46 deletions(-) create mode 100644 crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql create mode 100644 crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql diff --git a/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql b/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql new file mode 100644 index 000000000..3d8c27c3d --- /dev/null +++ b/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql @@ -0,0 +1,2 @@ +-- Remove signature column from block_headers table +ALTER TABLE block_headers DROP COLUMN signature; diff --git a/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql b/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql new file mode 100644 index 000000000..034f0554c --- /dev/null +++ b/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql @@ -0,0 +1,5 @@ +-- Add signature column to block_headers table +ALTER TABLE block_headers ADD COLUMN signature BLOB NOT NULL DEFAULT ''; + +-- Update existing rows to have empty signature (will be populated later if needed) +-- The default empty blob will be used for existing entries diff --git a/crates/store/src/db/mod.rs b/crates/store/src/db/mod.rs index 7b48684ed..6227e86c0 100644 --- a/crates/store/src/db/mod.rs +++ b/crates/store/src/db/mod.rs @@ -10,7 +10,8 @@ use miden_node_proto::generated as proto; use miden_objects::Word; use miden_objects::account::AccountId; use miden_objects::asset::{Asset, AssetVaultKey}; -use miden_objects::block::{BlockHeader, BlockNoteIndex, BlockNumber, ProvenBlock}; +use miden_objects::block::{BlockBody, BlockHeader, BlockNoteIndex, BlockNumber}; +use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::crypto::merkle::SparseMerklePath; use miden_objects::note::{ NoteDetails, @@ -248,6 +249,7 @@ impl Db { models::queries::apply_block( conn, genesis.header(), + genesis.signature().clone(), &[], &[], genesis.body().updated_accounts(), @@ -509,7 +511,9 @@ impl Db { &self, allow_acquire: oneshot::Sender<()>, acquire_done: oneshot::Receiver<()>, - block: ProvenBlock, + block_header: BlockHeader, + block_body: BlockBody, + signature: Signature, notes: Vec<(NoteRecord, Option)>, ) -> Result<()> { self.transact("apply block", move |conn| -> Result<()> { @@ -518,11 +522,12 @@ impl Db { models::queries::apply_block( conn, - block.header(), + &block_header, + signature, ¬es, - block.body().created_nullifiers(), - block.body().updated_accounts(), - block.body().transactions(), + block_body.created_nullifiers(), + block_body.updated_accounts(), + block_body.transactions(), )?; // XXX FIXME TODO free floating mutex MUST NOT exist diff --git a/crates/store/src/db/models/queries/block_headers.rs b/crates/store/src/db/models/queries/block_headers.rs index 42ec3b0e5..3388b8328 100644 --- 
a/crates/store/src/db/models/queries/block_headers.rs +++ b/crates/store/src/db/models/queries/block_headers.rs @@ -14,6 +14,7 @@ use diesel::{ use miden_lib::utils::{Deserializable, Serializable}; use miden_node_utils::limiter::{QueryParamBlockLimit, QueryParamLimiter}; use miden_objects::block::{BlockHeader, BlockNumber}; +use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use super::DatabaseError; use crate::db::models::conv::SqlTypeConvert; @@ -130,6 +131,7 @@ pub struct BlockHeaderRawRow { #[allow(dead_code)] pub block_num: i64, pub block_header: Vec, + pub signature: Vec, } impl TryInto for BlockHeaderRawRow { type Error = DatabaseError; @@ -145,14 +147,7 @@ impl TryInto for BlockHeaderRawRow { pub struct BlockHeaderInsert { pub block_num: i64, pub block_header: Vec, -} -impl From<&BlockHeader> for BlockHeaderInsert { - fn from(block_header: &BlockHeader) -> Self { - Self { - block_num: block_header.block_num().to_raw_sql(), - block_header: block_header.to_bytes(), - } - } + pub signature: Vec, } /// Insert a [`BlockHeader`] to the DB using the given [`SqliteConnection`]. @@ -168,10 +163,15 @@ impl From<&BlockHeader> for BlockHeaderInsert { pub(crate) fn insert_block_header( conn: &mut SqliteConnection, block_header: &BlockHeader, + signature: &Signature, ) -> Result { - let block_header = BlockHeaderInsert::from(block_header); + let block_header_insert = BlockHeaderInsert { + block_num: block_header.block_num().to_raw_sql(), + block_header: block_header.to_bytes(), + signature: signature.to_bytes(), + }; let count = diesel::insert_into(schema::block_headers::table) - .values(&[block_header]) + .values(&[block_header_insert]) .execute(conn)?; Ok(count) } diff --git a/crates/store/src/db/models/queries/mod.rs b/crates/store/src/db/models/queries/mod.rs index 0d40dd8c4..4234f240b 100644 --- a/crates/store/src/db/models/queries/mod.rs +++ b/crates/store/src/db/models/queries/mod.rs @@ -33,6 +33,7 @@ use diesel::SqliteConnection; use miden_objects::account::AccountId; use miden_objects::block::{BlockAccountUpdate, BlockHeader, BlockNumber}; +use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::note::Nullifier; use miden_objects::transaction::OrderedTransactionHeaders; @@ -59,6 +60,7 @@ pub(crate) use notes::*; pub(crate) fn apply_block( conn: &mut SqliteConnection, block_header: &BlockHeader, + signature: Signature, notes: &[(NoteRecord, Option)], nullifiers: &[Nullifier], accounts: &[BlockAccountUpdate], @@ -66,7 +68,7 @@ pub(crate) fn apply_block( ) -> Result { let mut count = 0; // Note: ordering here is important as the relevant tables have FK dependencies. - count += insert_block_header(conn, block_header)?; + count += insert_block_header(conn, block_header, &signature)?; count += upsert_accounts(conn, accounts, block_header.block_num())?; count += insert_scripts(conn, notes.iter().map(|(note, _)| note))?; count += insert_notes(conn, notes)?; diff --git a/crates/store/src/db/schema.rs b/crates/store/src/db/schema.rs index 6f36594b9..91aa54175 100644 --- a/crates/store/src/db/schema.rs +++ b/crates/store/src/db/schema.rs @@ -1,5 +1,12 @@ // @generated automatically by Diesel CLI. +diesel::table! { + account_codes (code_commitment) { + code_commitment -> Binary, + code -> Binary, + } +} + diesel::table! { account_storage_map_values (account_id, block_num, slot_name, key) { account_id -> Binary, @@ -25,27 +32,21 @@ diesel::table! 
{ accounts (account_id, block_num) { account_id -> Binary, network_account_id_prefix -> Nullable, + block_num -> BigInt, account_commitment -> Binary, code_commitment -> Nullable, storage -> Nullable, vault -> Nullable, nonce -> Nullable, - block_num -> BigInt, is_latest -> Bool, } } -diesel::table! { - account_codes (code_commitment) { - code_commitment -> Binary, - code -> Binary, - } -} - diesel::table! { block_headers (block_num) { block_num -> BigInt, block_header -> Binary, + signature -> Binary, } } @@ -100,21 +101,11 @@ diesel::table! { } } -diesel::joinable!(accounts -> account_codes (code_commitment)); -diesel::joinable!(accounts -> block_headers (block_num)); -// Note: Cannot use diesel::joinable! with accounts table due to composite primary key -// diesel::joinable!(notes -> accounts (sender)); -// diesel::joinable!(transactions -> accounts (account_id)); -diesel::joinable!(notes -> block_headers (committed_at)); -diesel::joinable!(notes -> note_scripts (script_root)); -diesel::joinable!(nullifiers -> block_headers (block_num)); -diesel::joinable!(transactions -> block_headers (block_num)); - diesel::allow_tables_to_appear_in_same_query!( account_codes, account_storage_map_values, - accounts, account_vault_assets, + accounts, block_headers, note_scripts, notes, diff --git a/crates/store/src/db/tests.rs b/crates/store/src/db/tests.rs index e203e217c..87e0b5758 100644 --- a/crates/store/src/db/tests.rs +++ b/crates/store/src/db/tests.rs @@ -96,7 +96,9 @@ fn create_block(conn: &mut SqliteConnection, block_num: BlockNumber) { 11_u8.into(), ); - conn.transaction(|conn| queries::insert_block_header(conn, &block_header)) + let dummy_signature = miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey::new() + .sign(block_header.commitment()); + conn.transaction(|conn| queries::insert_block_header(conn, &block_header, &dummy_signature)) .unwrap(); } @@ -863,7 +865,9 @@ fn db_block_header() { ); // test insertion - queries::insert_block_header(conn, &block_header).unwrap(); + let dummy_signature = miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey::new() + .sign(block_header.commitment()); + queries::insert_block_header(conn, &block_header, &dummy_signature).unwrap(); // test fetch unknown block header let block_number = 1; @@ -894,7 +898,9 @@ fn db_block_header() { 21_u8.into(), ); - queries::insert_block_header(conn, &block_header2).unwrap(); + let dummy_signature = miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey::new() + .sign(block_header2.commitment()); + queries::insert_block_header(conn, &block_header2, &dummy_signature).unwrap(); let res = queries::select_block_header_by_block_num(conn, None).unwrap(); assert_eq!(res.unwrap(), block_header2); diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index c9225d147..bd62a1c71 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -27,6 +27,7 @@ use miden_objects::account::{AccountHeader, AccountId, StorageSlot, StorageSlotC use miden_objects::block::account_tree::{AccountTree, AccountWitness, account_id_to_smt_key}; use miden_objects::block::nullifier_tree::{NullifierTree, NullifierWitness}; use miden_objects::block::{BlockHeader, BlockInputs, BlockNumber, Blockchain, ProvenBlock}; +use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::crypto::merkle::{ Forest, LargeSmt, @@ -188,12 +189,19 @@ impl State { // TODO: This span is logged in a root span, we should connect it to the parent span. 
#[allow(clippy::too_many_lines)] #[instrument(target = COMPONENT, skip_all, err)] - pub async fn apply_block(&self, block: ProvenBlock) -> Result<(), ApplyBlockError> { + pub async fn apply_block( + &self, + header: BlockHeader, + body: BlockBody, + signature: Signature, + ) -> Result<(), ApplyBlockError> { let _lock = self.writer.try_lock().map_err(|_| ApplyBlockError::ConcurrentWrite)?; - let header = block.header(); + let header = block.header().clone(); + let block_body = block.body().clone(); + let signature = block.signature().clone(); - let tx_commitment = block.body().transactions().commitment(); + let tx_commitment = block_body.transactions().commitment(); if header.tx_commitment() != tx_commitment { return Err(InvalidBlockError::InvalidBlockTxCommitment { @@ -363,10 +371,10 @@ impl State { // in-memory write lock. This requires the DB update to run concurrently, so a new task is // spawned. let db = Arc::clone(&self.db); - let db_update_task = - tokio::spawn( - async move { db.apply_block(allow_acquire, acquire_done, block, notes).await }, - ); + let db_update_task = tokio::spawn(async move { + db.apply_block(allow_acquire, acquire_done, header, block_body, signature, notes) + .await + }); // Wait for the message from the DB update task, that we ready to commit the DB transaction acquired_allowed.await.map_err(ApplyBlockError::ClosedChannel)?; From 2da1e807b8e3b39c4c9cf3deaec6efd91c42b368 Mon Sep 17 00:00:00 2001 From: sergerad Date: Thu, 18 Dec 2025 14:30:26 +1300 Subject: [PATCH 02/13] Refactor apply block gRPC to store --- bin/stress-test/src/seeding/mod.rs | 40 +- .../block-producer/src/block_builder/mod.rs | 204 +-- crates/block-producer/src/errors.rs | 11 - crates/block-producer/src/server/mod.rs | 3 +- crates/block-producer/src/store/mod.rs | 16 +- crates/proto/src/generated/blockchain.rs | 8 - crates/proto/src/generated/store.rs | 1316 ++++++----------- .../src/db/models/queries/block_headers.rs | 2 +- crates/store/src/server/block_producer.rs | 27 +- crates/store/src/state.rs | 32 +- proto/proto/internal/store.proto | 18 +- proto/proto/types/blockchain.proto | 18 +- 12 files changed, 577 insertions(+), 1118 deletions(-) diff --git a/bin/stress-test/src/seeding/mod.rs b/bin/stress-test/src/seeding/mod.rs index 924710a09..e9cf832a1 100644 --- a/bin/stress-test/src/seeding/mod.rs +++ b/bin/stress-test/src/seeding/mod.rs @@ -5,7 +5,6 @@ use std::time::{Duration, Instant}; use metrics::SeedingMetrics; use miden_air::ExecutionProof; -use miden_block_prover::LocalBlockProver; use miden_lib::account::auth::AuthRpoFalcon512; use miden_lib::account::faucets::BasicFungibleFaucet; use miden_lib::account::wallets::BasicWallet; @@ -161,7 +160,7 @@ async fn generate_blocks( SecretKey::with_rng(&mut *rng) }; - let mut prev_block = genesis_block.clone(); + let mut prev_block_header = genesis_block.header().clone(); let mut current_anchor_header = genesis_block.header().clone(); for i in 0..total_blocks { @@ -193,7 +192,7 @@ async fn generate_blocks( note_nullifiers.extend(notes.iter().map(|n| n.nullifier().prefix())); // create the tx that creates the notes - let emit_note_tx = create_emit_note_tx(prev_block.header(), &mut faucet, notes.clone()); + let emit_note_tx = create_emit_note_tx(&prev_block_header, &mut faucet, notes.clone()); // collect all the txs block_txs.push(emit_note_tx); @@ -202,27 +201,23 @@ async fn generate_blocks( // create the batches with [TRANSACTIONS_PER_BATCH] txs each let batches: Vec = block_txs .par_chunks(TRANSACTIONS_PER_BATCH) - .map(|txs| create_batch(txs, 
prev_block.header())) + .map(|txs| create_batch(txs, &prev_block_header)) .collect(); // create the block and send it to the store let block_inputs = get_block_inputs(store_client, &batches, &mut metrics).await; // update blocks - prev_block = apply_block(batches, block_inputs, store_client, &mut metrics).await; - if current_anchor_header.block_epoch() != prev_block.header().block_epoch() { - current_anchor_header = prev_block.header().clone(); + prev_block_header = apply_block(batches, block_inputs, store_client, &mut metrics).await; + if current_anchor_header.block_epoch() != prev_block_header.block_epoch() { + current_anchor_header = prev_block_header.clone(); } // create the consume notes txs to be used in the next block let batch_inputs = - get_batch_inputs(store_client, prev_block.header(), ¬es, &mut metrics).await; - consume_notes_txs = create_consume_note_txs( - prev_block.header(), - accounts, - notes, - &batch_inputs.note_proofs, - ); + get_batch_inputs(store_client, &prev_block_header, ¬es, &mut metrics).await; + consume_notes_txs = + create_consume_note_txs(&prev_block_header, accounts, notes, &batch_inputs.note_proofs); // track store size every 50 blocks if i % 50 == 0 { @@ -239,30 +234,25 @@ async fn generate_blocks( metrics } -/// Given a list of batches and block inputs, creates a `ProvenBlock` and sends it to the store. -/// Tracks the insertion time on the metrics. +/// Sends block data to the store /// -/// Returns the the inserted block. +/// Returns the the inserted block header. async fn apply_block( batches: Vec, block_inputs: BlockInputs, store_client: &StoreClient, metrics: &mut SeedingMetrics, -) -> ProvenBlock { +) -> BlockHeader { let proposed_block = ProposedBlock::new(block_inputs.clone(), batches).unwrap(); let (header, body) = build_block(proposed_block.clone()).unwrap(); - let block_proof = LocalBlockProver::new(0) - .prove_dummy(proposed_block.batches().clone(), header.clone(), block_inputs) - .unwrap(); let signature = EcdsaSecretKey::new().sign(header.commitment()); - let proven_block = ProvenBlock::new_unchecked(header, body, signature, block_proof); - let block_size: usize = proven_block.to_bytes().len(); + let block_size: usize = header.to_bytes().len() + body.to_bytes().len(); let start = Instant::now(); - store_client.apply_block(&proven_block).await.unwrap(); + store_client.apply_block(header.clone(), body, signature).await.unwrap(); metrics.track_block_insertion(start.elapsed(), block_size); - proven_block + header } // HELPER FUNCTIONS diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index 8d5c9b806..e34d034e6 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -1,30 +1,24 @@ -use std::ops::{Deref, Range}; +use std::ops::Deref; use std::sync::Arc; use futures::FutureExt; use futures::never::Never; -use miden_block_prover::LocalBlockProver; use miden_lib::block::build_block; use miden_node_utils::tracing::OpenTelemetrySpanExt; -use miden_objects::MIN_PROOF_SECURITY_LEVEL; use miden_objects::batch::{OrderedBatches, ProvenBatch}; use miden_objects::block::{ BlockBody, BlockHeader, BlockInputs, BlockNumber, - BlockProof, ProposedBlock, ProvenBlock, }; use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::note::NoteHeader; -use miden_objects::transaction::{OrderedTransactionHeaders, TransactionHeader}; -use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; -use rand::Rng; +use 
miden_objects::transaction::TransactionHeader; use tokio::time::Duration; -use tracing::{Span, info, instrument}; -use url::Url; +use tracing::{Span, instrument}; use crate::errors::BuildBlockError; use crate::mempool::SharedMempool; @@ -37,8 +31,6 @@ use crate::{COMPONENT, TelemetryInjectorExt}; pub struct BlockBuilder { pub block_interval: Duration, - /// Used to simulate block proving by sleeping for a random duration selected from this range. - pub simulated_proof_time: Range, /// Simulated block failure rate as a percentage. /// @@ -48,9 +40,6 @@ pub struct BlockBuilder { pub store: StoreClient, pub validator: BlockProducerValidatorClient, - - /// The prover used to prove a proposed block into a proven block. - pub block_prover: BlockProver, } impl BlockBuilder { @@ -60,20 +49,12 @@ impl BlockBuilder { pub fn new( store: StoreClient, validator: BlockProducerValidatorClient, - block_prover_url: Option, block_interval: Duration, ) -> Self { - let block_prover = match block_prover_url { - Some(url) => BlockProver::new_remote(url), - None => BlockProver::new_local(MIN_PROOF_SECURITY_LEVEL), - }; - Self { block_interval, // Note: The range cannot be empty. - simulated_proof_time: Duration::ZERO..Duration::from_millis(1), failure_rate: 0.0, - block_prover, store, validator, } @@ -131,12 +112,7 @@ impl BlockBuilder { ProposedBlock::inject_telemetry(proposed_block); }) .and_then(|(proposed_block, inputs)| self.validate_block(proposed_block, inputs)) - .and_then(|(proposed_block, inputs, header, signature, body)| self.prove_block(proposed_block, inputs, header, signature, body)) - .inspect_ok(ProvenBlock::inject_telemetry) - // Failure must be injected before the final pipeline stage i.e. before commit is called. The system cannot - // handle errors after it considers the process complete (which makes sense). - .and_then(|proven_block| async { self.inject_failure(proven_block) }) - .and_then(|proven_block| self.commit_block(mempool, proven_block)) + .and_then(|(_ordered_batches, _block_inputs, header, body, signature)| self.commit_block(mempool, header, body, signature)) // Handle errors by propagating the error to the root span and rolling back the block. .inspect_err(|err| Span::current().set_error(err)) .or_else(|_err| self.rollback_block(mempool, block_num).never_error()) @@ -230,7 +206,7 @@ impl BlockBuilder { &self, proposed_block: ProposedBlock, block_inputs: BlockInputs, - ) -> Result<(OrderedBatches, BlockInputs, BlockHeader, Signature, BlockBody), BuildBlockError> + ) -> Result<(OrderedBatches, BlockInputs, BlockHeader, BlockBody, Signature), BuildBlockError> { // Concurrently build the block and validate it via the validator. let build_result = tokio::task::spawn_blocking({ @@ -254,53 +230,55 @@ impl BlockBuilder { } let (ordered_batches, ..) = proposed_block.into_parts(); - Ok((ordered_batches, block_inputs, header, signature, body)) + Ok((ordered_batches, block_inputs, header, body, signature)) } - #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] - async fn prove_block( - &self, - ordered_batches: OrderedBatches, - block_inputs: BlockInputs, - header: BlockHeader, - signature: Signature, - body: BlockBody, - ) -> Result { - // Prove block using header and body from validator. - let block_proof = self - .block_prover - .prove(ordered_batches.clone(), header.clone(), block_inputs) - .await?; - self.simulate_proving().await; - - // SAFETY: The header and body are assumed valid and consistent with the proof. 
- let proven_block = ProvenBlock::new_unchecked(header, body, signature, block_proof); - if proven_block.proof_security_level() < MIN_PROOF_SECURITY_LEVEL { - return Err(BuildBlockError::SecurityLevelTooLow( - proven_block.proof_security_level(), - MIN_PROOF_SECURITY_LEVEL, - )); - } - // TODO(sergerad): Consider removing this validation. Once block proving is implemented, - // this would be replaced with verifying the proof returned from the prover against - // the block header. - validate_tx_headers(&proven_block, &ordered_batches.to_transactions())?; - - Ok(proven_block) - } + //#[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] + //async fn prove_block( + // &self, + // ordered_batches: OrderedBatches, + // block_inputs: BlockInputs, + // header: BlockHeader, + // signature: Signature, + // body: BlockBody, + //) -> Result { + // // Prove block using header and body from validator. + // let block_proof = self + // .block_prover + // .prove(ordered_batches.clone(), header.clone(), block_inputs) + // .await?; + // self.simulate_proving().await; + + // // SAFETY: The header and body are assumed valid and consistent with the proof. + // let proven_block = ProvenBlock::new_unchecked(header, body, signature, block_proof); + // if proven_block.proof_security_level() < MIN_PROOF_SECURITY_LEVEL { + // return Err(BuildBlockError::SecurityLevelTooLow( + // proven_block.proof_security_level(), + // MIN_PROOF_SECURITY_LEVEL, + // )); + // } + // // TODO(sergerad): Consider removing this validation. Once block proving is implemented, + // // this would be replaced with verifying the proof returned from the prover against + // // the block header. + // validate_tx_headers(&proven_block, &ordered_batches.to_transactions())?; + + // Ok(proven_block) + //} #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, mempool: &SharedMempool, - built_block: ProvenBlock, + header: BlockHeader, + body: BlockBody, + signature: Signature, ) -> Result<(), BuildBlockError> { self.store - .apply_block(&built_block) + .apply_block(header.clone(), body, signature) .await .map_err(BuildBlockError::StoreApplyBlockFailed)?; - mempool.lock().await.commit_block(built_block.header().clone()); + mempool.lock().await.commit_block(header); Ok(()) } @@ -309,31 +287,6 @@ impl BlockBuilder { async fn rollback_block(&self, mempool: &SharedMempool, block: BlockNumber) { mempool.lock().await.rollback_block(block); } - - #[instrument(target = COMPONENT, name = "block_builder.simulate_proving", skip_all)] - async fn simulate_proving(&self) { - let proving_duration = rand::rng().random_range(self.simulated_proof_time.clone()); - - Span::current().set_attribute("range.min_s", self.simulated_proof_time.start); - Span::current().set_attribute("range.max_s", self.simulated_proof_time.end); - Span::current().set_attribute("dice_roll_s", proving_duration); - - tokio::time::sleep(proving_duration).await; - } - - #[instrument(target = COMPONENT, name = "block_builder.inject_failure", skip_all, err)] - fn inject_failure(&self, value: T) -> Result { - let roll = rand::rng().random::(); - - Span::current().set_attribute("failure_rate", self.failure_rate); - Span::current().set_attribute("dice_roll", roll); - - if roll < self.failure_rate { - Err(BuildBlockError::InjectedFailure) - } else { - Ok(value) - } - } } /// A wrapper around batches selected for inlucion in a block, primarily used to be able to inject @@ -430,76 +383,3 @@ impl TelemetryInjectorExt 
for ProvenBlock { span.set_attribute("block.commitments.transaction", header.tx_commitment()); } } - -// BLOCK PROVER -// ================================================================================================ - -pub enum BlockProver { - Local(LocalBlockProver), - Remote(RemoteBlockProver), -} - -impl BlockProver { - pub fn new_local(security_level: u32) -> Self { - info!(target: COMPONENT, "Using local block prover"); - Self::Local(LocalBlockProver::new(security_level)) - } - - pub fn new_remote(endpoint: impl Into) -> Self { - info!(target: COMPONENT, "Using remote block prover"); - Self::Remote(RemoteBlockProver::new(endpoint)) - } - - #[instrument(target = COMPONENT, skip_all, err)] - async fn prove( - &self, - tx_batches: OrderedBatches, - block_header: BlockHeader, - block_inputs: BlockInputs, - ) -> Result { - match self { - Self::Local(prover) => prover - .prove(tx_batches, block_header, block_inputs) - .map_err(BuildBlockError::ProveBlockFailed), - Self::Remote(prover) => prover - .prove(tx_batches, block_header, block_inputs) - .await - .map_err(BuildBlockError::RemoteProverClientError), - } - } -} - -/// Validates that the proven block's transaction headers are consistent with the transactions -/// passed in the proposed block. -/// -/// This expects that transactions from the proposed block and proven block are in the same -/// order, as defined by [`OrderedTransactionHeaders`]. -fn validate_tx_headers( - proven_block: &ProvenBlock, - proposed_txs: &OrderedTransactionHeaders, -) -> Result<(), BuildBlockError> { - if proposed_txs.as_slice().len() != proven_block.body().transactions().as_slice().len() { - return Err(BuildBlockError::other(format!( - "remote prover returned {} transaction headers but {} transactions were passed as part of the proposed block", - proven_block.body().transactions().as_slice().len(), - proposed_txs.as_slice().len() - ))); - } - - // Because we checked the length matches we can zip the iterators up. - // We expect the transaction headers to be in the same order. - for (proposed_header, proven_header) in proposed_txs - .as_slice() - .iter() - .zip(proven_block.body().transactions().as_slice()) - { - if proposed_header != proven_header { - return Err(BuildBlockError::other(format!( - "transaction header with id {} does not match header of the transaction in the proposed block", - proposed_header.id() - ))); - } - } - - Ok(()) -} diff --git a/crates/block-producer/src/errors.rs b/crates/block-producer/src/errors.rs index d53a5ead4..87cfdf8d0 100644 --- a/crates/block-producer/src/errors.rs +++ b/crates/block-producer/src/errors.rs @@ -1,6 +1,5 @@ use core::error::Error as CoreError; -use miden_block_prover::BlockProverError; use miden_node_proto::errors::{ConversionError, GrpcError}; use miden_objects::account::AccountId; use miden_objects::block::BlockNumber; @@ -215,16 +214,6 @@ pub enum BuildBlockError { ValidateBlockFailed(#[source] Box), #[error("block signature is invalid")] InvalidSignature, - #[error("failed to prove block")] - ProveBlockFailed(#[source] BlockProverError), - /// We sometimes randomly inject errors into the batch building process to test our failure - /// responses. 
- #[error("nothing actually went wrong, failure was injected on purpose")] - InjectedFailure, - #[error("failed to prove block with remote prover")] - RemoteProverClientError(#[source] RemoteProverClientError), - #[error("block proof security level is too low: {0} < {1}")] - SecurityLevelTooLow(u32, u32), /// Custom error variant for errors not covered by the other variants. #[error("{error_msg}")] Other { diff --git a/crates/block-producer/src/server/mod.rs b/crates/block-producer/src/server/mod.rs index 73b5a7b3c..c5354bba1 100644 --- a/crates/block-producer/src/server/mod.rs +++ b/crates/block-producer/src/server/mod.rs @@ -125,8 +125,7 @@ impl BlockProducer { info!(target: COMPONENT, "Server initialized"); - let block_builder = - BlockBuilder::new(store.clone(), validator, self.block_prover_url, self.block_interval); + let block_builder = BlockBuilder::new(store.clone(), validator, self.block_interval); let batch_builder = BatchBuilder::new( store.clone(), SERVER_NUM_BATCH_BUILDERS, diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 5ea3089cb..85e013fe6 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -10,7 +10,8 @@ use miden_node_proto::{AccountState, generated as proto}; use miden_node_utils::formatting::format_opt; use miden_objects::Word; use miden_objects::account::AccountId; -use miden_objects::block::{BlockHeader, BlockInputs, BlockNumber, ProvenBlock}; +use miden_objects::block::{BlockBody, BlockHeader, BlockInputs, BlockNumber}; +use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::note::Nullifier; use miden_objects::transaction::ProvenTransaction; use miden_objects::utils::Serializable; @@ -238,8 +239,17 @@ impl StoreClient { } #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] - pub async fn apply_block(&self, block: &ProvenBlock) -> Result<(), StoreError> { - let request = tonic::Request::new(proto::blockchain::Block { block: block.to_bytes() }); + pub async fn apply_block( + &self, + header: BlockHeader, + body: BlockBody, + signature: Signature, + ) -> Result<(), StoreError> { + let request = tonic::Request::new(proto::store::ApplyBlockRequest { + header: header.to_bytes(), + body: body.to_bytes(), + signature: signature.to_bytes(), + }); self.client.clone().apply_block(request).await.map(|_| ()).map_err(Into::into) } diff --git a/crates/proto/src/generated/blockchain.rs b/crates/proto/src/generated/blockchain.rs index 927eadb05..cd67fcf02 100644 --- a/crates/proto/src/generated/blockchain.rs +++ b/crates/proto/src/generated/blockchain.rs @@ -1,12 +1,4 @@ // This file is @generated by prost-build. -/// Represents a block. -#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] -pub struct Block { - /// Block data encoded using \[winter_utils::Serializable\] implementation for - /// \[miden_objects::block::Block\]. - #[prost(bytes = "vec", tag = "1")] - pub block: ::prost::alloc::vec::Vec, -} /// Represents a proposed block. #[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct ProposedBlock { diff --git a/crates/proto/src/generated/store.rs b/crates/proto/src/generated/store.rs index 33703e88a..b99816f80 100644 --- a/crates/proto/src/generated/store.rs +++ b/crates/proto/src/generated/store.rs @@ -1,4 +1,20 @@ // This file is @generated by prost-build. +/// Applies a block to the state. 
+#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] +pub struct ApplyBlockRequest { + /// Block header encoded using \[winter_utils::Serializable\] implementation for + /// \[miden_objects::block::BlockHeader\]. + #[prost(bytes = "vec", tag = "1")] + pub header: ::prost::alloc::vec::Vec, + /// Block header encoded using \[winter_utils::Serializable\] implementation for + /// \[miden_objects::block::BlockBody\]. + #[prost(bytes = "vec", tag = "2")] + pub body: ::prost::alloc::vec::Vec, + /// Signature encoded using \[winter_utils::Serializable\] implementation for + /// \[crypto::dsa::ecdsa_k256_keccak::Signature\]. + #[prost(bytes = "vec", tag = "3")] + pub signature: ::prost::alloc::vec::Vec, +} /// Returns data required to prove the next block. #[derive(Clone, PartialEq, ::prost::Message)] pub struct BlockInputsRequest { @@ -16,7 +32,8 @@ pub struct BlockInputsRequest { /// provide a nullifier witness for it. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, - /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. + /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the + /// store**. #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, /// Array of block numbers referenced by all batches in the block. @@ -32,9 +49,8 @@ pub struct BlockInputs { /// Proof of each requested unauthenticated note's inclusion in a block, **if it existed in /// the store**. #[prost(message, repeated, tag = "2")] - pub unauthenticated_note_proofs: ::prost::alloc::vec::Vec< - super::note::NoteInclusionInBlockProof, - >, + pub unauthenticated_note_proofs: + ::prost::alloc::vec::Vec, /// The serialized chain MMR which includes proofs for all blocks referenced by the /// above note inclusion proofs as well as proofs for inclusion of the requested blocks /// referenced by the batches in the block. @@ -75,9 +91,7 @@ pub struct BatchInputsRequest { pub struct BatchInputs { /// The block header that the transaction batch should reference. #[prost(message, optional, tag = "1")] - pub batch_reference_block_header: ::core::option::Option< - super::blockchain::BlockHeader, - >, + pub batch_reference_block_header: ::core::option::Option, /// Proof of each *found* unauthenticated note's inclusion in a block. #[prost(message, repeated, tag = "2")] pub note_proofs: ::prost::alloc::vec::Vec, @@ -108,14 +122,10 @@ pub struct TransactionInputsRequest { pub struct TransactionInputs { /// Account state proof. #[prost(message, optional, tag = "1")] - pub account_state: ::core::option::Option< - transaction_inputs::AccountTransactionInputRecord, - >, + pub account_state: ::core::option::Option, /// List of nullifiers that have been consumed. #[prost(message, repeated, tag = "2")] - pub nullifiers: ::prost::alloc::vec::Vec< - transaction_inputs::NullifierTransactionInputRecord, - >, + pub nullifiers: ::prost::alloc::vec::Vec, /// List of unauthenticated notes that were not found in the database. #[prost(message, repeated, tag = "3")] pub found_unauthenticated_notes: ::prost::alloc::vec::Vec, @@ -170,8 +180,8 @@ pub struct MaybeAccountDetails { /// Notes created or consumed after the specified block are excluded from the result. 
#[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] pub struct UnconsumedNetworkNotesRequest { - /// This should be null on the first call, and set to the response token until the response token - /// is null, at which point all data has been fetched. + /// This should be null on the first call, and set to the response token until the response + /// token is null, at which point all data has been fetched. /// /// Note that this token is only valid if used with the same parameters. #[prost(uint64, optional, tag = "1")] @@ -227,10 +237,10 @@ pub mod rpc_client { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value, + clippy::let_unit_value )] - use tonic::codegen::*; use tonic::codegen::http::Uri; + use tonic::codegen::*; /// Store API for the RPC component #[derive(Debug, Clone)] pub struct RpcClient { @@ -262,22 +272,18 @@ pub mod rpc_client { let inner = tonic::client::Grpc::with_origin(inner, origin); Self { inner } } - pub fn with_interceptor( - inner: T, - interceptor: F, - ) -> RpcClient> + pub fn with_interceptor(inner: T, interceptor: F) -> RpcClient> where F: tonic::service::Interceptor, T::ResponseBody: Default, T: tonic::codegen::Service< - http::Request, - Response = http::Response< - >::ResponseBody, + http::Request, + Response = http::Response< + >::ResponseBody, + >, >, - >, - , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: + Into + std::marker::Send + std::marker::Sync, { RpcClient::new(InterceptedService::new(inner, interceptor)) } @@ -316,18 +322,11 @@ pub mod rpc_client { pub async fn status( &mut self, request: impl tonic::IntoRequest<()>, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/Status"); let mut req = request.into_request(); @@ -342,18 +341,11 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/CheckNullifiers", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/CheckNullifiers"); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "CheckNullifiers")); self.inner.unary(req, path, codec).await @@ -366,21 +358,13 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/GetAccountDetails", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetAccountDetails"); let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("store.Rpc", 
"GetAccountDetails")); + req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetAccountDetails")); self.inner.unary(req, path, codec).await } /// Returns the latest state proof of the specified account. @@ -391,18 +375,11 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/GetAccountProof", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetAccountProof"); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetAccountProof")); self.inner.unary(req, path, codec).await @@ -411,50 +388,31 @@ pub mod rpc_client { pub async fn get_block_by_number( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/GetBlockByNumber", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetBlockByNumber"); let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("store.Rpc", "GetBlockByNumber")); + req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetBlockByNumber")); self.inner.unary(req, path, codec).await } /// Retrieves block header by given block number. Optionally, it also returns the MMR path /// and current chain length to authenticate the block's inclusion. 
pub async fn get_block_header_by_number( &mut self, - request: impl tonic::IntoRequest< - super::super::rpc::BlockHeaderByNumberRequest, - >, + request: impl tonic::IntoRequest, ) -> std::result::Result< tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/GetBlockHeaderByNumber", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetBlockHeaderByNumber"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.Rpc", "GetBlockHeaderByNumber")); @@ -468,14 +426,9 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetNotesById"); let mut req = request.into_request(); @@ -486,28 +439,19 @@ pub mod rpc_client { pub async fn get_note_script_by_root( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/GetNoteScriptByRoot", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetNoteScriptByRoot"); let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("store.Rpc", "GetNoteScriptByRoot")); + req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetNoteScriptByRoot")); self.inner.unary(req, path, codec).await } - /// Returns a list of nullifiers that match the specified prefixes and are recorded in the node. + /// Returns a list of nullifiers that match the specified prefixes and are recorded in the + /// node. /// /// Note that only 16-bit prefixes are supported at this time. pub async fn sync_nullifiers( @@ -517,80 +461,65 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncNullifiers"); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncNullifiers")); self.inner.unary(req, path, codec).await } - /// Returns info which can be used by the requester to sync up to the tip of chain for the notes they are interested in. + /// Returns info which can be used by the requester to sync up to the tip of chain for the + /// notes they are interested in. 
/// - /// requester specifies the `note_tags` they are interested in, and the block height from which to search for new for - /// matching notes for. The request will then return the next block containing any note matching the provided tags. + /// requester specifies the `note_tags` they are interested in, and the block height from + /// which to search for new for matching notes for. The request will then return the + /// next block containing any note matching the provided tags. /// /// The response includes each note's metadata and inclusion proof. /// - /// A basic note sync can be implemented by repeatedly requesting the previous response's block until reaching the - /// tip of the chain. + /// A basic note sync can be implemented by repeatedly requesting the previous response's + /// block until reaching the tip of the chain. pub async fn sync_notes( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncNotes"); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncNotes")); self.inner.unary(req, path, codec).await } - /// Returns info which can be used by the requester to sync up to the latest state of the chain - /// for the objects (accounts, notes, nullifiers) the requester is interested in. + /// Returns info which can be used by the requester to sync up to the latest state of the + /// chain for the objects (accounts, notes, nullifiers) the requester is interested + /// in. /// - /// This request returns the next block containing requested data. It also returns `chain_tip` - /// which is the latest block number in the chain. requester is expected to repeat these requests - /// in a loop until `response.block_header.block_num == response.chain_tip`, at which point - /// the requester is fully synchronized with the chain. + /// This request returns the next block containing requested data. It also returns + /// `chain_tip` which is the latest block number in the chain. requester is expected + /// to repeat these requests in a loop until `response.block_header.block_num == + /// response.chain_tip`, at which point the requester is fully synchronized with the + /// chain. /// /// Each request also returns info about new notes, nullifiers etc. created. It also returns - /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain - /// MMR peaks and chain MMR nodes. + /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both + /// chain MMR peaks and chain MMR nodes. /// - /// For preserving some degree of privacy, note tags and nullifiers filters contain only high - /// part of hashes. Thus, returned data contains excessive notes and nullifiers, requester can make - /// additional filtering of that data on its side. + /// For preserving some degree of privacy, note tags and nullifiers filters contain only + /// high part of hashes. Thus, returned data contains excessive notes and + /// nullifiers, requester can make additional filtering of that data on its side. 
pub async fn sync_state( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncState"); let mut req = request.into_request(); @@ -605,24 +534,17 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/SyncAccountVault", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncAccountVault"); let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("store.Rpc", "SyncAccountVault")); + req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncAccountVault")); self.inner.unary(req, path, codec).await } - /// Returns storage map updates for specified account and storage slots within a block range. + /// Returns storage map updates for specified account and storage slots within a block + /// range. pub async fn sync_storage_maps( &mut self, request: impl tonic::IntoRequest, @@ -630,18 +552,11 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/SyncStorageMaps", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncStorageMaps"); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncStorageMaps")); self.inner.unary(req, path, codec).await @@ -654,21 +569,13 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.Rpc/SyncTransactions", - ); + let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncTransactions"); let mut req = request.into_request(); - req.extensions_mut() - .insert(GrpcMethod::new("store.Rpc", "SyncTransactions")); + req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncTransactions")); self.inner.unary(req, path, codec).await } } @@ -680,7 +587,7 @@ pub mod rpc_server { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value, + clippy::let_unit_value )] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with RpcServer. 
@@ -690,10 +597,7 @@ pub mod rpc_server { async fn status( &self, request: tonic::Request<()>, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; /// Returns a nullifier proof for each of the requested nullifiers. async fn check_nullifiers( &self, @@ -722,10 +626,7 @@ pub mod rpc_server { async fn get_block_by_number( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; /// Retrieves block header by given block number. Optionally, it also returns the MMR path /// and current chain length to authenticate the block's inclusion. async fn get_block_header_by_number( @@ -747,11 +648,9 @@ pub mod rpc_server { async fn get_note_script_by_root( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; - /// Returns a list of nullifiers that match the specified prefixes and are recorded in the node. + ) -> std::result::Result, tonic::Status>; + /// Returns a list of nullifiers that match the specified prefixes and are recorded in the + /// node. /// /// Note that only 16-bit prefixes are supported at this time. async fn sync_nullifiers( @@ -761,44 +660,42 @@ pub mod rpc_server { tonic::Response, tonic::Status, >; - /// Returns info which can be used by the requester to sync up to the tip of chain for the notes they are interested in. + /// Returns info which can be used by the requester to sync up to the tip of chain for the + /// notes they are interested in. /// - /// requester specifies the `note_tags` they are interested in, and the block height from which to search for new for - /// matching notes for. The request will then return the next block containing any note matching the provided tags. + /// requester specifies the `note_tags` they are interested in, and the block height from + /// which to search for new for matching notes for. The request will then return the + /// next block containing any note matching the provided tags. /// /// The response includes each note's metadata and inclusion proof. /// - /// A basic note sync can be implemented by repeatedly requesting the previous response's block until reaching the - /// tip of the chain. + /// A basic note sync can be implemented by repeatedly requesting the previous response's + /// block until reaching the tip of the chain. async fn sync_notes( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; - /// Returns info which can be used by the requester to sync up to the latest state of the chain - /// for the objects (accounts, notes, nullifiers) the requester is interested in. + ) -> std::result::Result, tonic::Status>; + /// Returns info which can be used by the requester to sync up to the latest state of the + /// chain for the objects (accounts, notes, nullifiers) the requester is interested + /// in. /// - /// This request returns the next block containing requested data. It also returns `chain_tip` - /// which is the latest block number in the chain. requester is expected to repeat these requests - /// in a loop until `response.block_header.block_num == response.chain_tip`, at which point - /// the requester is fully synchronized with the chain. + /// This request returns the next block containing requested data. It also returns + /// `chain_tip` which is the latest block number in the chain. 
requester is expected + /// to repeat these requests in a loop until `response.block_header.block_num == + /// response.chain_tip`, at which point the requester is fully synchronized with the + /// chain. /// /// Each request also returns info about new notes, nullifiers etc. created. It also returns - /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain - /// MMR peaks and chain MMR nodes. + /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both + /// chain MMR peaks and chain MMR nodes. /// - /// For preserving some degree of privacy, note tags and nullifiers filters contain only high - /// part of hashes. Thus, returned data contains excessive notes and nullifiers, requester can make - /// additional filtering of that data on its side. + /// For preserving some degree of privacy, note tags and nullifiers filters contain only + /// high part of hashes. Thus, returned data contains excessive notes and + /// nullifiers, requester can make additional filtering of that data on its side. async fn sync_state( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; /// Returns account vault updates for specified account within a block range. async fn sync_account_vault( &self, @@ -807,7 +704,8 @@ pub mod rpc_server { tonic::Response, tonic::Status, >; - /// Returns storage map updates for specified account and storage slots within a block range. + /// Returns storage map updates for specified account and storage slots within a block + /// range. async fn sync_storage_maps( &self, request: tonic::Request, @@ -846,10 +744,7 @@ pub mod rpc_server { max_encoding_message_size: None, } } - pub fn with_interceptor( - inner: T, - interceptor: F, - ) -> InterceptedService + pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService where F: tonic::service::Interceptor, { @@ -906,15 +801,10 @@ pub mod rpc_server { struct StatusSvc(pub Arc); impl tonic::server::UnaryService<()> for StatusSvc { type Response = super::super::rpc::StoreStatus; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call(&mut self, request: tonic::Request<()>) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::status(&inner, request).await - }; + let fut = async move { ::status(&inner, request).await }; Box::pin(fut) } } @@ -939,27 +829,22 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/CheckNullifiers" => { #[allow(non_camel_case_types)] struct CheckNullifiersSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for CheckNullifiersSvc { + impl tonic::server::UnaryService + for CheckNullifiersSvc + { type Response = super::super::rpc::CheckNullifiersResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::check_nullifiers(&inner, request).await - }; + let fut = + async move { ::check_nullifiers(&inner, request).await }; Box::pin(fut) } } @@ -984,19 +869,15 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/GetAccountDetails" => { #[allow(non_camel_case_types)] struct GetAccountDetailsSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for GetAccountDetailsSvc { + impl tonic::server::UnaryService + for 
GetAccountDetailsSvc + { type Response = super::super::account::AccountDetails; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, @@ -1029,29 +910,22 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/GetAccountProof" => { #[allow(non_camel_case_types)] struct GetAccountProofSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for GetAccountProofSvc { + impl tonic::server::UnaryService + for GetAccountProofSvc + { type Response = super::super::rpc::AccountProofResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::AccountProofRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::get_account_proof(&inner, request).await - }; + let fut = + async move { ::get_account_proof(&inner, request).await }; Box::pin(fut) } } @@ -1076,24 +950,18 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/GetBlockByNumber" => { #[allow(non_camel_case_types)] struct GetBlockByNumberSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for GetBlockByNumberSvc { + impl tonic::server::UnaryService + for GetBlockByNumberSvc + { type Response = super::super::blockchain::MaybeBlock; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::blockchain::BlockNumber, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { @@ -1123,30 +991,23 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/GetBlockHeaderByNumber" => { #[allow(non_camel_case_types)] struct GetBlockHeaderByNumberSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService< - super::super::rpc::BlockHeaderByNumberRequest, - > for GetBlockHeaderByNumberSvc { + impl + tonic::server::UnaryService + for GetBlockHeaderByNumberSvc + { type Response = super::super::rpc::BlockHeaderByNumberResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::BlockHeaderByNumberRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_header_by_number(&inner, request) - .await + ::get_block_header_by_number(&inner, request).await }; Box::pin(fut) } @@ -1172,27 +1033,20 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/GetNotesById" => { #[allow(non_camel_case_types)] struct GetNotesByIdSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for GetNotesByIdSvc { + impl tonic::server::UnaryService for GetNotesByIdSvc { type Response = super::super::note::CommittedNoteList; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::get_notes_by_id(&inner, request).await - }; + let fut = + async move { ::get_notes_by_id(&inner, request).await }; Box::pin(fut) } } @@ -1217,19 +1071,15 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/GetNoteScriptByRoot" => { #[allow(non_camel_case_types)] struct 
GetNoteScriptByRootSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for GetNoteScriptByRootSvc { + impl tonic::server::UnaryService + for GetNoteScriptByRootSvc + { type Response = super::super::rpc::MaybeNoteScript; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, @@ -1262,30 +1112,23 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/SyncNullifiers" => { #[allow(non_camel_case_types)] struct SyncNullifiersSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService< - super::super::rpc::SyncNullifiersRequest, - > for SyncNullifiersSvc { + impl + tonic::server::UnaryService + for SyncNullifiersSvc + { type Response = super::super::rpc::SyncNullifiersResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::SyncNullifiersRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::sync_nullifiers(&inner, request).await - }; + let fut = + async move { ::sync_nullifiers(&inner, request).await }; Box::pin(fut) } } @@ -1310,27 +1153,19 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/SyncNotes" => { #[allow(non_camel_case_types)] struct SyncNotesSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for SyncNotesSvc { + impl tonic::server::UnaryService for SyncNotesSvc { type Response = super::super::rpc::SyncNotesResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::sync_notes(&inner, request).await - }; + let fut = async move { ::sync_notes(&inner, request).await }; Box::pin(fut) } } @@ -1355,27 +1190,19 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/SyncState" => { #[allow(non_camel_case_types)] struct SyncStateSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService - for SyncStateSvc { + impl tonic::server::UnaryService for SyncStateSvc { type Response = super::super::rpc::SyncStateResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::sync_state(&inner, request).await - }; + let fut = async move { ::sync_state(&inner, request).await }; Box::pin(fut) } } @@ -1400,25 +1227,19 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/SyncAccountVault" => { #[allow(non_camel_case_types)] struct SyncAccountVaultSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService< - super::super::rpc::SyncAccountVaultRequest, - > for SyncAccountVaultSvc { + impl + tonic::server::UnaryService + for SyncAccountVaultSvc + { type Response = super::super::rpc::SyncAccountVaultResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::SyncAccountVaultRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { @@ -1448,30 +1269,23 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/SyncStorageMaps" => { 
#[allow(non_camel_case_types)] struct SyncStorageMapsSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService< - super::super::rpc::SyncStorageMapsRequest, - > for SyncStorageMapsSvc { + impl + tonic::server::UnaryService + for SyncStorageMapsSvc + { type Response = super::super::rpc::SyncStorageMapsResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::SyncStorageMapsRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::sync_storage_maps(&inner, request).await - }; + let fut = + async move { ::sync_storage_maps(&inner, request).await }; Box::pin(fut) } } @@ -1496,30 +1310,23 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } + }, "/store.Rpc/SyncTransactions" => { #[allow(non_camel_case_types)] struct SyncTransactionsSvc(pub Arc); - impl< - T: Rpc, - > tonic::server::UnaryService< - super::super::rpc::SyncTransactionsRequest, - > for SyncTransactionsSvc { + impl + tonic::server::UnaryService + for SyncTransactionsSvc + { type Response = super::super::rpc::SyncTransactionsResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::SyncTransactionsRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { - ::sync_transactions(&inner, request).await - }; + let fut = + async move { ::sync_transactions(&inner, request).await }; Box::pin(fut) } } @@ -1544,26 +1351,17 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - } - _ => { - Box::pin(async move { - let mut response = http::Response::new( - tonic::body::Body::default(), - ); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) - }) - } + }, + _ => Box::pin(async move { + let mut response = http::Response::new(tonic::body::Body::default()); + let headers = response.headers_mut(); + headers.insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers.insert(http::header::CONTENT_TYPE, tonic::metadata::GRPC_CONTENT_TYPE); + Ok(response) + }), } } } @@ -1592,10 +1390,10 @@ pub mod block_producer_client { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value, + clippy::let_unit_value )] - use tonic::codegen::*; use tonic::codegen::http::Uri; + use tonic::codegen::*; /// Store API for the BlockProducer component #[derive(Debug, Clone)] pub struct BlockProducerClient { @@ -1635,14 +1433,13 @@ pub mod block_producer_client { F: tonic::service::Interceptor, T::ResponseBody: Default, T: tonic::codegen::Service< - http::Request, - Response = http::Response< - >::ResponseBody, + http::Request, + Response = http::Response< + >::ResponseBody, + >, >, - >, - , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: + Into + std::marker::Send + std::marker::Sync, { BlockProducerClient::new(InterceptedService::new(inner, interceptor)) } @@ -1680,20 +1477,13 @@ pub mod block_producer_client { /// Applies changes of a new block to the DB and in-memory data structures. 
pub async fn apply_block( &mut self, - request: impl tonic::IntoRequest, + request: impl tonic::IntoRequest, ) -> std::result::Result, tonic::Status> { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.BlockProducer/ApplyBlock", - ); + let path = http::uri::PathAndQuery::from_static("/store.BlockProducer/ApplyBlock"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "ApplyBlock")); @@ -1703,30 +1493,20 @@ pub mod block_producer_client { /// and current chain length to authenticate the block's inclusion. pub async fn get_block_header_by_number( &mut self, - request: impl tonic::IntoRequest< - super::super::rpc::BlockHeaderByNumberRequest, - >, + request: impl tonic::IntoRequest, ) -> std::result::Result< tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.BlockProducer/GetBlockHeaderByNumber", - ); + let path = + http::uri::PathAndQuery::from_static("/store.BlockProducer/GetBlockHeaderByNumber"); let mut req = request.into_request(); req.extensions_mut() - .insert( - GrpcMethod::new("store.BlockProducer", "GetBlockHeaderByNumber"), - ); + .insert(GrpcMethod::new("store.BlockProducer", "GetBlockHeaderByNumber")); self.inner.unary(req, path, codec).await } /// Returns data required to prove the next block. 
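Since the regenerated client above has its generic parameters elided, the sketch below shows how a caller might drive the reworked `apply_block` endpoint. It is illustrative only: the `proto::store::ApplyBlockRequest` path and the pre-existing `client` handle (a `BlockProducerClient`) are assumptions, and the field encoding follows the `ApplyBlockRequest` message introduced later in this patch, where header, body, and signature are each serialized separately via `winter_utils::Serializable`.

    // Hypothetical caller-side sketch; the module path and `client` handle are assumed.
    use miden_lib::utils::Serializable;

    let request = proto::store::ApplyBlockRequest {
        header: header.to_bytes(),
        body: body.to_bytes(),
        signature: signature.to_bytes(),
    };
    client.apply_block(request).await?;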
@@ -1734,18 +1514,11 @@ pub mod block_producer_client { &mut self, request: impl tonic::IntoRequest, ) -> std::result::Result, tonic::Status> { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.BlockProducer/GetBlockInputs", - ); + let path = http::uri::PathAndQuery::from_static("/store.BlockProducer/GetBlockInputs"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "GetBlockInputs")); @@ -1756,18 +1529,11 @@ pub mod block_producer_client { &mut self, request: impl tonic::IntoRequest, ) -> std::result::Result, tonic::Status> { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.BlockProducer/GetBatchInputs", - ); + let path = http::uri::PathAndQuery::from_static("/store.BlockProducer/GetBatchInputs"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "GetBatchInputs")); @@ -1777,22 +1543,13 @@ pub mod block_producer_client { pub async fn get_transaction_inputs( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.BlockProducer/GetTransactionInputs", - ); + let path = + http::uri::PathAndQuery::from_static("/store.BlockProducer/GetTransactionInputs"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "GetTransactionInputs")); @@ -1807,16 +1564,17 @@ pub mod block_producer_server { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value, + clippy::let_unit_value )] use tonic::codegen::*; - /// Generated trait containing gRPC methods that should be implemented for use with BlockProducerServer. + /// Generated trait containing gRPC methods that should be implemented for use with + /// BlockProducerServer. #[async_trait] pub trait BlockProducer: std::marker::Send + std::marker::Sync + 'static { /// Applies changes of a new block to the DB and in-memory data structures. async fn apply_block( &self, - request: tonic::Request, + request: tonic::Request, ) -> std::result::Result, tonic::Status>; /// Retrieves block header by given block number. Optionally, it also returns the MMR path /// and current chain length to authenticate the block's inclusion. 
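The generated `BlockProducer` trait immediately above undergoes the matching change: its `apply_block` now receives the new request message rather than a serialized `blockchain.Block`. Because the generic parameters are not visible in the regenerated code, the following is a hedged reconstruction of the expected method shape, not the authoritative generated output (exact generated paths may differ):

    // Reconstruction for illustration only; exact generated paths may differ.
    async fn apply_block(
        &self,
        request: tonic::Request<ApplyBlockRequest>,
    ) -> std::result::Result<tonic::Response<()>, tonic::Status>;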
@@ -1841,10 +1599,7 @@ pub mod block_producer_server { async fn get_transaction_inputs( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; } /// Store API for the BlockProducer component #[derive(Debug)] @@ -1868,10 +1623,7 @@ pub mod block_producer_server { max_encoding_message_size: None, } } - pub fn with_interceptor( - inner: T, - interceptor: F, - ) -> InterceptedService + pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService where F: tonic::service::Interceptor, { @@ -1926,18 +1678,12 @@ pub mod block_producer_server { "/store.BlockProducer/ApplyBlock" => { #[allow(non_camel_case_types)] struct ApplyBlockSvc(pub Arc); - impl< - T: BlockProducer, - > tonic::server::UnaryService - for ApplyBlockSvc { + impl tonic::server::UnaryService for ApplyBlockSvc { type Response = (); - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { @@ -1967,32 +1713,23 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - } + }, "/store.BlockProducer/GetBlockHeaderByNumber" => { #[allow(non_camel_case_types)] struct GetBlockHeaderByNumberSvc(pub Arc); - impl< - T: BlockProducer, - > tonic::server::UnaryService< - super::super::rpc::BlockHeaderByNumberRequest, - > for GetBlockHeaderByNumberSvc { + impl + tonic::server::UnaryService + for GetBlockHeaderByNumberSvc + { type Response = super::super::rpc::BlockHeaderByNumberResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::BlockHeaderByNumberRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_header_by_number( - &inner, - request, - ) + ::get_block_header_by_number(&inner, request) .await }; Box::pin(fut) @@ -2019,27 +1756,22 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - } + }, "/store.BlockProducer/GetBlockInputs" => { #[allow(non_camel_case_types)] struct GetBlockInputsSvc(pub Arc); - impl< - T: BlockProducer, - > tonic::server::UnaryService - for GetBlockInputsSvc { + impl tonic::server::UnaryService + for GetBlockInputsSvc + { type Response = super::BlockInputs; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_inputs(&inner, request) - .await + ::get_block_inputs(&inner, request).await }; Box::pin(fut) } @@ -2065,27 +1797,22 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - } + }, "/store.BlockProducer/GetBatchInputs" => { #[allow(non_camel_case_types)] struct GetBatchInputsSvc(pub Arc); - impl< - T: BlockProducer, - > tonic::server::UnaryService - for GetBatchInputsSvc { + impl tonic::server::UnaryService + for GetBatchInputsSvc + { type Response = super::BatchInputs; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_batch_inputs(&inner, request) - .await + ::get_batch_inputs(&inner, request).await }; Box::pin(fut) 
} @@ -2111,30 +1838,23 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - } + }, "/store.BlockProducer/GetTransactionInputs" => { #[allow(non_camel_case_types)] struct GetTransactionInputsSvc(pub Arc); - impl< - T: BlockProducer, - > tonic::server::UnaryService - for GetTransactionInputsSvc { + impl + tonic::server::UnaryService + for GetTransactionInputsSvc + { type Response = super::TransactionInputs; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_transaction_inputs( - &inner, - request, - ) - .await + ::get_transaction_inputs(&inner, request).await }; Box::pin(fut) } @@ -2160,26 +1880,17 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - } - _ => { - Box::pin(async move { - let mut response = http::Response::new( - tonic::body::Body::default(), - ); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) - }) - } + }, + _ => Box::pin(async move { + let mut response = http::Response::new(tonic::body::Body::default()); + let headers = response.headers_mut(); + headers.insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers.insert(http::header::CONTENT_TYPE, tonic::metadata::GRPC_CONTENT_TYPE); + Ok(response) + }), } } } @@ -2208,10 +1919,10 @@ pub mod ntx_builder_client { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value, + clippy::let_unit_value )] - use tonic::codegen::*; use tonic::codegen::http::Uri; + use tonic::codegen::*; /// Store API for the network transaction builder component #[derive(Debug, Clone)] pub struct NtxBuilderClient { @@ -2251,14 +1962,13 @@ pub mod ntx_builder_client { F: tonic::service::Interceptor, T::ResponseBody: Default, T: tonic::codegen::Service< - http::Request, - Response = http::Response< - >::ResponseBody, + http::Request, + Response = http::Response< + >::ResponseBody, + >, >, - >, - , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: + Into + std::marker::Send + std::marker::Sync, { NtxBuilderClient::new(InterceptedService::new(inner, interceptor)) } @@ -2297,25 +2007,17 @@ pub mod ntx_builder_client { /// and current chain length to authenticate the block's inclusion. 
pub async fn get_block_header_by_number( &mut self, - request: impl tonic::IntoRequest< - super::super::rpc::BlockHeaderByNumberRequest, - >, + request: impl tonic::IntoRequest, ) -> std::result::Result< tonic::Response, tonic::Status, > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.NtxBuilder/GetBlockHeaderByNumber", - ); + let path = + http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetBlockHeaderByNumber"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetBlockHeaderByNumber")); @@ -2325,51 +2027,33 @@ pub mod ntx_builder_client { pub async fn get_unconsumed_network_notes( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.NtxBuilder/GetUnconsumedNetworkNotes", - ); + let path = + http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetUnconsumedNetworkNotes"); let mut req = request.into_request(); req.extensions_mut() - .insert( - GrpcMethod::new("store.NtxBuilder", "GetUnconsumedNetworkNotes"), - ); + .insert(GrpcMethod::new("store.NtxBuilder", "GetUnconsumedNetworkNotes")); self.inner.unary(req, path, codec).await } - /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to this - /// header for executing network transactions. If the block number is not provided, the latest - /// header and peaks will be retrieved. + /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to + /// this header for executing network transactions. If the block number is not + /// provided, the latest header and peaks will be retrieved. 
pub async fn get_current_blockchain_data( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.NtxBuilder/GetCurrentBlockchainData", - ); + let path = + http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetCurrentBlockchainData"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetCurrentBlockchainData")); @@ -2379,52 +2063,32 @@ pub mod ntx_builder_client { pub async fn get_network_account_details_by_prefix( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static( "/store.NtxBuilder/GetNetworkAccountDetailsByPrefix", ); let mut req = request.into_request(); req.extensions_mut() - .insert( - GrpcMethod::new( - "store.NtxBuilder", - "GetNetworkAccountDetailsByPrefix", - ), - ); + .insert(GrpcMethod::new("store.NtxBuilder", "GetNetworkAccountDetailsByPrefix")); self.inner.unary(req, path, codec).await } /// Returns a list of all network account ids. 
pub async fn get_network_account_ids( &mut self, request: impl tonic::IntoRequest<()>, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.NtxBuilder/GetNetworkAccountIds", - ); + let path = + http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetNetworkAccountIds"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetNetworkAccountIds")); @@ -2434,22 +2098,14 @@ pub mod ntx_builder_client { pub async fn get_note_script_by_root( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - > { - self.inner - .ready() - .await - .map_err(|e| { - tonic::Status::unknown( - format!("Service was not ready: {}", e.into()), - ) - })?; + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static( - "/store.NtxBuilder/GetNoteScriptByRoot", - ); + let path = + http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetNoteScriptByRoot"); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetNoteScriptByRoot")); @@ -2464,10 +2120,11 @@ pub mod ntx_builder_server { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value, + clippy::let_unit_value )] use tonic::codegen::*; - /// Generated trait containing gRPC methods that should be implemented for use with NtxBuilderServer. + /// Generated trait containing gRPC methods that should be implemented for use with + /// NtxBuilderServer. #[async_trait] pub trait NtxBuilder: std::marker::Send + std::marker::Sync + 'static { /// Retrieves block header by given block number. Optionally, it also returns the MMR path @@ -2483,44 +2140,29 @@ pub mod ntx_builder_server { async fn get_unconsumed_network_notes( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; - /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to this - /// header for executing network transactions. If the block number is not provided, the latest - /// header and peaks will be retrieved. + ) -> std::result::Result, tonic::Status>; + /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to + /// this header for executing network transactions. If the block number is not + /// provided, the latest header and peaks will be retrieved. async fn get_current_blockchain_data( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; /// Returns the latest state of a network account with the specified account prefix. async fn get_network_account_details_by_prefix( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; /// Returns a list of all network account ids. 
async fn get_network_account_ids( &self, request: tonic::Request<()>, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; /// Returns the script for a note by its root. async fn get_note_script_by_root( &self, request: tonic::Request, - ) -> std::result::Result< - tonic::Response, - tonic::Status, - >; + ) -> std::result::Result, tonic::Status>; } /// Store API for the network transaction builder component #[derive(Debug)] @@ -2544,10 +2186,7 @@ pub mod ntx_builder_server { max_encoding_message_size: None, } } - pub fn with_interceptor( - inner: T, - interceptor: F, - ) -> InterceptedService + pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService where F: tonic::service::Interceptor, { @@ -2602,29 +2241,19 @@ pub mod ntx_builder_server { "/store.NtxBuilder/GetBlockHeaderByNumber" => { #[allow(non_camel_case_types)] struct GetBlockHeaderByNumberSvc(pub Arc); - impl< - T: NtxBuilder, - > tonic::server::UnaryService< - super::super::rpc::BlockHeaderByNumberRequest, - > for GetBlockHeaderByNumberSvc { + impl + tonic::server::UnaryService + for GetBlockHeaderByNumberSvc + { type Response = super::super::rpc::BlockHeaderByNumberResponse; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::rpc::BlockHeaderByNumberRequest, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_header_by_number( - &inner, - request, - ) - .await + ::get_block_header_by_number(&inner, request).await }; Box::pin(fut) } @@ -2650,29 +2279,23 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - } + }, "/store.NtxBuilder/GetUnconsumedNetworkNotes" => { #[allow(non_camel_case_types)] struct GetUnconsumedNetworkNotesSvc(pub Arc); - impl< - T: NtxBuilder, - > tonic::server::UnaryService - for GetUnconsumedNetworkNotesSvc { + impl + tonic::server::UnaryService + for GetUnconsumedNetworkNotesSvc + { type Response = super::UnconsumedNetworkNotes; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_unconsumed_network_notes( - &inner, - request, - ) + ::get_unconsumed_network_notes(&inner, request) .await }; Box::pin(fut) @@ -2699,32 +2322,23 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - } + }, "/store.NtxBuilder/GetCurrentBlockchainData" => { #[allow(non_camel_case_types)] struct GetCurrentBlockchainDataSvc(pub Arc); - impl< - T: NtxBuilder, - > tonic::server::UnaryService< - super::super::blockchain::MaybeBlockNumber, - > for GetCurrentBlockchainDataSvc { + impl + tonic::server::UnaryService + for GetCurrentBlockchainDataSvc + { type Response = super::CurrentBlockchainData; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, - request: tonic::Request< - super::super::blockchain::MaybeBlockNumber, - >, + request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_current_blockchain_data( - &inner, - request, - ) + ::get_current_blockchain_data(&inner, request) .await }; Box::pin(fut) @@ -2751,21 +2365,15 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - } + }, "/store.NtxBuilder/GetNetworkAccountDetailsByPrefix" => { 
#[allow(non_camel_case_types)] - struct GetNetworkAccountDetailsByPrefixSvc( - pub Arc, - ); - impl< - T: NtxBuilder, - > tonic::server::UnaryService - for GetNetworkAccountDetailsByPrefixSvc { + struct GetNetworkAccountDetailsByPrefixSvc(pub Arc); + impl tonic::server::UnaryService + for GetNetworkAccountDetailsByPrefixSvc + { type Response = super::MaybeAccountDetails; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, @@ -2773,10 +2381,9 @@ pub mod ntx_builder_server { let inner = Arc::clone(&self.0); let fut = async move { ::get_network_account_details_by_prefix( - &inner, - request, - ) - .await + &inner, request, + ) + .await }; Box::pin(fut) } @@ -2802,22 +2409,17 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - } + }, "/store.NtxBuilder/GetNetworkAccountIds" => { #[allow(non_camel_case_types)] struct GetNetworkAccountIdsSvc(pub Arc); - impl tonic::server::UnaryService<()> - for GetNetworkAccountIdsSvc { + impl tonic::server::UnaryService<()> for GetNetworkAccountIdsSvc { type Response = super::NetworkAccountIdList; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call(&mut self, request: tonic::Request<()>) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_network_account_ids(&inner, request) - .await + ::get_network_account_ids(&inner, request).await }; Box::pin(fut) } @@ -2843,27 +2445,22 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - } + }, "/store.NtxBuilder/GetNoteScriptByRoot" => { #[allow(non_camel_case_types)] struct GetNoteScriptByRootSvc(pub Arc); - impl< - T: NtxBuilder, - > tonic::server::UnaryService - for GetNoteScriptByRootSvc { + impl tonic::server::UnaryService + for GetNoteScriptByRootSvc + { type Response = super::super::rpc::MaybeNoteScript; - type Future = BoxFuture< - tonic::Response, - tonic::Status, - >; + type Future = BoxFuture, tonic::Status>; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_note_script_by_root(&inner, request) - .await + ::get_note_script_by_root(&inner, request).await }; Box::pin(fut) } @@ -2889,26 +2486,17 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - } - _ => { - Box::pin(async move { - let mut response = http::Response::new( - tonic::body::Body::default(), - ); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) - }) - } + }, + _ => Box::pin(async move { + let mut response = http::Response::new(tonic::body::Body::default()); + let headers = response.headers_mut(); + headers.insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers.insert(http::header::CONTENT_TYPE, tonic::metadata::GRPC_CONTENT_TYPE); + Ok(response) + }), } } } diff --git a/crates/store/src/db/models/queries/block_headers.rs b/crates/store/src/db/models/queries/block_headers.rs index 3388b8328..7850dfaa9 100644 --- a/crates/store/src/db/models/queries/block_headers.rs +++ b/crates/store/src/db/models/queries/block_headers.rs @@ -131,7 +131,7 @@ pub struct BlockHeaderRawRow { #[allow(dead_code)] pub block_num: i64, pub block_header: Vec, - pub signature: Vec, + pub signature: Vec, // TODO(currentpr): use? 
} impl TryInto for BlockHeaderRawRow { type Error = DatabaseError; diff --git a/crates/store/src/server/block_producer.rs b/crates/store/src/server/block_producer.rs index 91b595aad..f3e85ac52 100644 --- a/crates/store/src/server/block_producer.rs +++ b/crates/store/src/server/block_producer.rs @@ -5,7 +5,8 @@ use miden_node_proto::generated::{self as proto}; use miden_node_proto::try_convert; use miden_node_utils::ErrorReport; use miden_objects::Word; -use miden_objects::block::{BlockNumber, ProvenBlock}; +use miden_objects::block::{BlockBody, BlockHeader, BlockNumber}; +use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::utils::Deserializable; use tonic::{Request, Response, Status}; use tracing::{debug, info, instrument}; @@ -56,28 +57,34 @@ impl block_producer_server::BlockProducer for StoreApi { )] async fn apply_block( &self, - request: Request, + request: Request, ) -> Result, Status> { let request = request.into_inner(); debug!(target: COMPONENT, ?request); - let block = ProvenBlock::read_from_bytes(&request.block).map_err(|err| { - Status::invalid_argument(err.as_report_context("block deserialization error")) + let header = BlockHeader::read_from_bytes(&request.header).map_err(|err| { + Status::invalid_argument(err.as_report_context("header deserialization error")) + })?; + let body = BlockBody::read_from_bytes(&request.body).map_err(|err| { + Status::invalid_argument(err.as_report_context("body deserialization error")) + })?; + let signature = Signature::read_from_bytes(&request.signature).map_err(|err| { + Status::invalid_argument(err.as_report_context("signature deserialization error")) })?; - let block_num = block.header().block_num().as_u32(); + let block_num = header.block_num().as_u32(); info!( target: COMPONENT, block_num, - block_commitment = %block.header().commitment(), - account_count = block.body().updated_accounts().len(), - note_count = block.body().output_notes().count(), - nullifier_count = block.body().created_nullifiers().len(), + block_commitment = %header.commitment(), + account_count = body.updated_accounts().len(), + note_count = body.output_notes().count(), + nullifier_count = body.created_nullifiers().len(), ); - self.state.apply_block(block).await?; + self.state.apply_block(header, body, signature).await?; Ok(Response::new(())) } diff --git a/crates/store/src/state.rs b/crates/store/src/state.rs index bd62a1c71..dbf658c04 100644 --- a/crates/store/src/state.rs +++ b/crates/store/src/state.rs @@ -26,7 +26,7 @@ use miden_node_utils::formatting::format_array; use miden_objects::account::{AccountHeader, AccountId, StorageSlot, StorageSlotContent}; use miden_objects::block::account_tree::{AccountTree, AccountWitness, account_id_to_smt_key}; use miden_objects::block::nullifier_tree::{NullifierTree, NullifierWitness}; -use miden_objects::block::{BlockHeader, BlockInputs, BlockNumber, Blockchain, ProvenBlock}; +use miden_objects::block::{BlockBody, BlockHeader, BlockInputs, BlockNumber, Blockchain}; use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::crypto::merkle::{ Forest, @@ -197,11 +197,7 @@ impl State { ) -> Result<(), ApplyBlockError> { let _lock = self.writer.try_lock().map_err(|_| ApplyBlockError::ConcurrentWrite)?; - let header = block.header().clone(); - let block_body = block.body().clone(); - let signature = block.signature().clone(); - - let tx_commitment = block_body.transactions().commitment(); + let tx_commitment = body.transactions().commitment(); if header.tx_commitment() != tx_commitment 
{ return Err(InvalidBlockError::InvalidBlockTxCommitment { @@ -212,7 +208,7 @@ impl State { } let block_num = header.block_num(); - let block_commitment = block.header().commitment(); + let block_commitment = header.commitment(); // ensures the right block header is being processed let prev_block = self @@ -233,7 +229,7 @@ impl State { return Err(InvalidBlockError::NewBlockInvalidPrevCommitment.into()); } - let block_data = block.to_bytes(); + let block_data = body.to_bytes(); // TODO(currentpr): is this correct? // Save the block to the block store. In a case of a rolled-back DB transaction, the // in-memory state will be unchanged, but the block might still be written into the @@ -257,8 +253,7 @@ impl State { let _span = info_span!(target: COMPONENT, "update_in_memory_structs").entered(); // nullifiers can be produced only once - let duplicate_nullifiers: Vec<_> = block - .body() + let duplicate_nullifiers: Vec<_> = body .created_nullifiers() .iter() .filter(|&n| inner.nullifier_tree.get_block_num(n).is_some()) @@ -280,11 +275,7 @@ impl State { let nullifier_tree_update = inner .nullifier_tree .compute_mutations( - block - .body() - .created_nullifiers() - .iter() - .map(|nullifier| (*nullifier, block_num)), + body.created_nullifiers().iter().map(|nullifier| (*nullifier, block_num)), ) .map_err(InvalidBlockError::NewBlockNullifierAlreadySpent)?; @@ -296,9 +287,7 @@ impl State { let account_tree_update = inner .account_tree .compute_mutations( - block - .body() - .updated_accounts() + body.updated_accounts() .iter() .map(|update| (update.account_id(), update.final_state_commitment())), ) @@ -324,13 +313,12 @@ impl State { }; // build note tree - let note_tree = block.body().compute_block_note_tree(); + let note_tree = body.compute_block_note_tree(); if note_tree.root() != header.note_root() { return Err(InvalidBlockError::NewBlockInvalidNoteRoot.into()); } - let notes = block - .body() + let notes = body .output_notes() .map(|(note_index, note)| { let (details, nullifier) = match note { @@ -372,7 +360,7 @@ impl State { // spawned. let db = Arc::clone(&self.db); let db_update_task = tokio::spawn(async move { - db.apply_block(allow_acquire, acquire_done, header, block_body, signature, notes) + db.apply_block(allow_acquire, acquire_done, header, body, signature, notes) .await }); diff --git a/proto/proto/internal/store.proto b/proto/proto/internal/store.proto index 05f515ccf..ced52f1b7 100644 --- a/proto/proto/internal/store.proto +++ b/proto/proto/internal/store.proto @@ -89,7 +89,7 @@ service Rpc { // Store API for the BlockProducer component service BlockProducer { // Applies changes of a new block to the DB and in-memory data structures. - rpc ApplyBlock(blockchain.Block) returns (google.protobuf.Empty) {} + rpc ApplyBlock(ApplyBlockRequest) returns (google.protobuf.Empty) {} // Retrieves block header by given block number. Optionally, it also returns the MMR path // and current chain length to authenticate the block's inclusion. @@ -105,6 +105,22 @@ service BlockProducer { rpc GetTransactionInputs(TransactionInputsRequest) returns (TransactionInputs) {} } +// APPLY BLOCK REQUEST +// ================================================================================================ + +// Applies a block to the state. +message ApplyBlockRequest { + // Block header encoded using [winter_utils::Serializable] implementation for + // [miden_objects::block::BlockHeader]. 
+ bytes header = 1; + // Block header encoded using [winter_utils::Serializable] implementation for + // [miden_objects::block::BlockBody]. + bytes body = 2; + // Signature encoded using [winter_utils::Serializable] implementation for + // [crypto::dsa::ecdsa_k256_keccak::Signature]. + bytes signature = 3; +} + // GET BLOCK INPUTS // ================================================================================================ diff --git a/proto/proto/types/blockchain.proto b/proto/proto/types/blockchain.proto index 619ccf1cf..866a8c7d5 100644 --- a/proto/proto/types/blockchain.proto +++ b/proto/proto/types/blockchain.proto @@ -4,15 +4,15 @@ package blockchain; import "types/account.proto"; import "types/primitives.proto"; -// BLOCK -// ================================================================================================ - -// Represents a block. -message Block { - // Block data encoded using [winter_utils::Serializable] implementation for - // [miden_objects::block::Block]. - bytes block = 1; -} +//// BLOCK +//// ================================================================================================ +// +//// Represents a block. +//message Block { +// // Block data encoded using [winter_utils::Serializable] implementation for +// // [miden_objects::block::Block]. +// bytes block = 1; +//} // Represents a proposed block. message ProposedBlock { From 3aa241bf048516796d8ad424ab0c0cafc64a48f6 Mon Sep 17 00:00:00 2001 From: sergerad Date: Thu, 18 Dec 2025 15:22:48 +1300 Subject: [PATCH 03/13] Partial refactor commands --- Cargo.lock | 2 ++ bin/node/Cargo.toml | 1 + bin/node/src/commands/block_producer.rs | 3 --- bin/node/src/commands/bundled.rs | 13 ++++++++++- bin/node/src/commands/mod.rs | 5 ----- bin/node/src/commands/store.rs | 13 +++++++++++ crates/block-producer/src/server/mod.rs | 2 -- crates/store/Cargo.toml | 1 + crates/store/src/server/api.rs | 29 ++++++++++++++++++++++++- crates/store/src/server/mod.rs | 10 +++++++-- 10 files changed, 65 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38063686b..c7bf7e004 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2707,6 +2707,7 @@ dependencies = [ "miden-node-utils", "miden-node-validator", "miden-objects", + "miden-remote-prover-client", "tokio", "url", ] @@ -2864,6 +2865,7 @@ dependencies = [ "miden-node-test-macro", "miden-node-utils", "miden-objects", + "miden-remote-prover-client", "pretty_assertions", "rand 0.9.2", "rand_chacha 0.9.0", diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index 7ebdf8d4a..de6ef9803 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -32,6 +32,7 @@ miden-node-validator = { workspace = true } miden-objects = { workspace = true } tokio = { features = ["macros", "net", "rt-multi-thread"], workspace = true } url = { workspace = true } +miden-remote-prover-client = { workspace = true } [dev-dependencies] figment = { features = ["env", "test", "toml"], version = "0.10" } diff --git a/bin/node/src/commands/block_producer.rs b/bin/node/src/commands/block_producer.rs index c099a7002..c1359473d 100644 --- a/bin/node/src/commands/block_producer.rs +++ b/bin/node/src/commands/block_producer.rs @@ -88,7 +88,6 @@ impl BlockProducerCommand { store_url, validator_url, batch_prover_url: block_producer.batch_prover_url, - block_prover_url: block_producer.block_prover_url, batch_interval: block_producer.batch_interval, block_interval: block_producer.block_interval, max_txs_per_batch: block_producer.max_txs_per_batch, @@ -128,7 +127,6 @@ mod tests { validator_url: 
dummy_url(), block_producer: BlockProducerConfig { batch_prover_url: None, - block_prover_url: None, block_interval: std::time::Duration::from_secs(1), batch_interval: std::time::Duration::from_secs(1), max_txs_per_batch: 8, @@ -152,7 +150,6 @@ mod tests { validator_url: dummy_url(), block_producer: BlockProducerConfig { batch_prover_url: None, - block_prover_url: None, block_interval: std::time::Duration::from_secs(1), batch_interval: std::time::Duration::from_secs(1), max_txs_per_batch: miden_objects::MAX_ACCOUNTS_PER_BATCH + 1, /* Use protocol diff --git a/bin/node/src/commands/bundled.rs b/bin/node/src/commands/bundled.rs index bf66f6c04..cbe7e0774 100644 --- a/bin/node/src/commands/bundled.rs +++ b/bin/node/src/commands/bundled.rs @@ -13,6 +13,7 @@ use miden_node_validator::Validator; use miden_objects::block::BlockSigner; use miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey; use miden_objects::utils::Deserializable; +use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use tokio::net::TcpListener; use tokio::sync::Barrier; use tokio::task::JoinSet; @@ -22,6 +23,7 @@ use super::{ENV_DATA_DIRECTORY, ENV_RPC_URL}; use crate::commands::{ BlockProducerConfig, DEFAULT_TIMEOUT, + ENV_BLOCK_PROVER_URL, ENV_ENABLE_OTEL, ENV_GENESIS_CONFIG_FILE, ENV_VALIDATOR_INSECURE_SECRET_KEY, @@ -70,6 +72,10 @@ pub enum BundledCommand { #[arg(long = "rpc.url", env = ENV_RPC_URL, value_name = "URL")] rpc_url: Url, + /// The remote block prover's gRPC url. + #[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")] + block_prover_url: Url, + /// Directory in which the Store component should store the database and raw block data. #[arg(long = "data-directory", env = ENV_DATA_DIRECTORY, value_name = "DIR")] data_directory: PathBuf, @@ -130,6 +136,7 @@ impl BundledCommand { }, BundledCommand::Start { rpc_url, + block_prover_url, data_directory, block_producer, ntx_builder, @@ -142,6 +149,7 @@ impl BundledCommand { let signer = SecretKey::read_from_bytes(hex::decode(secret_key_hex)?.as_ref())?; Self::start( rpc_url, + block_prover_url, data_directory, ntx_builder, block_producer, @@ -156,6 +164,7 @@ impl BundledCommand { #[allow(clippy::too_many_lines)] async fn start( rpc_url: Url, + block_prover_url: Url, data_directory: PathBuf, ntx_builder: NtxBuilderConfig, block_producer: BlockProducerConfig, @@ -172,6 +181,8 @@ impl BundledCommand { .await .context("Failed to bind to RPC gRPC endpoint")?; + let block_prover = Arc::new(RemoteBlockProver::new(block_prover_url)); + let block_producer_address = TcpListener::bind("127.0.0.1:0") .await .context("Failed to bind to block-producer gRPC endpoint")? 
@@ -214,6 +225,7 @@ impl BundledCommand { block_producer_listener: store_block_producer_listener, ntx_builder_listener: store_ntx_builder_listener, data_directory: data_directory_clone, + block_prover, grpc_timeout, } .serve() @@ -245,7 +257,6 @@ impl BundledCommand { store_url, validator_url, batch_prover_url: block_producer.batch_prover_url, - block_prover_url: block_producer.block_prover_url, batch_interval: block_producer.batch_interval, block_interval: block_producer.block_interval, max_batches_per_block: block_producer.max_batches_per_block, diff --git a/bin/node/src/commands/mod.rs b/bin/node/src/commands/mod.rs index ecfee995f..ea2356c8b 100644 --- a/bin/node/src/commands/mod.rs +++ b/bin/node/src/commands/mod.rs @@ -93,11 +93,6 @@ pub struct BlockProducerConfig { #[arg(long = "batch-prover.url", env = ENV_BATCH_PROVER_URL, value_name = "URL")] pub batch_prover_url: Option, - /// The remote block prover's gRPC url. If unset, will default to running a prover - /// in-process which is expensive. - #[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")] - pub block_prover_url: Option, - /// The number of transactions per batch. #[arg( long = "max-txs-per-batch", diff --git a/bin/node/src/commands/store.rs b/bin/node/src/commands/store.rs index 4ba41e9eb..a56f23949 100644 --- a/bin/node/src/commands/store.rs +++ b/bin/node/src/commands/store.rs @@ -1,4 +1,5 @@ use std::path::{Path, PathBuf}; +use std::sync::Arc; use std::time::Duration; use anyhow::Context; @@ -7,6 +8,7 @@ use miden_node_store::genesis::config::{AccountFileWithName, GenesisConfig}; use miden_node_utils::grpc::UrlExt; use miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey; use miden_objects::utils::Deserializable; +use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use url::Url; use super::{ @@ -17,6 +19,7 @@ use super::{ }; use crate::commands::{ DEFAULT_TIMEOUT, + ENV_BLOCK_PROVER_URL, ENV_ENABLE_OTEL, ENV_GENESIS_CONFIG_FILE, ENV_VALIDATOR_INSECURE_SECRET_KEY, @@ -72,6 +75,10 @@ pub enum StoreCommand { #[arg(long = "block-producer.url", env = ENV_STORE_BLOCK_PRODUCER_URL, value_name = "URL")] block_producer_url: Url, + /// The remote block prover's gRPC url. + #[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")] + block_prover_url: Url, + /// Directory in which to store the database and raw block data. 
#[arg(long, env = ENV_DATA_DIRECTORY, value_name = "DIR")] data_directory: PathBuf, @@ -115,12 +122,14 @@ impl StoreCommand { rpc_url, ntx_builder_url, block_producer_url, + block_prover_url, data_directory, enable_otel: _, grpc_timeout, } => { Self::start( rpc_url, + block_prover_url, ntx_builder_url, block_producer_url, data_directory, @@ -143,6 +152,7 @@ impl StoreCommand { rpc_url: Url, ntx_builder_url: Url, block_producer_url: Url, + block_prover_url: Url, data_directory: PathBuf, grpc_timeout: Duration, ) -> anyhow::Result<()> { @@ -167,8 +177,11 @@ impl StoreCommand { .await .context("Failed to bind to store's block-producer gRPC URL")?; + let block_prover = Arc::new(RemoteBlockProver::new(block_prover_url)); + Store { rpc_listener, + block_prover, ntx_builder_listener, block_producer_listener, data_directory, diff --git a/crates/block-producer/src/server/mod.rs b/crates/block-producer/src/server/mod.rs index c5354bba1..2a109fc41 100644 --- a/crates/block-producer/src/server/mod.rs +++ b/crates/block-producer/src/server/mod.rs @@ -55,8 +55,6 @@ pub struct BlockProducer { pub validator_url: Url, /// The address of the batch prover component. pub batch_prover_url: Option, - /// The address of the block prover component. - pub block_prover_url: Option, /// The interval at which to produce batches. pub batch_interval: Duration, /// The interval at which to produce blocks. diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 41d82fd96..f6d671da2 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -42,6 +42,7 @@ tonic = { default-features = true, workspace = true } tonic-reflection = { workspace = true } tower-http = { features = ["util"], workspace = true } tracing = { workspace = true } +miden-remote-prover-client = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index b266feb59..3becd48eb 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -6,8 +6,12 @@ use miden_node_proto::generated as proto; use miden_node_utils::ErrorReport; use miden_objects::Word; use miden_objects::account::AccountId; -use miden_objects::block::BlockNumber; +use miden_objects::batch::OrderedBatches; +use miden_objects::block::{BlockBody, BlockHeader, BlockInputs, BlockNumber, ProvenBlock}; +use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::note::Nullifier; +use miden_remote_prover_client::RemoteProverClientError; +use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use tonic::{Request, Response, Status}; use tracing::{info, instrument}; @@ -19,6 +23,7 @@ use crate::state::State; pub struct StoreApi { pub(super) state: Arc, + pub(super) block_prover: Arc, } impl StoreApi { @@ -42,6 +47,28 @@ impl StoreApi { mmr_path: mmr_proof.map(|p| Into::into(&p.merkle_path)), })) } + + #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] + async fn prove_block( + &self, + ordered_batches: OrderedBatches, + block_inputs: BlockInputs, + header: BlockHeader, + signature: Signature, + body: BlockBody, + ) -> Result { + // Prove block. + let block_proof = self + .block_prover + .prove(ordered_batches.clone(), header.clone(), block_inputs) + .await?; + //self.simulate_proving().await; + + // SAFETY: The header and body are assumed valid and consistent with the proof. 
+ let proven_block = ProvenBlock::new_unchecked(header, body, signature, block_proof); + + Ok(proven_block) + } } // UTILITIES diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index 036727a88..3e1999f3a 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -13,6 +13,7 @@ use miden_node_proto_build::{ use miden_node_utils::panic::{CatchPanicLayer, catch_panic_layer_fn}; use miden_node_utils::tracing::grpc::grpc_trace_fn; use miden_objects::block::BlockSigner; +use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use tokio::net::TcpListener; use tokio::task::JoinSet; use tokio_stream::wrappers::TcpListenerStream; @@ -36,6 +37,7 @@ pub struct Store { pub rpc_listener: TcpListener, pub ntx_builder_listener: TcpListener, pub block_producer_listener: TcpListener, + pub block_prover: Arc, pub data_directory: PathBuf, /// Server-side timeout for an individual gRPC request. /// @@ -98,14 +100,18 @@ impl Store { let db_maintenance_service = DbMaintenance::new(Arc::clone(&state), DATABASE_MAINTENANCE_INTERVAL); - let rpc_service = - store::rpc_server::RpcServer::new(api::StoreApi { state: Arc::clone(&state) }); + let rpc_service = store::rpc_server::RpcServer::new(api::StoreApi { + state: Arc::clone(&state), + block_prover: self.block_prover.clone(), + }); let ntx_builder_service = store::ntx_builder_server::NtxBuilderServer::new(api::StoreApi { state: Arc::clone(&state), + block_prover: self.block_prover.clone(), }); let block_producer_service = store::block_producer_server::BlockProducerServer::new(api::StoreApi { state: Arc::clone(&state), + block_prover: self.block_prover.clone(), }); let reflection_service = tonic_reflection::server::Builder::configure() .register_file_descriptor_set(store_rpc_api_descriptor()) From 893e5c094f3caf7f025faaa1b44715c533de4465 Mon Sep 17 00:00:00 2001 From: sergerad Date: Sun, 21 Dec 2025 15:02:02 +1300 Subject: [PATCH 04/13] Wire in ordered batches and block inputs --- Cargo.lock | 2 + bin/node/src/commands/bundled.rs | 5 +- bin/node/src/commands/store.rs | 19 +- bin/stress-test/Cargo.toml | 1 + bin/stress-test/src/seeding/mod.rs | 27 +- .../block-producer/src/block_builder/mod.rs | 6 +- crates/block-producer/src/store/mod.rs | 5 + crates/proto/src/generated/store.rs | 1308 +++++++++++------ crates/rpc/src/tests.rs | 9 +- crates/store/Cargo.toml | 3 +- .../src/db/models/queries/block_headers.rs | 2 +- crates/store/src/lib.rs | 2 +- crates/store/src/server/api.rs | 73 +- crates/store/src/server/block_producer.rs | 28 +- crates/store/src/server/mod.rs | 4 +- proto/proto/internal/store.proto | 12 +- 16 files changed, 1033 insertions(+), 473 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c7bf7e004..0f313e30a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2859,6 +2859,7 @@ dependencies = [ "fs-err", "hex", "indexmap 2.12.1", + "miden-block-prover", "miden-lib", "miden-node-proto", "miden-node-proto-build", @@ -2897,6 +2898,7 @@ dependencies = [ "miden-node-store", "miden-node-utils", "miden-objects", + "miden-remote-prover-client", "rand 0.9.2", "rayon", "tokio", diff --git a/bin/node/src/commands/bundled.rs b/bin/node/src/commands/bundled.rs index cbe7e0774..55622903d 100644 --- a/bin/node/src/commands/bundled.rs +++ b/bin/node/src/commands/bundled.rs @@ -7,13 +7,12 @@ use anyhow::Context; use miden_node_block_producer::BlockProducer; use miden_node_ntx_builder::NetworkTransactionBuilder; use miden_node_rpc::Rpc; -use miden_node_store::Store; +use miden_node_store::{BlockProver, 
Store}; use miden_node_utils::grpc::UrlExt; use miden_node_validator::Validator; use miden_objects::block::BlockSigner; use miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey; use miden_objects::utils::Deserializable; -use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use tokio::net::TcpListener; use tokio::sync::Barrier; use tokio::task::JoinSet; @@ -181,7 +180,7 @@ impl BundledCommand { .await .context("Failed to bind to RPC gRPC endpoint")?; - let block_prover = Arc::new(RemoteBlockProver::new(block_prover_url)); + let block_prover = Arc::new(BlockProver::new_remote(block_prover_url)); let block_producer_address = TcpListener::bind("127.0.0.1:0") .await diff --git a/bin/node/src/commands/store.rs b/bin/node/src/commands/store.rs index a56f23949..a5b29e04b 100644 --- a/bin/node/src/commands/store.rs +++ b/bin/node/src/commands/store.rs @@ -3,12 +3,11 @@ use std::sync::Arc; use std::time::Duration; use anyhow::Context; -use miden_node_store::Store; use miden_node_store::genesis::config::{AccountFileWithName, GenesisConfig}; +use miden_node_store::{BlockProver, Store}; use miden_node_utils::grpc::UrlExt; use miden_objects::crypto::dsa::ecdsa_k256_keccak::SecretKey; use miden_objects::utils::Deserializable; -use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use url::Url; use super::{ @@ -75,9 +74,9 @@ pub enum StoreCommand { #[arg(long = "block-producer.url", env = ENV_STORE_BLOCK_PRODUCER_URL, value_name = "URL")] block_producer_url: Url, - /// The remote block prover's gRPC url. + /// The remote block prover's gRPC url. If not provided, a local block prover will be used. #[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")] - block_prover_url: Url, + block_prover_url: Option, /// Directory in which to store the database and raw block data. 
#[arg(long, env = ENV_DATA_DIRECTORY, value_name = "DIR")] @@ -129,9 +128,9 @@ impl StoreCommand { } => { Self::start( rpc_url, - block_prover_url, ntx_builder_url, block_producer_url, + block_prover_url, data_directory, grpc_timeout, ) @@ -152,7 +151,7 @@ impl StoreCommand { rpc_url: Url, ntx_builder_url: Url, block_producer_url: Url, - block_prover_url: Url, + block_prover_url: Option, data_directory: PathBuf, grpc_timeout: Duration, ) -> anyhow::Result<()> { @@ -177,7 +176,13 @@ impl StoreCommand { .await .context("Failed to bind to store's block-producer gRPC URL")?; - let block_prover = Arc::new(RemoteBlockProver::new(block_prover_url)); + let block_prover = { + if let Some(url) = block_prover_url { + Arc::new(BlockProver::new_remote(url)) + } else { + Arc::new(BlockProver::new_local(None)) + } + }; Store { rpc_listener, diff --git a/bin/stress-test/Cargo.toml b/bin/stress-test/Cargo.toml index fa0bbec82..0a469509d 100644 --- a/bin/stress-test/Cargo.toml +++ b/bin/stress-test/Cargo.toml @@ -28,6 +28,7 @@ miden-node-proto = { workspace = true } miden-node-store = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true } +miden-remote-prover-client = { features = ["block-prover"], workspace = true } rand = { workspace = true } rayon = { version = "1.10" } tokio = { workspace = true } diff --git a/bin/stress-test/src/seeding/mod.rs b/bin/stress-test/src/seeding/mod.rs index e9cf832a1..891eaa5a0 100644 --- a/bin/stress-test/src/seeding/mod.rs +++ b/bin/stress-test/src/seeding/mod.rs @@ -14,7 +14,7 @@ use miden_lib::utils::Serializable; use miden_node_block_producer::store::StoreClient; use miden_node_proto::domain::batch::BatchInputs; use miden_node_proto::generated::store::rpc_client::RpcClient; -use miden_node_store::{DataDirectory, GenesisState, Store}; +use miden_node_store::{BlockProver, DataDirectory, GenesisState, Store}; use miden_node_utils::tracing::grpc::OtelInterceptor; use miden_objects::account::delta::AccountUpdateDetails; use miden_objects::account::{ @@ -247,9 +247,14 @@ async fn apply_block( let (header, body) = build_block(proposed_block.clone()).unwrap(); let signature = EcdsaSecretKey::new().sign(header.commitment()); let block_size: usize = header.to_bytes().len() + body.to_bytes().len(); + let ordered_batches = proposed_block.batches().clone(); let start = Instant::now(); - store_client.apply_block(header.clone(), body, signature).await.unwrap(); + + store_client + .apply_block(ordered_batches, block_inputs, header.clone(), body, signature) + .await + .unwrap(); metrics.track_block_insertion(start.elapsed(), block_size); header @@ -514,6 +519,15 @@ async fn get_block_inputs( /// - the URL of the store pub async fn start_store( data_directory: PathBuf, +) -> (RpcClient>, Url) { + start_store_with_prover(data_directory, None).await +} + +/// Starts the store with an optional remote block prover URL. +/// If `block_prover_url` is None, the store will use a local block prover. 
+pub async fn start_store_with_prover( + data_directory: PathBuf, + block_prover_url: Option, ) -> (RpcClient>, Url) { let rpc_listener = TcpListener::bind("127.0.0.1:0") .await @@ -531,10 +545,19 @@ pub async fn start_store( let dir = data_directory.clone(); task::spawn(async move { + let block_prover = { + if let Some(url) = block_prover_url { + Arc::new(BlockProver::new_remote(url)) + } else { + Arc::new(BlockProver::new_local(None)) + } + }; + Store { rpc_listener, ntx_builder_listener, block_producer_listener, + block_prover, data_directory: dir, grpc_timeout: Duration::from_secs(30), } diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index e34d034e6..d22c4dd03 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -112,7 +112,7 @@ impl BlockBuilder { ProposedBlock::inject_telemetry(proposed_block); }) .and_then(|(proposed_block, inputs)| self.validate_block(proposed_block, inputs)) - .and_then(|(_ordered_batches, _block_inputs, header, body, signature)| self.commit_block(mempool, header, body, signature)) + .and_then(|(ordered_batches, block_inputs, header, body, signature)| self.commit_block(mempool, ordered_batches, block_inputs, header, body, signature)) // Handle errors by propagating the error to the root span and rolling back the block. .inspect_err(|err| Span::current().set_error(err)) .or_else(|_err| self.rollback_block(mempool, block_num).never_error()) @@ -269,12 +269,14 @@ impl BlockBuilder { async fn commit_block( &self, mempool: &SharedMempool, + ordered_batches: OrderedBatches, + block_inputs: BlockInputs, header: BlockHeader, body: BlockBody, signature: Signature, ) -> Result<(), BuildBlockError> { self.store - .apply_block(header.clone(), body, signature) + .apply_block(ordered_batches, block_inputs, header.clone(), body, signature) .await .map_err(BuildBlockError::StoreApplyBlockFailed)?; diff --git a/crates/block-producer/src/store/mod.rs b/crates/block-producer/src/store/mod.rs index 85e013fe6..1787306e8 100644 --- a/crates/block-producer/src/store/mod.rs +++ b/crates/block-producer/src/store/mod.rs @@ -10,6 +10,7 @@ use miden_node_proto::{AccountState, generated as proto}; use miden_node_utils::formatting::format_opt; use miden_objects::Word; use miden_objects::account::AccountId; +use miden_objects::batch::OrderedBatches; use miden_objects::block::{BlockBody, BlockHeader, BlockInputs, BlockNumber}; use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::note::Nullifier; @@ -241,11 +242,15 @@ impl StoreClient { #[instrument(target = COMPONENT, name = "store.client.apply_block", skip_all, err)] pub async fn apply_block( &self, + ordered_batches: OrderedBatches, + block_inputs: BlockInputs, header: BlockHeader, body: BlockBody, signature: Signature, ) -> Result<(), StoreError> { let request = tonic::Request::new(proto::store::ApplyBlockRequest { + ordered_batches: ordered_batches.to_bytes(), + block_inputs: block_inputs.to_bytes(), header: header.to_bytes(), body: body.to_bytes(), signature: signature.to_bytes(), diff --git a/crates/proto/src/generated/store.rs b/crates/proto/src/generated/store.rs index b99816f80..91dc4229d 100644 --- a/crates/proto/src/generated/store.rs +++ b/crates/proto/src/generated/store.rs @@ -2,17 +2,25 @@ /// Applies a block to the state. 
#[derive(Clone, PartialEq, Eq, Hash, ::prost::Message)] pub struct ApplyBlockRequest { + /// Ordered batches encoded using \[winter_utils::Serializable\] implementation for + /// \[miden_objects::batch::OrderedBatches\]. + #[prost(bytes = "vec", tag = "1")] + pub ordered_batches: ::prost::alloc::vec::Vec, + /// Block inputs encoded using \[winter_utils::Serializable\] implementation for + /// \[miden_objects::block::BlockInputs\]. + #[prost(bytes = "vec", tag = "2")] + pub block_inputs: ::prost::alloc::vec::Vec, /// Block header encoded using \[winter_utils::Serializable\] implementation for /// \[miden_objects::block::BlockHeader\]. - #[prost(bytes = "vec", tag = "1")] + #[prost(bytes = "vec", tag = "3")] pub header: ::prost::alloc::vec::Vec, /// Block header encoded using \[winter_utils::Serializable\] implementation for /// \[miden_objects::block::BlockBody\]. - #[prost(bytes = "vec", tag = "2")] + #[prost(bytes = "vec", tag = "4")] pub body: ::prost::alloc::vec::Vec, /// Signature encoded using \[winter_utils::Serializable\] implementation for /// \[crypto::dsa::ecdsa_k256_keccak::Signature\]. - #[prost(bytes = "vec", tag = "3")] + #[prost(bytes = "vec", tag = "5")] pub signature: ::prost::alloc::vec::Vec, } /// Returns data required to prove the next block. @@ -32,8 +40,7 @@ pub struct BlockInputsRequest { /// provide a nullifier witness for it. #[prost(message, repeated, tag = "2")] pub nullifiers: ::prost::alloc::vec::Vec, - /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the - /// store**. + /// Array of note IDs for which to retrieve note inclusion proofs, **if they exist in the store**. #[prost(message, repeated, tag = "3")] pub unauthenticated_notes: ::prost::alloc::vec::Vec, /// Array of block numbers referenced by all batches in the block. @@ -49,8 +56,9 @@ pub struct BlockInputs { /// Proof of each requested unauthenticated note's inclusion in a block, **if it existed in /// the store**. #[prost(message, repeated, tag = "2")] - pub unauthenticated_note_proofs: - ::prost::alloc::vec::Vec, + pub unauthenticated_note_proofs: ::prost::alloc::vec::Vec< + super::note::NoteInclusionInBlockProof, + >, /// The serialized chain MMR which includes proofs for all blocks referenced by the /// above note inclusion proofs as well as proofs for inclusion of the requested blocks /// referenced by the batches in the block. @@ -91,7 +99,9 @@ pub struct BatchInputsRequest { pub struct BatchInputs { /// The block header that the transaction batch should reference. #[prost(message, optional, tag = "1")] - pub batch_reference_block_header: ::core::option::Option, + pub batch_reference_block_header: ::core::option::Option< + super::blockchain::BlockHeader, + >, /// Proof of each *found* unauthenticated note's inclusion in a block. #[prost(message, repeated, tag = "2")] pub note_proofs: ::prost::alloc::vec::Vec, @@ -122,10 +132,14 @@ pub struct TransactionInputsRequest { pub struct TransactionInputs { /// Account state proof. #[prost(message, optional, tag = "1")] - pub account_state: ::core::option::Option, + pub account_state: ::core::option::Option< + transaction_inputs::AccountTransactionInputRecord, + >, /// List of nullifiers that have been consumed. #[prost(message, repeated, tag = "2")] - pub nullifiers: ::prost::alloc::vec::Vec, + pub nullifiers: ::prost::alloc::vec::Vec< + transaction_inputs::NullifierTransactionInputRecord, + >, /// List of unauthenticated notes that were not found in the database. 
#[prost(message, repeated, tag = "3")] pub found_unauthenticated_notes: ::prost::alloc::vec::Vec, @@ -180,8 +194,8 @@ pub struct MaybeAccountDetails { /// Notes created or consumed after the specified block are excluded from the result. #[derive(Clone, Copy, PartialEq, Eq, Hash, ::prost::Message)] pub struct UnconsumedNetworkNotesRequest { - /// This should be null on the first call, and set to the response token until the response - /// token is null, at which point all data has been fetched. + /// This should be null on the first call, and set to the response token until the response token + /// is null, at which point all data has been fetched. /// /// Note that this token is only valid if used with the same parameters. #[prost(uint64, optional, tag = "1")] @@ -237,10 +251,10 @@ pub mod rpc_client { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value + clippy::let_unit_value, )] - use tonic::codegen::http::Uri; use tonic::codegen::*; + use tonic::codegen::http::Uri; /// Store API for the RPC component #[derive(Debug, Clone)] pub struct RpcClient { @@ -272,18 +286,22 @@ pub mod rpc_client { let inner = tonic::client::Grpc::with_origin(inner, origin); Self { inner } } - pub fn with_interceptor(inner: T, interceptor: F) -> RpcClient> + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> RpcClient> where F: tonic::service::Interceptor, T::ResponseBody: Default, T: tonic::codegen::Service< - http::Request, - Response = http::Response< - >::ResponseBody, - >, + http::Request, + Response = http::Response< + >::ResponseBody, >, - >>::Error: - Into + std::marker::Send + std::marker::Sync, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, { RpcClient::new(InterceptedService::new(inner, interceptor)) } @@ -322,11 +340,18 @@ pub mod rpc_client { pub async fn status( &mut self, request: impl tonic::IntoRequest<()>, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/Status"); let mut req = request.into_request(); @@ -341,11 +366,18 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/CheckNullifiers"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/CheckNullifiers", + ); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "CheckNullifiers")); self.inner.unary(req, path, codec).await @@ -358,13 +390,21 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = 
http::uri::PathAndQuery::from_static("/store.Rpc/GetAccountDetails"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/GetAccountDetails", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetAccountDetails")); + req.extensions_mut() + .insert(GrpcMethod::new("store.Rpc", "GetAccountDetails")); self.inner.unary(req, path, codec).await } /// Returns the latest state proof of the specified account. @@ -375,11 +415,18 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetAccountProof"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/GetAccountProof", + ); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetAccountProof")); self.inner.unary(req, path, codec).await @@ -388,31 +435,50 @@ pub mod rpc_client { pub async fn get_block_by_number( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetBlockByNumber"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/GetBlockByNumber", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetBlockByNumber")); + req.extensions_mut() + .insert(GrpcMethod::new("store.Rpc", "GetBlockByNumber")); self.inner.unary(req, path, codec).await } /// Retrieves block header by given block number. Optionally, it also returns the MMR path /// and current chain length to authenticate the block's inclusion. 
pub async fn get_block_header_by_number( &mut self, - request: impl tonic::IntoRequest, + request: impl tonic::IntoRequest< + super::super::rpc::BlockHeaderByNumberRequest, + >, ) -> std::result::Result< tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetBlockHeaderByNumber"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/GetBlockHeaderByNumber", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.Rpc", "GetBlockHeaderByNumber")); @@ -426,9 +492,14 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetNotesById"); let mut req = request.into_request(); @@ -439,19 +510,28 @@ pub mod rpc_client { pub async fn get_note_script_by_root( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/GetNoteScriptByRoot"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/GetNoteScriptByRoot", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "GetNoteScriptByRoot")); + req.extensions_mut() + .insert(GrpcMethod::new("store.Rpc", "GetNoteScriptByRoot")); self.inner.unary(req, path, codec).await } - /// Returns a list of nullifiers that match the specified prefixes and are recorded in the - /// node. + /// Returns a list of nullifiers that match the specified prefixes and are recorded in the node. /// /// Note that only 16-bit prefixes are supported at this time. pub async fn sync_nullifiers( @@ -461,65 +541,80 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncNullifiers"); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncNullifiers")); self.inner.unary(req, path, codec).await } - /// Returns info which can be used by the requester to sync up to the tip of chain for the - /// notes they are interested in. + /// Returns info which can be used by the requester to sync up to the tip of chain for the notes they are interested in. 
/// - /// requester specifies the `note_tags` they are interested in, and the block height from - /// which to search for new for matching notes for. The request will then return the - /// next block containing any note matching the provided tags. + /// requester specifies the `note_tags` they are interested in, and the block height from which to search for new for + /// matching notes for. The request will then return the next block containing any note matching the provided tags. /// /// The response includes each note's metadata and inclusion proof. /// - /// A basic note sync can be implemented by repeatedly requesting the previous response's - /// block until reaching the tip of the chain. + /// A basic note sync can be implemented by repeatedly requesting the previous response's block until reaching the + /// tip of the chain. pub async fn sync_notes( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncNotes"); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncNotes")); self.inner.unary(req, path, codec).await } - /// Returns info which can be used by the requester to sync up to the latest state of the - /// chain for the objects (accounts, notes, nullifiers) the requester is interested - /// in. + /// Returns info which can be used by the requester to sync up to the latest state of the chain + /// for the objects (accounts, notes, nullifiers) the requester is interested in. /// - /// This request returns the next block containing requested data. It also returns - /// `chain_tip` which is the latest block number in the chain. requester is expected - /// to repeat these requests in a loop until `response.block_header.block_num == - /// response.chain_tip`, at which point the requester is fully synchronized with the - /// chain. + /// This request returns the next block containing requested data. It also returns `chain_tip` + /// which is the latest block number in the chain. requester is expected to repeat these requests + /// in a loop until `response.block_header.block_num == response.chain_tip`, at which point + /// the requester is fully synchronized with the chain. /// /// Each request also returns info about new notes, nullifiers etc. created. It also returns - /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both - /// chain MMR peaks and chain MMR nodes. + /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain + /// MMR peaks and chain MMR nodes. /// - /// For preserving some degree of privacy, note tags and nullifiers filters contain only - /// high part of hashes. Thus, returned data contains excessive notes and - /// nullifiers, requester can make additional filtering of that data on its side. + /// For preserving some degree of privacy, note tags and nullifiers filters contain only high + /// part of hashes. Thus, returned data contains excessive notes and nullifiers, requester can make + /// additional filtering of that data on its side. 
pub async fn sync_state( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncState"); let mut req = request.into_request(); @@ -534,17 +629,24 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncAccountVault"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/SyncAccountVault", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncAccountVault")); + req.extensions_mut() + .insert(GrpcMethod::new("store.Rpc", "SyncAccountVault")); self.inner.unary(req, path, codec).await } - /// Returns storage map updates for specified account and storage slots within a block - /// range. + /// Returns storage map updates for specified account and storage slots within a block range. pub async fn sync_storage_maps( &mut self, request: impl tonic::IntoRequest, @@ -552,11 +654,18 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncStorageMaps"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/SyncStorageMaps", + ); let mut req = request.into_request(); req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncStorageMaps")); self.inner.unary(req, path, codec).await @@ -569,13 +678,21 @@ pub mod rpc_client { tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.Rpc/SyncTransactions"); + let path = http::uri::PathAndQuery::from_static( + "/store.Rpc/SyncTransactions", + ); let mut req = request.into_request(); - req.extensions_mut().insert(GrpcMethod::new("store.Rpc", "SyncTransactions")); + req.extensions_mut() + .insert(GrpcMethod::new("store.Rpc", "SyncTransactions")); self.inner.unary(req, path, codec).await } } @@ -587,7 +704,7 @@ pub mod rpc_server { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value + clippy::let_unit_value, )] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with RpcServer. 
@@ -597,7 +714,10 @@ pub mod rpc_server { async fn status( &self, request: tonic::Request<()>, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; /// Returns a nullifier proof for each of the requested nullifiers. async fn check_nullifiers( &self, @@ -626,7 +746,10 @@ pub mod rpc_server { async fn get_block_by_number( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; /// Retrieves block header by given block number. Optionally, it also returns the MMR path /// and current chain length to authenticate the block's inclusion. async fn get_block_header_by_number( @@ -648,9 +771,11 @@ pub mod rpc_server { async fn get_note_script_by_root( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; - /// Returns a list of nullifiers that match the specified prefixes and are recorded in the - /// node. + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Returns a list of nullifiers that match the specified prefixes and are recorded in the node. /// /// Note that only 16-bit prefixes are supported at this time. async fn sync_nullifiers( @@ -660,42 +785,44 @@ pub mod rpc_server { tonic::Response, tonic::Status, >; - /// Returns info which can be used by the requester to sync up to the tip of chain for the - /// notes they are interested in. + /// Returns info which can be used by the requester to sync up to the tip of chain for the notes they are interested in. /// - /// requester specifies the `note_tags` they are interested in, and the block height from - /// which to search for new for matching notes for. The request will then return the - /// next block containing any note matching the provided tags. + /// requester specifies the `note_tags` they are interested in, and the block height from which to search for new for + /// matching notes for. The request will then return the next block containing any note matching the provided tags. /// /// The response includes each note's metadata and inclusion proof. /// - /// A basic note sync can be implemented by repeatedly requesting the previous response's - /// block until reaching the tip of the chain. + /// A basic note sync can be implemented by repeatedly requesting the previous response's block until reaching the + /// tip of the chain. async fn sync_notes( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; - /// Returns info which can be used by the requester to sync up to the latest state of the - /// chain for the objects (accounts, notes, nullifiers) the requester is interested - /// in. + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Returns info which can be used by the requester to sync up to the latest state of the chain + /// for the objects (accounts, notes, nullifiers) the requester is interested in. /// - /// This request returns the next block containing requested data. It also returns - /// `chain_tip` which is the latest block number in the chain. requester is expected - /// to repeat these requests in a loop until `response.block_header.block_num == - /// response.chain_tip`, at which point the requester is fully synchronized with the - /// chain. + /// This request returns the next block containing requested data. It also returns `chain_tip` + /// which is the latest block number in the chain. 
requester is expected to repeat these requests + /// in a loop until `response.block_header.block_num == response.chain_tip`, at which point + /// the requester is fully synchronized with the chain. /// /// Each request also returns info about new notes, nullifiers etc. created. It also returns - /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both - /// chain MMR peaks and chain MMR nodes. + /// Chain MMR delta that can be used to update the state of Chain MMR. This includes both chain + /// MMR peaks and chain MMR nodes. /// - /// For preserving some degree of privacy, note tags and nullifiers filters contain only - /// high part of hashes. Thus, returned data contains excessive notes and - /// nullifiers, requester can make additional filtering of that data on its side. + /// For preserving some degree of privacy, note tags and nullifiers filters contain only high + /// part of hashes. Thus, returned data contains excessive notes and nullifiers, requester can make + /// additional filtering of that data on its side. async fn sync_state( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; /// Returns account vault updates for specified account within a block range. async fn sync_account_vault( &self, @@ -704,8 +831,7 @@ pub mod rpc_server { tonic::Response, tonic::Status, >; - /// Returns storage map updates for specified account and storage slots within a block - /// range. + /// Returns storage map updates for specified account and storage slots within a block range. async fn sync_storage_maps( &self, request: tonic::Request, @@ -744,7 +870,10 @@ pub mod rpc_server { max_encoding_message_size: None, } } - pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService where F: tonic::service::Interceptor, { @@ -801,10 +930,15 @@ pub mod rpc_server { struct StatusSvc(pub Arc); impl tonic::server::UnaryService<()> for StatusSvc { type Response = super::super::rpc::StoreStatus; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call(&mut self, request: tonic::Request<()>) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { ::status(&inner, request).await }; + let fut = async move { + ::status(&inner, request).await + }; Box::pin(fut) } } @@ -829,22 +963,27 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/CheckNullifiers" => { #[allow(non_camel_case_types)] struct CheckNullifiersSvc(pub Arc); - impl tonic::server::UnaryService - for CheckNullifiersSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService + for CheckNullifiersSvc { type Response = super::super::rpc::CheckNullifiersResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = - async move { ::check_nullifiers(&inner, request).await }; + let fut = async move { + ::check_nullifiers(&inner, request).await + }; Box::pin(fut) } } @@ -869,15 +1008,19 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/GetAccountDetails" => { #[allow(non_camel_case_types)] struct GetAccountDetailsSvc(pub Arc); - impl tonic::server::UnaryService - for GetAccountDetailsSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService + for 
GetAccountDetailsSvc { type Response = super::super::account::AccountDetails; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, @@ -910,22 +1053,29 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/GetAccountProof" => { #[allow(non_camel_case_types)] struct GetAccountProofSvc(pub Arc); - impl tonic::server::UnaryService - for GetAccountProofSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService + for GetAccountProofSvc { type Response = super::super::rpc::AccountProofResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::AccountProofRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = - async move { ::get_account_proof(&inner, request).await }; + let fut = async move { + ::get_account_proof(&inner, request).await + }; Box::pin(fut) } } @@ -950,18 +1100,24 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/GetBlockByNumber" => { #[allow(non_camel_case_types)] struct GetBlockByNumberSvc(pub Arc); - impl tonic::server::UnaryService - for GetBlockByNumberSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService + for GetBlockByNumberSvc { type Response = super::super::blockchain::MaybeBlock; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::blockchain::BlockNumber, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { @@ -991,23 +1147,30 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/GetBlockHeaderByNumber" => { #[allow(non_camel_case_types)] struct GetBlockHeaderByNumberSvc(pub Arc); - impl - tonic::server::UnaryService - for GetBlockHeaderByNumberSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService< + super::super::rpc::BlockHeaderByNumberRequest, + > for GetBlockHeaderByNumberSvc { type Response = super::super::rpc::BlockHeaderByNumberResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::BlockHeaderByNumberRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_header_by_number(&inner, request).await + ::get_block_header_by_number(&inner, request) + .await }; Box::pin(fut) } @@ -1033,20 +1196,27 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/GetNotesById" => { #[allow(non_camel_case_types)] struct GetNotesByIdSvc(pub Arc); - impl tonic::server::UnaryService for GetNotesByIdSvc { + impl< + T: Rpc, + > tonic::server::UnaryService + for GetNotesByIdSvc { type Response = super::super::note::CommittedNoteList; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = - async move { ::get_notes_by_id(&inner, request).await }; + let fut = async move { + ::get_notes_by_id(&inner, request).await + }; Box::pin(fut) } } @@ -1071,15 +1241,19 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/GetNoteScriptByRoot" => { #[allow(non_camel_case_types)] struct 
GetNoteScriptByRootSvc(pub Arc); - impl tonic::server::UnaryService - for GetNoteScriptByRootSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService + for GetNoteScriptByRootSvc { type Response = super::super::rpc::MaybeNoteScript; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, @@ -1112,23 +1286,30 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/SyncNullifiers" => { #[allow(non_camel_case_types)] struct SyncNullifiersSvc(pub Arc); - impl - tonic::server::UnaryService - for SyncNullifiersSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService< + super::super::rpc::SyncNullifiersRequest, + > for SyncNullifiersSvc { type Response = super::super::rpc::SyncNullifiersResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::SyncNullifiersRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = - async move { ::sync_nullifiers(&inner, request).await }; + let fut = async move { + ::sync_nullifiers(&inner, request).await + }; Box::pin(fut) } } @@ -1153,19 +1334,27 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/SyncNotes" => { #[allow(non_camel_case_types)] struct SyncNotesSvc(pub Arc); - impl tonic::server::UnaryService for SyncNotesSvc { + impl< + T: Rpc, + > tonic::server::UnaryService + for SyncNotesSvc { type Response = super::super::rpc::SyncNotesResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { ::sync_notes(&inner, request).await }; + let fut = async move { + ::sync_notes(&inner, request).await + }; Box::pin(fut) } } @@ -1190,19 +1379,27 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/SyncState" => { #[allow(non_camel_case_types)] struct SyncStateSvc(pub Arc); - impl tonic::server::UnaryService for SyncStateSvc { + impl< + T: Rpc, + > tonic::server::UnaryService + for SyncStateSvc { type Response = super::super::rpc::SyncStateResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = async move { ::sync_state(&inner, request).await }; + let fut = async move { + ::sync_state(&inner, request).await + }; Box::pin(fut) } } @@ -1227,19 +1424,25 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/SyncAccountVault" => { #[allow(non_camel_case_types)] struct SyncAccountVaultSvc(pub Arc); - impl - tonic::server::UnaryService - for SyncAccountVaultSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService< + super::super::rpc::SyncAccountVaultRequest, + > for SyncAccountVaultSvc { type Response = super::super::rpc::SyncAccountVaultResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::SyncAccountVaultRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { @@ -1269,23 +1472,30 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/SyncStorageMaps" => { 
#[allow(non_camel_case_types)] struct SyncStorageMapsSvc(pub Arc); - impl - tonic::server::UnaryService - for SyncStorageMapsSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService< + super::super::rpc::SyncStorageMapsRequest, + > for SyncStorageMapsSvc { type Response = super::super::rpc::SyncStorageMapsResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::SyncStorageMapsRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = - async move { ::sync_storage_maps(&inner, request).await }; + let fut = async move { + ::sync_storage_maps(&inner, request).await + }; Box::pin(fut) } } @@ -1310,23 +1520,30 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, + } "/store.Rpc/SyncTransactions" => { #[allow(non_camel_case_types)] struct SyncTransactionsSvc(pub Arc); - impl - tonic::server::UnaryService - for SyncTransactionsSvc - { + impl< + T: Rpc, + > tonic::server::UnaryService< + super::super::rpc::SyncTransactionsRequest, + > for SyncTransactionsSvc { type Response = super::super::rpc::SyncTransactionsResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::SyncTransactionsRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); - let fut = - async move { ::sync_transactions(&inner, request).await }; + let fut = async move { + ::sync_transactions(&inner, request).await + }; Box::pin(fut) } } @@ -1351,17 +1568,26 @@ pub mod rpc_server { Ok(res) }; Box::pin(fut) - }, - _ => Box::pin(async move { - let mut response = http::Response::new(tonic::body::Body::default()); - let headers = response.headers_mut(); - headers.insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers.insert(http::header::CONTENT_TYPE, tonic::metadata::GRPC_CONTENT_TYPE); - Ok(response) - }), + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } } } } @@ -1390,10 +1616,10 @@ pub mod block_producer_client { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value + clippy::let_unit_value, )] - use tonic::codegen::http::Uri; use tonic::codegen::*; + use tonic::codegen::http::Uri; /// Store API for the BlockProducer component #[derive(Debug, Clone)] pub struct BlockProducerClient { @@ -1433,13 +1659,14 @@ pub mod block_producer_client { F: tonic::service::Interceptor, T::ResponseBody: Default, T: tonic::codegen::Service< - http::Request, - Response = http::Response< - >::ResponseBody, - >, + http::Request, + Response = http::Response< + >::ResponseBody, >, - >>::Error: - Into + std::marker::Send + std::marker::Sync, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, { BlockProducerClient::new(InterceptedService::new(inner, interceptor)) } @@ -1479,11 +1706,18 @@ pub mod block_producer_client { &mut self, request: impl tonic::IntoRequest, ) -> std::result::Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", 
e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.BlockProducer/ApplyBlock"); + let path = http::uri::PathAndQuery::from_static( + "/store.BlockProducer/ApplyBlock", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "ApplyBlock")); @@ -1493,20 +1727,30 @@ pub mod block_producer_client { /// and current chain length to authenticate the block's inclusion. pub async fn get_block_header_by_number( &mut self, - request: impl tonic::IntoRequest, + request: impl tonic::IntoRequest< + super::super::rpc::BlockHeaderByNumberRequest, + >, ) -> std::result::Result< tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = - http::uri::PathAndQuery::from_static("/store.BlockProducer/GetBlockHeaderByNumber"); + let path = http::uri::PathAndQuery::from_static( + "/store.BlockProducer/GetBlockHeaderByNumber", + ); let mut req = request.into_request(); req.extensions_mut() - .insert(GrpcMethod::new("store.BlockProducer", "GetBlockHeaderByNumber")); + .insert( + GrpcMethod::new("store.BlockProducer", "GetBlockHeaderByNumber"), + ); self.inner.unary(req, path, codec).await } /// Returns data required to prove the next block. @@ -1514,11 +1758,18 @@ pub mod block_producer_client { &mut self, request: impl tonic::IntoRequest, ) -> std::result::Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.BlockProducer/GetBlockInputs"); + let path = http::uri::PathAndQuery::from_static( + "/store.BlockProducer/GetBlockInputs", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "GetBlockInputs")); @@ -1529,11 +1780,18 @@ pub mod block_producer_client { &mut self, request: impl tonic::IntoRequest, ) -> std::result::Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = http::uri::PathAndQuery::from_static("/store.BlockProducer/GetBatchInputs"); + let path = http::uri::PathAndQuery::from_static( + "/store.BlockProducer/GetBatchInputs", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "GetBatchInputs")); @@ -1543,13 +1801,22 @@ pub mod block_producer_client { pub async fn get_transaction_inputs( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + 
tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = - http::uri::PathAndQuery::from_static("/store.BlockProducer/GetTransactionInputs"); + let path = http::uri::PathAndQuery::from_static( + "/store.BlockProducer/GetTransactionInputs", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.BlockProducer", "GetTransactionInputs")); @@ -1564,11 +1831,10 @@ pub mod block_producer_server { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value + clippy::let_unit_value, )] use tonic::codegen::*; - /// Generated trait containing gRPC methods that should be implemented for use with - /// BlockProducerServer. + /// Generated trait containing gRPC methods that should be implemented for use with BlockProducerServer. #[async_trait] pub trait BlockProducer: std::marker::Send + std::marker::Sync + 'static { /// Applies changes of a new block to the DB and in-memory data structures. @@ -1599,7 +1865,10 @@ pub mod block_producer_server { async fn get_transaction_inputs( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; } /// Store API for the BlockProducer component #[derive(Debug)] @@ -1623,7 +1892,10 @@ pub mod block_producer_server { max_encoding_message_size: None, } } - pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService where F: tonic::service::Interceptor, { @@ -1678,9 +1950,15 @@ pub mod block_producer_server { "/store.BlockProducer/ApplyBlock" => { #[allow(non_camel_case_types)] struct ApplyBlockSvc(pub Arc); - impl tonic::server::UnaryService for ApplyBlockSvc { + impl< + T: BlockProducer, + > tonic::server::UnaryService + for ApplyBlockSvc { type Response = (); - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, @@ -1713,23 +1991,32 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - }, + } "/store.BlockProducer/GetBlockHeaderByNumber" => { #[allow(non_camel_case_types)] struct GetBlockHeaderByNumberSvc(pub Arc); - impl - tonic::server::UnaryService - for GetBlockHeaderByNumberSvc - { + impl< + T: BlockProducer, + > tonic::server::UnaryService< + super::super::rpc::BlockHeaderByNumberRequest, + > for GetBlockHeaderByNumberSvc { type Response = super::super::rpc::BlockHeaderByNumberResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::BlockHeaderByNumberRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_header_by_number(&inner, request) + ::get_block_header_by_number( + &inner, + request, + ) .await }; Box::pin(fut) @@ -1756,22 +2043,27 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - }, + } "/store.BlockProducer/GetBlockInputs" => { #[allow(non_camel_case_types)] struct GetBlockInputsSvc(pub Arc); - impl tonic::server::UnaryService - for GetBlockInputsSvc - { + impl< + T: BlockProducer, + > tonic::server::UnaryService + for GetBlockInputsSvc { type Response = super::BlockInputs; - type Future = BoxFuture, tonic::Status>; + 
type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_inputs(&inner, request).await + ::get_block_inputs(&inner, request) + .await }; Box::pin(fut) } @@ -1797,22 +2089,27 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - }, + } "/store.BlockProducer/GetBatchInputs" => { #[allow(non_camel_case_types)] struct GetBatchInputsSvc(pub Arc); - impl tonic::server::UnaryService - for GetBatchInputsSvc - { + impl< + T: BlockProducer, + > tonic::server::UnaryService + for GetBatchInputsSvc { type Response = super::BatchInputs; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_batch_inputs(&inner, request).await + ::get_batch_inputs(&inner, request) + .await }; Box::pin(fut) } @@ -1838,23 +2135,30 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - }, + } "/store.BlockProducer/GetTransactionInputs" => { #[allow(non_camel_case_types)] struct GetTransactionInputsSvc(pub Arc); - impl - tonic::server::UnaryService - for GetTransactionInputsSvc - { + impl< + T: BlockProducer, + > tonic::server::UnaryService + for GetTransactionInputsSvc { type Response = super::TransactionInputs; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_transaction_inputs(&inner, request).await + ::get_transaction_inputs( + &inner, + request, + ) + .await }; Box::pin(fut) } @@ -1880,17 +2184,26 @@ pub mod block_producer_server { Ok(res) }; Box::pin(fut) - }, - _ => Box::pin(async move { - let mut response = http::Response::new(tonic::body::Body::default()); - let headers = response.headers_mut(); - headers.insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers.insert(http::header::CONTENT_TYPE, tonic::metadata::GRPC_CONTENT_TYPE); - Ok(response) - }), + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } } } } @@ -1919,10 +2232,10 @@ pub mod ntx_builder_client { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value + clippy::let_unit_value, )] - use tonic::codegen::http::Uri; use tonic::codegen::*; + use tonic::codegen::http::Uri; /// Store API for the network transaction builder component #[derive(Debug, Clone)] pub struct NtxBuilderClient { @@ -1962,13 +2275,14 @@ pub mod ntx_builder_client { F: tonic::service::Interceptor, T::ResponseBody: Default, T: tonic::codegen::Service< - http::Request, - Response = http::Response< - >::ResponseBody, - >, + http::Request, + Response = http::Response< + >::ResponseBody, >, - >>::Error: - Into + std::marker::Send + std::marker::Sync, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, { NtxBuilderClient::new(InterceptedService::new(inner, interceptor)) } @@ -2007,17 +2321,25 @@ pub mod ntx_builder_client { /// and current chain length to authenticate the 
block's inclusion. pub async fn get_block_header_by_number( &mut self, - request: impl tonic::IntoRequest, + request: impl tonic::IntoRequest< + super::super::rpc::BlockHeaderByNumberRequest, + >, ) -> std::result::Result< tonic::Response, tonic::Status, > { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = - http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetBlockHeaderByNumber"); + let path = http::uri::PathAndQuery::from_static( + "/store.NtxBuilder/GetBlockHeaderByNumber", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetBlockHeaderByNumber")); @@ -2027,33 +2349,51 @@ pub mod ntx_builder_client { pub async fn get_unconsumed_network_notes( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = - http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetUnconsumedNetworkNotes"); + let path = http::uri::PathAndQuery::from_static( + "/store.NtxBuilder/GetUnconsumedNetworkNotes", + ); let mut req = request.into_request(); req.extensions_mut() - .insert(GrpcMethod::new("store.NtxBuilder", "GetUnconsumedNetworkNotes")); + .insert( + GrpcMethod::new("store.NtxBuilder", "GetUnconsumedNetworkNotes"), + ); self.inner.unary(req, path, codec).await } - /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to - /// this header for executing network transactions. If the block number is not - /// provided, the latest header and peaks will be retrieved. + /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to this + /// header for executing network transactions. If the block number is not provided, the latest + /// header and peaks will be retrieved. 
pub async fn get_current_blockchain_data( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = - http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetCurrentBlockchainData"); + let path = http::uri::PathAndQuery::from_static( + "/store.NtxBuilder/GetCurrentBlockchainData", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetCurrentBlockchainData")); @@ -2063,32 +2403,52 @@ pub mod ntx_builder_client { pub async fn get_network_account_details_by_prefix( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); let path = http::uri::PathAndQuery::from_static( "/store.NtxBuilder/GetNetworkAccountDetailsByPrefix", ); let mut req = request.into_request(); req.extensions_mut() - .insert(GrpcMethod::new("store.NtxBuilder", "GetNetworkAccountDetailsByPrefix")); + .insert( + GrpcMethod::new( + "store.NtxBuilder", + "GetNetworkAccountDetailsByPrefix", + ), + ); self.inner.unary(req, path, codec).await } /// Returns a list of all network account ids. 
pub async fn get_network_account_ids( &mut self, request: impl tonic::IntoRequest<()>, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = - http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetNetworkAccountIds"); + let path = http::uri::PathAndQuery::from_static( + "/store.NtxBuilder/GetNetworkAccountIds", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetNetworkAccountIds")); @@ -2098,14 +2458,22 @@ pub mod ntx_builder_client { pub async fn get_note_script_by_root( &mut self, request: impl tonic::IntoRequest, - ) -> std::result::Result, tonic::Status> - { - self.inner.ready().await.map_err(|e| { - tonic::Status::unknown(format!("Service was not ready: {}", e.into())) - })?; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; let codec = tonic_prost::ProstCodec::default(); - let path = - http::uri::PathAndQuery::from_static("/store.NtxBuilder/GetNoteScriptByRoot"); + let path = http::uri::PathAndQuery::from_static( + "/store.NtxBuilder/GetNoteScriptByRoot", + ); let mut req = request.into_request(); req.extensions_mut() .insert(GrpcMethod::new("store.NtxBuilder", "GetNoteScriptByRoot")); @@ -2120,11 +2488,10 @@ pub mod ntx_builder_server { dead_code, missing_docs, clippy::wildcard_imports, - clippy::let_unit_value + clippy::let_unit_value, )] use tonic::codegen::*; - /// Generated trait containing gRPC methods that should be implemented for use with - /// NtxBuilderServer. + /// Generated trait containing gRPC methods that should be implemented for use with NtxBuilderServer. #[async_trait] pub trait NtxBuilder: std::marker::Send + std::marker::Sync + 'static { /// Retrieves block header by given block number. Optionally, it also returns the MMR path @@ -2140,29 +2507,44 @@ pub mod ntx_builder_server { async fn get_unconsumed_network_notes( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; - /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to - /// this header for executing network transactions. If the block number is not - /// provided, the latest header and peaks will be retrieved. + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Returns the block header at the chain tip, as well as the MMR peaks corresponding to this + /// header for executing network transactions. If the block number is not provided, the latest + /// header and peaks will be retrieved. async fn get_current_blockchain_data( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; /// Returns the latest state of a network account with the specified account prefix. async fn get_network_account_details_by_prefix( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; /// Returns a list of all network account ids. 
async fn get_network_account_ids( &self, request: tonic::Request<()>, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; /// Returns the script for a note by its root. async fn get_note_script_by_root( &self, request: tonic::Request, - ) -> std::result::Result, tonic::Status>; + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; } /// Store API for the network transaction builder component #[derive(Debug)] @@ -2186,7 +2568,10 @@ pub mod ntx_builder_server { max_encoding_message_size: None, } } - pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService where F: tonic::service::Interceptor, { @@ -2241,19 +2626,29 @@ pub mod ntx_builder_server { "/store.NtxBuilder/GetBlockHeaderByNumber" => { #[allow(non_camel_case_types)] struct GetBlockHeaderByNumberSvc(pub Arc); - impl - tonic::server::UnaryService - for GetBlockHeaderByNumberSvc - { + impl< + T: NtxBuilder, + > tonic::server::UnaryService< + super::super::rpc::BlockHeaderByNumberRequest, + > for GetBlockHeaderByNumberSvc { type Response = super::super::rpc::BlockHeaderByNumberResponse; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::rpc::BlockHeaderByNumberRequest, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_block_header_by_number(&inner, request).await + ::get_block_header_by_number( + &inner, + request, + ) + .await }; Box::pin(fut) } @@ -2279,23 +2674,29 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - }, + } "/store.NtxBuilder/GetUnconsumedNetworkNotes" => { #[allow(non_camel_case_types)] struct GetUnconsumedNetworkNotesSvc(pub Arc); - impl - tonic::server::UnaryService - for GetUnconsumedNetworkNotesSvc - { + impl< + T: NtxBuilder, + > tonic::server::UnaryService + for GetUnconsumedNetworkNotesSvc { type Response = super::UnconsumedNetworkNotes; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_unconsumed_network_notes(&inner, request) + ::get_unconsumed_network_notes( + &inner, + request, + ) .await }; Box::pin(fut) @@ -2322,23 +2723,32 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - }, + } "/store.NtxBuilder/GetCurrentBlockchainData" => { #[allow(non_camel_case_types)] struct GetCurrentBlockchainDataSvc(pub Arc); - impl - tonic::server::UnaryService - for GetCurrentBlockchainDataSvc - { + impl< + T: NtxBuilder, + > tonic::server::UnaryService< + super::super::blockchain::MaybeBlockNumber, + > for GetCurrentBlockchainDataSvc { type Response = super::CurrentBlockchainData; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, - request: tonic::Request, + request: tonic::Request< + super::super::blockchain::MaybeBlockNumber, + >, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_current_blockchain_data(&inner, request) + ::get_current_blockchain_data( + &inner, + request, + ) .await }; Box::pin(fut) @@ -2365,15 +2775,21 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - }, + } "/store.NtxBuilder/GetNetworkAccountDetailsByPrefix" => { 
#[allow(non_camel_case_types)] - struct GetNetworkAccountDetailsByPrefixSvc(pub Arc); - impl tonic::server::UnaryService - for GetNetworkAccountDetailsByPrefixSvc - { + struct GetNetworkAccountDetailsByPrefixSvc( + pub Arc, + ); + impl< + T: NtxBuilder, + > tonic::server::UnaryService + for GetNetworkAccountDetailsByPrefixSvc { type Response = super::MaybeAccountDetails; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, @@ -2381,9 +2797,10 @@ pub mod ntx_builder_server { let inner = Arc::clone(&self.0); let fut = async move { ::get_network_account_details_by_prefix( - &inner, request, - ) - .await + &inner, + request, + ) + .await }; Box::pin(fut) } @@ -2409,17 +2826,22 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - }, + } "/store.NtxBuilder/GetNetworkAccountIds" => { #[allow(non_camel_case_types)] struct GetNetworkAccountIdsSvc(pub Arc); - impl tonic::server::UnaryService<()> for GetNetworkAccountIdsSvc { + impl tonic::server::UnaryService<()> + for GetNetworkAccountIdsSvc { type Response = super::NetworkAccountIdList; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call(&mut self, request: tonic::Request<()>) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_network_account_ids(&inner, request).await + ::get_network_account_ids(&inner, request) + .await }; Box::pin(fut) } @@ -2445,22 +2867,27 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - }, + } "/store.NtxBuilder/GetNoteScriptByRoot" => { #[allow(non_camel_case_types)] struct GetNoteScriptByRootSvc(pub Arc); - impl tonic::server::UnaryService - for GetNoteScriptByRootSvc - { + impl< + T: NtxBuilder, + > tonic::server::UnaryService + for GetNoteScriptByRootSvc { type Response = super::super::rpc::MaybeNoteScript; - type Future = BoxFuture, tonic::Status>; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; fn call( &mut self, request: tonic::Request, ) -> Self::Future { let inner = Arc::clone(&self.0); let fut = async move { - ::get_note_script_by_root(&inner, request).await + ::get_note_script_by_root(&inner, request) + .await }; Box::pin(fut) } @@ -2486,17 +2913,26 @@ pub mod ntx_builder_server { Ok(res) }; Box::pin(fut) - }, - _ => Box::pin(async move { - let mut response = http::Response::new(tonic::body::Body::default()); - let headers = response.headers_mut(); - headers.insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers.insert(http::header::CONTENT_TYPE, tonic::metadata::GRPC_CONTENT_TYPE); - Ok(response) - }), + } + _ => { + Box::pin(async move { + let mut response = http::Response::new( + tonic::body::Body::default(), + ); + let headers = response.headers_mut(); + headers + .insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers + .insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }) + } } } } diff --git a/crates/rpc/src/tests.rs b/crates/rpc/src/tests.rs index 05e9ee263..36b4b3efc 100644 --- a/crates/rpc/src/tests.rs +++ b/crates/rpc/src/tests.rs @@ -1,4 +1,5 @@ use std::net::SocketAddr; +use std::sync::Arc; use std::time::Duration; use http::header::{ACCEPT, CONTENT_TYPE}; @@ -7,8 +8,8 @@ use miden_lib::account::wallets::BasicWallet; use miden_node_proto::clients::{Builder, RpcClient}; use miden_node_proto::generated::rpc::api_client::ApiClient as 
ProtoClient; use miden_node_proto::generated::{self as proto}; -use miden_node_store::Store; use miden_node_store::genesis::config::GenesisConfig; +use miden_node_store::{BlockProver, Store}; use miden_node_utils::fee::test_fee; use miden_objects::Word; use miden_objects::account::delta::AccountUpdateDetails; @@ -141,11 +142,15 @@ async fn rpc_startup_is_robust_to_network_failures() { .expect("Failed to bind store ntx-builder gRPC endpoint"); let block_producer_listener = TcpListener::bind("127.0.0.1:0").await.expect("store should bind a port"); + + let block_prover = Arc::new(BlockProver::new_local(None)); + task::spawn(async move { Store { rpc_listener, ntx_builder_listener, block_producer_listener, + block_prover, data_directory: data_directory.path().to_path_buf(), grpc_timeout: Duration::from_secs(10), } @@ -443,11 +448,13 @@ async fn start_store(store_addr: SocketAddr) -> (Runtime, TempDir, Word) { // kill the runtime. let store_runtime = runtime::Builder::new_multi_thread().enable_time().enable_io().build().unwrap(); + let block_prover = Arc::new(BlockProver::new_local(None)); store_runtime.spawn(async move { Store { rpc_listener, ntx_builder_listener, block_producer_listener, + block_prover, data_directory: dir, grpc_timeout: Duration::from_secs(30), } diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index f6d671da2..779d0c6c4 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -42,7 +42,8 @@ tonic = { default-features = true, workspace = true } tonic-reflection = { workspace = true } tower-http = { features = ["util"], workspace = true } tracing = { workspace = true } -miden-remote-prover-client = { workspace = true } +miden-remote-prover-client = { features = ["block-prover"], workspace = true } +miden-block-prover = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } diff --git a/crates/store/src/db/models/queries/block_headers.rs b/crates/store/src/db/models/queries/block_headers.rs index 7850dfaa9..3388b8328 100644 --- a/crates/store/src/db/models/queries/block_headers.rs +++ b/crates/store/src/db/models/queries/block_headers.rs @@ -131,7 +131,7 @@ pub struct BlockHeaderRawRow { #[allow(dead_code)] pub block_num: i64, pub block_header: Vec, - pub signature: Vec, // TODO(currentpr): use? 
+ pub signature: Vec, } impl TryInto for BlockHeaderRawRow { type Error = DatabaseError; diff --git a/crates/store/src/lib.rs b/crates/store/src/lib.rs index ce4956470..50cb264b2 100644 --- a/crates/store/src/lib.rs +++ b/crates/store/src/lib.rs @@ -10,7 +10,7 @@ pub mod state; pub use accounts::{AccountTreeWithHistory, HistoricalError, InMemoryAccountTree}; pub use genesis::GenesisState; -pub use server::{DataDirectory, Store}; +pub use server::{BlockProver, DataDirectory, Store}; // CONSTANTS // ================================================================================================= diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 3becd48eb..c594545cf 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -1,15 +1,23 @@ use std::collections::BTreeSet; use std::sync::Arc; +use miden_block_prover::{BlockProverError, LocalBlockProver}; use miden_node_proto::errors::ConversionError; use miden_node_proto::generated as proto; use miden_node_utils::ErrorReport; -use miden_objects::Word; use miden_objects::account::AccountId; use miden_objects::batch::OrderedBatches; -use miden_objects::block::{BlockBody, BlockHeader, BlockInputs, BlockNumber, ProvenBlock}; +use miden_objects::block::{ + BlockBody, + BlockHeader, + BlockInputs, + BlockNumber, + BlockProof, + ProvenBlock, +}; use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::note::Nullifier; +use miden_objects::{MIN_PROOF_SECURITY_LEVEL, Word}; use miden_remote_prover_client::RemoteProverClientError; use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use tonic::{Request, Response, Status}; @@ -18,12 +26,63 @@ use tracing::{info, instrument}; use crate::COMPONENT; use crate::state::State; +// TODO(currentpr): move error + +#[derive(Debug, thiserror::Error)] +pub enum StoreProverError { + #[error("local proving failed")] + LocalProvingFailed(#[from] BlockProverError), + #[error("remote proving failed")] + RemoteProvingFailed(#[from] RemoteProverClientError), +} + +// TODO(currentpr): move block prover +// BLOCK PROVER +// ================================================================================================ + +/// Block prover which allows for proving via either local or remote backend. +/// +/// The local proving variant is intended for development and testing purposes. +/// The remote proving variant is intended for production use. +pub enum BlockProver { + Local(LocalBlockProver), + Remote(RemoteBlockProver), +} + +impl BlockProver { + pub fn new_local(security_level: Option) -> Self { + info!(target: COMPONENT, "Using local block prover"); + let security_level = security_level.unwrap_or(MIN_PROOF_SECURITY_LEVEL); + Self::Local(LocalBlockProver::new(security_level)) + } + + pub fn new_remote(endpoint: impl Into) -> Self { + info!(target: COMPONENT, "Using remote block prover"); + Self::Remote(RemoteBlockProver::new(endpoint)) + } + + #[instrument(target = COMPONENT, skip_all, err)] + pub async fn prove( + &self, + tx_batches: OrderedBatches, + block_header: BlockHeader, + block_inputs: BlockInputs, + ) -> Result { + match self { + Self::Local(prover) => Ok(prover.prove(tx_batches, block_header, block_inputs)?), + Self::Remote(prover) => { + Ok(prover.prove(tx_batches, block_header, block_inputs).await?) 
+ }, + } + } +} + // STORE API // ================================================================================================ pub struct StoreApi { pub(super) state: Arc, - pub(super) block_prover: Arc, + pub(super) block_prover: Arc, } impl StoreApi { @@ -48,20 +107,22 @@ impl StoreApi { })) } - #[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] - async fn prove_block( + #[instrument(target = COMPONENT, name = "store.prove_block", skip_all, err)] + pub async fn prove_block( &self, ordered_batches: OrderedBatches, block_inputs: BlockInputs, header: BlockHeader, signature: Signature, body: BlockBody, - ) -> Result { + ) -> Result { // Prove block. let block_proof = self .block_prover .prove(ordered_batches.clone(), header.clone(), block_inputs) .await?; + + // TODO(currentpr): reinstate simulation //self.simulate_proving().await; // SAFETY: The header and body are assumed valid and consistent with the proof. diff --git a/crates/store/src/server/block_producer.rs b/crates/store/src/server/block_producer.rs index f3e85ac52..d1c18de34 100644 --- a/crates/store/src/server/block_producer.rs +++ b/crates/store/src/server/block_producer.rs @@ -5,7 +5,8 @@ use miden_node_proto::generated::{self as proto}; use miden_node_proto::try_convert; use miden_node_utils::ErrorReport; use miden_objects::Word; -use miden_objects::block::{BlockBody, BlockHeader, BlockNumber}; +use miden_objects::batch::OrderedBatches; +use miden_objects::block::{BlockBody, BlockHeader, BlockInputs, BlockNumber}; use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::utils::Deserializable; use tonic::{Request, Response, Status}; @@ -59,22 +60,26 @@ impl block_producer_server::BlockProducer for StoreApi { &self, request: Request, ) -> Result, Status> { + // Read the request. let request = request.into_inner(); - debug!(target: COMPONENT, ?request); - + let ordered_batches = OrderedBatches::read_from_bytes(&request.header).map_err(|err| { + Status::invalid_argument(err.as_report_context("failed to deserialize ordered batches")) + })?; + let block_inputs = BlockInputs::read_from_bytes(&request.header).map_err(|err| { + Status::invalid_argument(err.as_report_context("failed to deserialize block inputs")) + })?; let header = BlockHeader::read_from_bytes(&request.header).map_err(|err| { - Status::invalid_argument(err.as_report_context("header deserialization error")) + Status::invalid_argument(err.as_report_context("failed to deserialize block header")) })?; let body = BlockBody::read_from_bytes(&request.body).map_err(|err| { - Status::invalid_argument(err.as_report_context("body deserialization error")) + Status::invalid_argument(err.as_report_context("failed to deserialize block body")) })?; let signature = Signature::read_from_bytes(&request.signature).map_err(|err| { - Status::invalid_argument(err.as_report_context("signature deserialization error")) + Status::invalid_argument(err.as_report_context("failed to deserialize signature")) })?; let block_num = header.block_num().as_u32(); - info!( target: COMPONENT, block_num, @@ -84,7 +89,14 @@ impl block_producer_server::BlockProducer for StoreApi { nullifier_count = body.created_nullifiers().len(), ); - self.state.apply_block(header, body, signature).await?; + // Apply the block to the state. + self.state.apply_block(header.clone(), body.clone(), signature.clone()).await?; + + // TODO(currentpr): this need to be a separate task or set of tasks? + // Prove the block. 
+ self.prove_block(ordered_batches, block_inputs, header, signature, body) + .await + .map_err(|err| Status::internal(err.as_report_context("failed to prove block")))?; Ok(Response::new(())) } diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index 3e1999f3a..bdebdd236 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -13,7 +13,6 @@ use miden_node_proto_build::{ use miden_node_utils::panic::{CatchPanicLayer, catch_panic_layer_fn}; use miden_node_utils::tracing::grpc::grpc_trace_fn; use miden_objects::block::BlockSigner; -use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use tokio::net::TcpListener; use tokio::task::JoinSet; use tokio_stream::wrappers::TcpListenerStream; @@ -22,6 +21,7 @@ use tracing::{info, instrument}; use crate::blocks::BlockStore; use crate::db::Db; +pub use crate::server::api::BlockProver; use crate::server::db_maintenance::DbMaintenance; use crate::state::State; use crate::{COMPONENT, DATABASE_MAINTENANCE_INTERVAL, GenesisState}; @@ -37,7 +37,7 @@ pub struct Store { pub rpc_listener: TcpListener, pub ntx_builder_listener: TcpListener, pub block_producer_listener: TcpListener, - pub block_prover: Arc, + pub block_prover: Arc, pub data_directory: PathBuf, /// Server-side timeout for an individual gRPC request. /// diff --git a/proto/proto/internal/store.proto b/proto/proto/internal/store.proto index ced52f1b7..91ef42c7a 100644 --- a/proto/proto/internal/store.proto +++ b/proto/proto/internal/store.proto @@ -110,15 +110,21 @@ service BlockProducer { // Applies a block to the state. message ApplyBlockRequest { + // Ordered batches encoded using [winter_utils::Serializable] implementation for + // [miden_objects::batch::OrderedBatches]. + bytes ordered_batches = 1; + // Block inputs encoded using [winter_utils::Serializable] implementation for + // [miden_objects::block::BlockInputs]. + bytes block_inputs = 2; // Block header encoded using [winter_utils::Serializable] implementation for // [miden_objects::block::BlockHeader]. - bytes header = 1; + bytes header = 3; // Block header encoded using [winter_utils::Serializable] implementation for // [miden_objects::block::BlockBody]. - bytes body = 2; + bytes body = 4; // Signature encoded using [winter_utils::Serializable] implementation for // [crypto::dsa::ecdsa_k256_keccak::Signature]. - bytes signature = 3; + bytes signature = 5; } // GET BLOCK INPUTS From 1275e29d22cb1479abb781c8f4be8d68daea25e2 Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 08:07:59 +1300 Subject: [PATCH 05/13] Fix deser --- .../block-producer/src/block_builder/mod.rs | 32 ------------------- crates/store/src/server/block_producer.rs | 11 ++++--- 2 files changed, 7 insertions(+), 36 deletions(-) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index d22c4dd03..f77e1787c 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -233,38 +233,6 @@ impl BlockBuilder { Ok((ordered_batches, block_inputs, header, body, signature)) } - //#[instrument(target = COMPONENT, name = "block_builder.prove_block", skip_all, err)] - //async fn prove_block( - // &self, - // ordered_batches: OrderedBatches, - // block_inputs: BlockInputs, - // header: BlockHeader, - // signature: Signature, - // body: BlockBody, - //) -> Result { - // // Prove block using header and body from validator. 
- // let block_proof = self - // .block_prover - // .prove(ordered_batches.clone(), header.clone(), block_inputs) - // .await?; - // self.simulate_proving().await; - - // // SAFETY: The header and body are assumed valid and consistent with the proof. - // let proven_block = ProvenBlock::new_unchecked(header, body, signature, block_proof); - // if proven_block.proof_security_level() < MIN_PROOF_SECURITY_LEVEL { - // return Err(BuildBlockError::SecurityLevelTooLow( - // proven_block.proof_security_level(), - // MIN_PROOF_SECURITY_LEVEL, - // )); - // } - // // TODO(sergerad): Consider removing this validation. Once block proving is implemented, - // // this would be replaced with verifying the proof returned from the prover against - // // the block header. - // validate_tx_headers(&proven_block, &ordered_batches.to_transactions())?; - - // Ok(proven_block) - //} - #[instrument(target = COMPONENT, name = "block_builder.commit_block", skip_all, err)] async fn commit_block( &self, diff --git a/crates/store/src/server/block_producer.rs b/crates/store/src/server/block_producer.rs index d1c18de34..4b4143a4a 100644 --- a/crates/store/src/server/block_producer.rs +++ b/crates/store/src/server/block_producer.rs @@ -63,10 +63,13 @@ impl block_producer_server::BlockProducer for StoreApi { // Read the request. let request = request.into_inner(); debug!(target: COMPONENT, ?request); - let ordered_batches = OrderedBatches::read_from_bytes(&request.header).map_err(|err| { - Status::invalid_argument(err.as_report_context("failed to deserialize ordered batches")) - })?; - let block_inputs = BlockInputs::read_from_bytes(&request.header).map_err(|err| { + let ordered_batches = + OrderedBatches::read_from_bytes(&request.ordered_batches).map_err(|err| { + Status::invalid_argument( + err.as_report_context("failed to deserialize ordered batches"), + ) + })?; + let block_inputs = BlockInputs::read_from_bytes(&request.block_inputs).map_err(|err| { Status::invalid_argument(err.as_report_context("failed to deserialize block inputs")) })?; let header = BlockHeader::read_from_bytes(&request.header).map_err(|err| { From b2b1b7fa3eda4498823d8baad5d2971a3bc08317 Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 10:27:43 +1300 Subject: [PATCH 06/13] Optional url for bundled --- bin/node/src/commands/bundled.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/bin/node/src/commands/bundled.rs b/bin/node/src/commands/bundled.rs index 55622903d..417601273 100644 --- a/bin/node/src/commands/bundled.rs +++ b/bin/node/src/commands/bundled.rs @@ -71,9 +71,9 @@ pub enum BundledCommand { #[arg(long = "rpc.url", env = ENV_RPC_URL, value_name = "URL")] rpc_url: Url, - /// The remote block prover's gRPC url. + /// The remote block prover's gRPC url. If not provided, a local block prover will be used. #[arg(long = "block-prover.url", env = ENV_BLOCK_PROVER_URL, value_name = "URL")] - block_prover_url: Url, + block_prover_url: Option, /// Directory in which the Store component should store the database and raw block data. 
#[arg(long = "data-directory", env = ENV_DATA_DIRECTORY, value_name = "DIR")] @@ -163,7 +163,7 @@ impl BundledCommand { #[allow(clippy::too_many_lines)] async fn start( rpc_url: Url, - block_prover_url: Url, + block_prover_url: Option, data_directory: PathBuf, ntx_builder: NtxBuilderConfig, block_producer: BlockProducerConfig, @@ -180,7 +180,12 @@ impl BundledCommand { .await .context("Failed to bind to RPC gRPC endpoint")?; - let block_prover = Arc::new(BlockProver::new_remote(block_prover_url)); + // Initialize local or remote block prover. + let block_prover = if let Some(url) = block_prover_url { + Arc::new(BlockProver::new_remote(url)) + } else { + Arc::new(BlockProver::new_local(None)) + }; let block_producer_address = TcpListener::bind("127.0.0.1:0") .await From 2d0a933e26b9f4e6c45d9eb719efbad58f6d1f1c Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 10:38:39 +1300 Subject: [PATCH 07/13] Machete --- Cargo.lock | 4 ---- bin/node/Cargo.toml | 1 - bin/stress-test/Cargo.toml | 2 -- crates/block-producer/Cargo.toml | 1 - crates/store/Cargo.toml | 30 +++++++++++++++--------------- 5 files changed, 15 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0f313e30a..71be1e71e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2707,7 +2707,6 @@ dependencies = [ "miden-node-utils", "miden-node-validator", "miden-objects", - "miden-remote-prover-client", "tokio", "url", ] @@ -2720,7 +2719,6 @@ dependencies = [ "assert_matches", "futures", "itertools 0.14.0", - "miden-block-prover", "miden-lib", "miden-node-proto", "miden-node-proto-build", @@ -2891,14 +2889,12 @@ dependencies = [ "fs-err", "futures", "miden-air", - "miden-block-prover", "miden-lib", "miden-node-block-producer", "miden-node-proto", "miden-node-store", "miden-node-utils", "miden-objects", - "miden-remote-prover-client", "rand 0.9.2", "rayon", "tokio", diff --git a/bin/node/Cargo.toml b/bin/node/Cargo.toml index de6ef9803..7ebdf8d4a 100644 --- a/bin/node/Cargo.toml +++ b/bin/node/Cargo.toml @@ -32,7 +32,6 @@ miden-node-validator = { workspace = true } miden-objects = { workspace = true } tokio = { features = ["macros", "net", "rt-multi-thread"], workspace = true } url = { workspace = true } -miden-remote-prover-client = { workspace = true } [dev-dependencies] figment = { features = ["env", "test", "toml"], version = "0.10" } diff --git a/bin/stress-test/Cargo.toml b/bin/stress-test/Cargo.toml index 0a469509d..d78e3df8a 100644 --- a/bin/stress-test/Cargo.toml +++ b/bin/stress-test/Cargo.toml @@ -21,14 +21,12 @@ clap = { features = ["derive"], version = "4.5" } fs-err = { workspace = true } futures = { workspace = true } miden-air = { features = ["testing"], workspace = true } -miden-block-prover = { features = ["testing"], workspace = true } miden-lib = { workspace = true } miden-node-block-producer = { workspace = true } miden-node-proto = { workspace = true } miden-node-store = { workspace = true } miden-node-utils = { workspace = true } miden-objects = { workspace = true } -miden-remote-prover-client = { features = ["block-prover"], workspace = true } rand = { workspace = true } rayon = { version = "1.10" } tokio = { workspace = true } diff --git a/crates/block-producer/Cargo.toml b/crates/block-producer/Cargo.toml index 3b0c37300..26d2fdbed 100644 --- a/crates/block-producer/Cargo.toml +++ b/crates/block-producer/Cargo.toml @@ -22,7 +22,6 @@ tracing-forest = ["miden-node-utils/tracing-forest"] anyhow = { workspace = true } futures = { workspace = true } itertools = { workspace = true } 
-miden-block-prover = { workspace = true } miden-lib = { workspace = true } miden-node-proto = { workspace = true } miden-node-proto-build = { features = ["internal"], workspace = true } diff --git a/crates/store/Cargo.toml b/crates/store/Cargo.toml index 779d0c6c4..47c9381eb 100644 --- a/crates/store/Cargo.toml +++ b/crates/store/Cargo.toml @@ -29,21 +29,21 @@ miden-node-proto = { workspace = true } miden-node-proto-build = { features = ["internal"], workspace = true } miden-node-utils = { workspace = true } # TODO remove `testing` from `miden-objects`, required for `BlockProof::new_dummy` -miden-objects = { features = ["std", "testing"], workspace = true } -pretty_assertions = { workspace = true } -rand = { workspace = true } -rand_chacha = { workspace = true } -serde = { features = ["derive"], version = "1" } -thiserror = { workspace = true } -tokio = { features = ["fs", "rt-multi-thread"], workspace = true } -tokio-stream = { features = ["net"], workspace = true } -toml = { version = "0.9" } -tonic = { default-features = true, workspace = true } -tonic-reflection = { workspace = true } -tower-http = { features = ["util"], workspace = true } -tracing = { workspace = true } -miden-remote-prover-client = { features = ["block-prover"], workspace = true } -miden-block-prover = { workspace = true } +miden-block-prover = { workspace = true } +miden-objects = { features = ["std", "testing"], workspace = true } +miden-remote-prover-client = { features = ["block-prover"], workspace = true } +pretty_assertions = { workspace = true } +rand = { workspace = true } +rand_chacha = { workspace = true } +serde = { features = ["derive"], version = "1" } +thiserror = { workspace = true } +tokio = { features = ["fs", "rt-multi-thread"], workspace = true } +tokio-stream = { features = ["net"], workspace = true } +toml = { version = "0.9" } +tonic = { default-features = true, workspace = true } +tonic-reflection = { workspace = true } +tower-http = { features = ["util"], workspace = true } +tracing = { workspace = true } [dev-dependencies] assert_matches = { workspace = true } From 2b7e094f249ee99dbcabcee2a05c2841ec094b00 Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 10:46:10 +1300 Subject: [PATCH 08/13] Update todo comment --- bin/stress-test/src/seeding/mod.rs | 4 ++-- crates/store/src/server/block_producer.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bin/stress-test/src/seeding/mod.rs b/bin/stress-test/src/seeding/mod.rs index 891eaa5a0..9b4490765 100644 --- a/bin/stress-test/src/seeding/mod.rs +++ b/bin/stress-test/src/seeding/mod.rs @@ -234,9 +234,9 @@ async fn generate_blocks( metrics } -/// Sends block data to the store +/// Sends block data to the store for committal. /// -/// Returns the the inserted block header. +/// Returns the the applied block header. async fn apply_block( batches: Vec, block_inputs: BlockInputs, diff --git a/crates/store/src/server/block_producer.rs b/crates/store/src/server/block_producer.rs index 4b4143a4a..17f8104e6 100644 --- a/crates/store/src/server/block_producer.rs +++ b/crates/store/src/server/block_producer.rs @@ -95,8 +95,8 @@ impl block_producer_server::BlockProducer for StoreApi { // Apply the block to the state. self.state.apply_block(header.clone(), body.clone(), signature.clone()).await?; - // TODO(currentpr): this need to be a separate task or set of tasks? - // Prove the block. + // TODO(sergerad): Make block proving async/deferred. I.E. return from this fn before block + // is proven. Prove the block. 
self.prove_block(ordered_batches, block_inputs, header, signature, body) .await .map_err(|err| Status::internal(err.as_report_context("failed to prove block")))?; From 84a80d376cf5ac6ddff99ed74b5a68d0a116f6e3 Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 10:47:27 +1300 Subject: [PATCH 09/13] Add todo comment --- crates/block-producer/src/block_builder/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/block-producer/src/block_builder/mod.rs b/crates/block-producer/src/block_builder/mod.rs index f77e1787c..4d862ebff 100644 --- a/crates/block-producer/src/block_builder/mod.rs +++ b/crates/block-producer/src/block_builder/mod.rs @@ -112,6 +112,7 @@ impl BlockBuilder { ProposedBlock::inject_telemetry(proposed_block); }) .and_then(|(proposed_block, inputs)| self.validate_block(proposed_block, inputs)) + // TODO(sergerad): Add SignedBlock to miden-base and update validate_block to return it. .and_then(|(ordered_batches, block_inputs, header, body, signature)| self.commit_block(mempool, ordered_batches, block_inputs, header, body, signature)) // Handle errors by propagating the error to the root span and rolling back the block. .inspect_err(|err| Span::current().set_error(err)) From ffd41c4b84fff7c996ea2d6049d3a2e95d18c0ea Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 10:51:10 +1300 Subject: [PATCH 10/13] Reinstate simulation --- crates/store/src/server/api.rs | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index c594545cf..777c52fea 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -1,10 +1,12 @@ use std::collections::BTreeSet; use std::sync::Arc; +use std::time::Duration; use miden_block_prover::{BlockProverError, LocalBlockProver}; use miden_node_proto::errors::ConversionError; use miden_node_proto::generated as proto; use miden_node_utils::ErrorReport; +use miden_node_utils::tracing::OpenTelemetrySpanExt; use miden_objects::account::AccountId; use miden_objects::batch::OrderedBatches; use miden_objects::block::{ @@ -20,8 +22,9 @@ use miden_objects::note::Nullifier; use miden_objects::{MIN_PROOF_SECURITY_LEVEL, Word}; use miden_remote_prover_client::RemoteProverClientError; use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; +use rand::Rng; use tonic::{Request, Response, Status}; -use tracing::{info, instrument}; +use tracing::{Span, info, instrument}; use crate::COMPONENT; use crate::state::State; @@ -122,14 +125,26 @@ impl StoreApi { .prove(ordered_batches.clone(), header.clone(), block_inputs) .await?; - // TODO(currentpr): reinstate simulation - //self.simulate_proving().await; + // TODO: remove simulation when block proving is implemented. + self.simulate_proving().await; // SAFETY: The header and body are assumed valid and consistent with the proof. 
let proven_block = ProvenBlock::new_unchecked(header, body, signature, block_proof); Ok(proven_block) } + + #[instrument(target = COMPONENT, name = "store.simulate_proving", skip_all)] + async fn simulate_proving(&self) { + let simulated_proof_time = Duration::ZERO..Duration::from_millis(1); + let proving_duration = rand::rng().random_range(simulated_proof_time.clone()); + + Span::current().set_attribute("range.min_s", simulated_proof_time.start); + Span::current().set_attribute("range.max_s", simulated_proof_time.end); + Span::current().set_attribute("dice_roll_s", proving_duration); + + tokio::time::sleep(proving_duration).await; + } } // UTILITIES From a392c3d23a557178b5a5830ee1207e25d12f9fa8 Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 10:53:20 +1300 Subject: [PATCH 11/13] RM migrations --- .../2025070100000_add_signature_to_block_headers/down.sql | 2 -- .../2025070100000_add_signature_to_block_headers/up.sql | 5 ----- 2 files changed, 7 deletions(-) delete mode 100644 crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql delete mode 100644 crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql diff --git a/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql b/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql deleted file mode 100644 index 3d8c27c3d..000000000 --- a/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/down.sql +++ /dev/null @@ -1,2 +0,0 @@ --- Remove signature column from block_headers table -ALTER TABLE block_headers DROP COLUMN signature; diff --git a/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql b/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql deleted file mode 100644 index 034f0554c..000000000 --- a/crates/store/src/db/migrations/2025070100000_add_signature_to_block_headers/up.sql +++ /dev/null @@ -1,5 +0,0 @@ --- Add signature column to block_headers table -ALTER TABLE block_headers ADD COLUMN signature BLOB NOT NULL DEFAULT ''; - --- Update existing rows to have empty signature (will be populated later if needed) --- The default empty blob will be used for existing entries From 336e606c303c8988e1d294b8f44cbba4c0788bea Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 10:58:02 +1300 Subject: [PATCH 12/13] Move block prover client --- crates/store/src/server/api.rs | 56 ++----------------------- crates/store/src/server/block_prover.rs | 49 ++++++++++++++++++++++ crates/store/src/server/mod.rs | 1 + 3 files changed, 54 insertions(+), 52 deletions(-) create mode 100644 crates/store/src/server/block_prover.rs diff --git a/crates/store/src/server/api.rs b/crates/store/src/server/api.rs index 777c52fea..5f1f42329 100644 --- a/crates/store/src/server/api.rs +++ b/crates/store/src/server/api.rs @@ -2,31 +2,24 @@ use std::collections::BTreeSet; use std::sync::Arc; use std::time::Duration; -use miden_block_prover::{BlockProverError, LocalBlockProver}; +use miden_block_prover::BlockProverError; use miden_node_proto::errors::ConversionError; use miden_node_proto::generated as proto; use miden_node_utils::ErrorReport; use miden_node_utils::tracing::OpenTelemetrySpanExt; +use miden_objects::Word; use miden_objects::account::AccountId; use miden_objects::batch::OrderedBatches; -use miden_objects::block::{ - BlockBody, - BlockHeader, - BlockInputs, - BlockNumber, - BlockProof, - ProvenBlock, -}; +use 
miden_objects::block::{BlockBody, BlockHeader, BlockInputs, BlockNumber, ProvenBlock}; use miden_objects::crypto::dsa::ecdsa_k256_keccak::Signature; use miden_objects::note::Nullifier; -use miden_objects::{MIN_PROOF_SECURITY_LEVEL, Word}; use miden_remote_prover_client::RemoteProverClientError; -use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; use rand::Rng; use tonic::{Request, Response, Status}; use tracing::{Span, info, instrument}; use crate::COMPONENT; +pub use crate::server::block_prover::BlockProver; use crate::state::State; // TODO(currentpr): move error @@ -39,47 +32,6 @@ pub enum StoreProverError { RemoteProvingFailed(#[from] RemoteProverClientError), } -// TODO(currentpr): move block prover -// BLOCK PROVER -// ================================================================================================ - -/// Block prover which allows for proving via either local or remote backend. -/// -/// The local proving variant is intended for development and testing purposes. -/// The remote proving variant is intended for production use. -pub enum BlockProver { - Local(LocalBlockProver), - Remote(RemoteBlockProver), -} - -impl BlockProver { - pub fn new_local(security_level: Option) -> Self { - info!(target: COMPONENT, "Using local block prover"); - let security_level = security_level.unwrap_or(MIN_PROOF_SECURITY_LEVEL); - Self::Local(LocalBlockProver::new(security_level)) - } - - pub fn new_remote(endpoint: impl Into) -> Self { - info!(target: COMPONENT, "Using remote block prover"); - Self::Remote(RemoteBlockProver::new(endpoint)) - } - - #[instrument(target = COMPONENT, skip_all, err)] - pub async fn prove( - &self, - tx_batches: OrderedBatches, - block_header: BlockHeader, - block_inputs: BlockInputs, - ) -> Result { - match self { - Self::Local(prover) => Ok(prover.prove(tx_batches, block_header, block_inputs)?), - Self::Remote(prover) => { - Ok(prover.prove(tx_batches, block_header, block_inputs).await?) - }, - } - } -} - // STORE API // ================================================================================================ diff --git a/crates/store/src/server/block_prover.rs b/crates/store/src/server/block_prover.rs new file mode 100644 index 000000000..f6052c7a5 --- /dev/null +++ b/crates/store/src/server/block_prover.rs @@ -0,0 +1,49 @@ +use miden_block_prover::LocalBlockProver; +use miden_objects::MIN_PROOF_SECURITY_LEVEL; +use miden_objects::batch::OrderedBatches; +use miden_objects::block::{BlockHeader, BlockInputs, BlockProof}; +use miden_remote_prover_client::remote_prover::block_prover::RemoteBlockProver; +use tracing::{info, instrument}; + +use crate::COMPONENT; +use crate::server::api::StoreProverError; + +// BLOCK PROVER +// ================================================================================================ + +/// Block prover which allows for proving via either local or remote backend. +/// +/// The local proving variant is intended for development and testing purposes. +/// The remote proving variant is intended for production use. 
+pub enum BlockProver { + Local(LocalBlockProver), + Remote(RemoteBlockProver), +} + +impl BlockProver { + pub fn new_local(security_level: Option) -> Self { + info!(target: COMPONENT, "Using local block prover"); + let security_level = security_level.unwrap_or(MIN_PROOF_SECURITY_LEVEL); + Self::Local(LocalBlockProver::new(security_level)) + } + + pub fn new_remote(endpoint: impl Into) -> Self { + info!(target: COMPONENT, "Using remote block prover"); + Self::Remote(RemoteBlockProver::new(endpoint)) + } + + #[instrument(target = COMPONENT, skip_all, err)] + pub async fn prove( + &self, + tx_batches: OrderedBatches, + block_header: BlockHeader, + block_inputs: BlockInputs, + ) -> Result { + match self { + Self::Local(prover) => Ok(prover.prove(tx_batches, block_header, block_inputs)?), + Self::Remote(prover) => { + Ok(prover.prove(tx_batches, block_header, block_inputs).await?) + }, + } + } +} diff --git a/crates/store/src/server/mod.rs b/crates/store/src/server/mod.rs index bdebdd236..408459fb7 100644 --- a/crates/store/src/server/mod.rs +++ b/crates/store/src/server/mod.rs @@ -28,6 +28,7 @@ use crate::{COMPONENT, DATABASE_MAINTENANCE_INTERVAL, GenesisState}; mod api; mod block_producer; +mod block_prover; mod db_maintenance; mod ntx_builder; mod rpc_api; From 19d84e45fbf027abfda0d13cf44bbe00e8e9d5e1 Mon Sep 17 00:00:00 2001 From: sergerad Date: Mon, 22 Dec 2025 13:14:13 +1300 Subject: [PATCH 13/13] Fix commented out --- proto/proto/types/blockchain.proto | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/proto/proto/types/blockchain.proto b/proto/proto/types/blockchain.proto index 866a8c7d5..f233f8640 100644 --- a/proto/proto/types/blockchain.proto +++ b/proto/proto/types/blockchain.proto @@ -4,15 +4,8 @@ package blockchain; import "types/account.proto"; import "types/primitives.proto"; -//// BLOCK -//// ================================================================================================ -// -//// Represents a block. -//message Block { -// // Block data encoded using [winter_utils::Serializable] implementation for -// // [miden_objects::block::Block]. -// bytes block = 1; -//} +// BLOCK +// ================================================================================================ // Represents a proposed block. message ProposedBlock {