Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions crates/walrus-core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ test-utils = ["walrus-test-utils"]
[dependencies]
base64.workspace = true
bcs.workspace = true
blake2 = "0.10.6"
digest = "0.10.7"
enum_dispatch = { workspace = true }
fastcrypto.workspace = true
hex.workspace = true
Expand Down
12 changes: 6 additions & 6 deletions crates/walrus-core/src/encoding/blob_encoding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ use crate::{
SliverPairIndex,
encoding::{ReedSolomonEncoder, config::EncodingFactory as _},
ensure,
merkle::{MerkleTree, Node, leaf_hash},
merkle::{MerkleTree, Node, leaf_hash_blake2b256},
metadata::{SliverPairMetadata, VerifiedBlobMetadataWithId},
};

Expand Down Expand Up @@ -175,14 +175,14 @@ impl BlobEncoderData {
.map(|sliver_index| {
// Column-major: primary tree gathers row `sliver_index` across all columns
// (strided access).
let primary_hash = MerkleTree::<Blake2b256>::build_from_leaf_hashes(
let primary_hash = MerkleTree::<Blake2b256>::build_from_leaf_hashes_fast(
(0..n_shards).map(|col| symbol_hashes[col * n_shards + sliver_index].clone()),
)
.root();
// Column-major: secondary tree reads column `n_shards - 1 - sliver_index`
// as a contiguous slice.
let sec_col = n_shards - 1 - sliver_index;
let secondary_hash = MerkleTree::<Blake2b256>::build_from_leaf_hashes(
let secondary_hash = MerkleTree::<Blake2b256>::build_from_leaf_hashes_fast(
symbol_hashes[sec_col * n_shards..(sec_col + 1) * n_shards]
.iter()
.cloned(),
Expand Down Expand Up @@ -376,7 +376,7 @@ impl<'a> BlobEncoder<'a> {

// Write hashes directly into the column-major slice.
for (row_index, symbol) in symbols.to_symbols().enumerate() {
hash_col[row_index] = leaf_hash::<Blake2b256>(symbol);
hash_col[row_index] = leaf_hash_blake2b256(symbol);
}

// Collect repair data only for systematic columns.
Expand Down Expand Up @@ -526,7 +526,7 @@ impl<'a> BlobEncoder<'a> {
.encode_all(column.symbols.data())
.expect("size has already been checked");
for (row_index, symbol) in symbols.to_symbols().enumerate() {
hash_col[row_index] = leaf_hash::<Blake2b256>(symbol);
hash_col[row_index] = leaf_hash_blake2b256(symbol);
}
});
});
Expand Down Expand Up @@ -776,7 +776,7 @@ impl<'a> ExpandedMessageMatrix<'a> {
// Column-major layout: symbol_hashes[col * n_shards + row].
for col in 0..n_shards {
for row in 0..n_shards {
symbol_hashes.push(leaf_hash::<Blake2b256>(&self.matrix[row][col]));
symbol_hashes.push(leaf_hash_blake2b256(&self.matrix[row][col]));
}
}

Expand Down
51 changes: 50 additions & 1 deletion crates/walrus-core/src/merkle.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,43 @@
use alloc::{format, vec::Vec};
use core::{fmt::Debug, marker::PhantomData};

use blake2::Blake2b;
use digest::typenum::U32;
use fastcrypto::hash::{Blake2b256, Digest, HashFunction};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tracing::Level;

use crate::ensure;

type Blake2b256Inner = Blake2b<U32>;

/// Computes the Blake2b-256 leaf hash `H(LEAF_PREFIX || input)` directly via the
/// `blake2` crate, bypassing fastcrypto's `new_with_params`-based construction on
/// every call.
///
/// NOTE(review): this matches `leaf_hash::<Blake2b256>` only as long as fastcrypto's
/// `Blake2b256` is plain, unkeyed Blake2b with a 32-byte digest — keep the two in
/// sync (ideally pinned by a test comparing both paths).
pub(crate) fn leaf_hash_blake2b256(input: &[u8]) -> Node {
    // Construct the hasher locally instead of cloning from a `std::sync::LazyLock`
    // static: this module only imports from `alloc`, so reaching into `std` breaks
    // the crate's no_std builds. `Digest::new` merely copies the Blake2b IV, so the
    // per-call cost the lazily-initialized static tried to avoid is negligible.
    let mut hasher = <Blake2b256Inner as digest::Digest>::new();
    digest::Digest::update(&mut hasher, LEAF_PREFIX);
    digest::Digest::update(&mut hasher, input);
    Node::Digest(digest::Digest::finalize(hasher).into())
}

/// Computes the Blake2b-256 inner-node hash `H(INNER_PREFIX || left || right)`
/// for a Merkle tree over [`Node`] children.
fn inner_hash_blake2b256(left: &Node, right: &Node) -> Node {
    // Same no_std-compatible per-call construction as `leaf_hash_blake2b256`.
    let mut hasher = <Blake2b256Inner as digest::Digest>::new();
    digest::Digest::update(&mut hasher, INNER_PREFIX);
    digest::Digest::update(&mut hasher, left.bytes());
    digest::Digest::update(&mut hasher, right.bytes());
    Node::Digest(digest::Digest::finalize(hasher).into())
}

/// The length of the digests used in the merkle tree.
pub const DIGEST_LEN: usize = 32;

Expand Down Expand Up @@ -224,6 +254,14 @@ where

/// Create the [`MerkleTree`] as a commitment to the provided data hashes.
pub fn build_from_leaf_hashes<I>(iter: I) -> Self
where
I: IntoIterator,
I::IntoIter: ExactSizeIterator<Item = Node>,
{
Self::build_tree(iter, inner_hash::<T>)
}

fn build_tree<I>(iter: I, hash_fn: fn(&Node, &Node) -> Node) -> Self
where
I: IntoIterator,
I::IntoIter: ExactSizeIterator<Item = Node>,
Expand Down Expand Up @@ -252,7 +290,7 @@ where

(prev_level_index..new_level_index)
.step_by(2)
.for_each(|index| nodes.push(inner_hash::<T>(&nodes[index], &nodes[index + 1])));
.for_each(|index| nodes.push(hash_fn(&nodes[index], &nodes[index + 1])));

prev_level_index = new_level_index;
level_nodes /= 2;
Expand Down Expand Up @@ -309,6 +347,17 @@ where
}
}

impl MerkleTree<Blake2b256> {
    /// Builds the [`MerkleTree`] over the given leaf hashes, hashing inner nodes
    /// with the specialized Blake2b-256 path (`inner_hash_blake2b256`) instead of
    /// the generic `inner_hash::<T>`.
    pub(crate) fn build_from_leaf_hashes_fast<I>(leaves: I) -> Self
    where
        I: IntoIterator,
        I::IntoIter: ExactSizeIterator<Item = Node>,
    {
        Self::build_tree(leaves, inner_hash_blake2b256)
    }
}

/// Computes the hash of the provided input to be used as a leaf hash of a Merkle tree.
pub fn leaf_hash<T>(input: &[u8]) -> Node
where
Expand Down
Loading