Skip to content

Commit

Permalink
add partial decoding logic to be added to read raw blob
Browse files Browse the repository at this point in the history
  • Loading branch information
Ubuntu committed Dec 21, 2024
1 parent 0455f02 commit a5b38bc
Show file tree
Hide file tree
Showing 11 changed files with 584 additions and 80 deletions.
500 changes: 465 additions & 35 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,14 @@ cfg-if = "1.0.0"
reqwest = "0.12.9"
async-trait = "0.1.83"
linked_list_allocator = "0.10.5"
bytes = "1.9.0"

# General
sha2 = { version = "0.10.8", default-features = false }
c-kzg = { version = "2.0.0", default-features = false }
anyhow = { version = "1.0.93", default-features = false }
thiserror = { version = "2.0.4", default-features = false }
rust-kzg-bn254 = { version = "0.2.1", default-features = false }

# Tracing
tracing-loki = "0.2.5"
Expand Down
2 changes: 1 addition & 1 deletion bin/client/justfile
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ run-client-native-against-devnet verbosity='' block_number='' rollup_config_path
L1_BEACON_RPC="http://127.0.0.1:5052"
L2_RPC="http://127.0.0.1:9545"
ROLLUP_NODE_RPC="http://127.0.0.1:7545"
ROLLUP_CONFIG_PATH="../../../optimism/.devnet/rollup.json"
ROLLUP_CONFIG_PATH="~/op-main-repo/.devnet/rollup.json"
if [ -z "{{block_number}}" ]; then
BLOCK_NUMBER=$(cast block finalized --json --rpc-url $L2_RPC | jq -r .number | cast 2d)
Expand Down
2 changes: 1 addition & 1 deletion bin/host/src/eigenda_blobs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ impl OnlineEigenDABlobProvider {
) -> Result<alloy_rlp::Bytes, reqwest::Error> {
let url = format!("{}/{}/{}", self.base, GET_METHOD, cert.slice(1..));

let raw_response = self.inner.get(url).send().await?;
let raw_response = self.inner.get(url).header("raw", "true").send().await?;

raw_response.bytes().await
}
Expand Down
44 changes: 26 additions & 18 deletions bin/host/src/eigenda_fetcher/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -161,29 +161,35 @@ where

// ToDo - remove it once cert is actually correct
kv_write_lock.set(
PreimageKey::new(*keccak256(cert), PreimageKeyType::GlobalGeneric).into(),
PreimageKey::new(*keccak256(cert.clone()), PreimageKeyType::GlobalGeneric).into(),
eigenda_blob.to_vec(),
)?;

// fake a commitment
let t1 = cert.clone();
let mut kzg_commitment = [0u8; 32];
let mut a = 32;
if a > t1.len() {
a = t1.len()
}
kzg_commitment[..a].copy_from_slice(t1.as_ref());
let blob_length = (eigenda_blob.len() + 32 - 1) / 32; // in term of field element
let kzg_proof = cert.clone();
// end of fake
// data
let item_slice = cert.as_ref();
let cert_blob_info = BlobInfo::decode(&mut &item_slice[4..]).unwrap();
info!("cert_blob_info {:?}", cert_blob_info);

// Write all the field elements to the key-value store.
// The preimage oracle key for each field element is the keccak256 hash of
// `abi.encodePacked(cert.KZGCommitment, uint256(i))`
let mut blob_key = [0u8; 80];
blob_key[..32].copy_from_slice(kzg_commitment.as_ref());

let mut blob_key = [0u8; 96];
blob_key[..32].copy_from_slice(cert_blob_info.blob_header.commitment.x.as_ref());
blob_key[32..64].copy_from_slice(cert_blob_info.blob_header.commitment.y.as_ref());

// Todo ensure data_length is always power of 2. Proxy made mistake
let data_size = cert_blob_info.blob_header.data_length as u64;
let blob_length: u64 = data_size / 32;

// proxy could just return the original blob
let mut padded_eigenda_blob = vec![0u8; data_size as usize];
padded_eigenda_blob[..eigenda_blob.len()].copy_from_slice(eigenda_blob.as_ref());

info!("cert_blob_info blob_length {:?}", blob_length);

for i in 0..blob_length {
blob_key[72..].copy_from_slice(i.to_be_bytes().as_ref());
blob_key[88..].copy_from_slice(i.to_be_bytes().as_ref());
let blob_key_hash = keccak256(blob_key.as_ref());

kv_write_lock.set(
Expand All @@ -192,22 +198,24 @@ where
)?;
kv_write_lock.set(
PreimageKey::new(*blob_key_hash, PreimageKeyType::GlobalGeneric).into(),
eigenda_blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(),
padded_eigenda_blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(),
)?;
}

// Write the KZG Proof as the last element.
blob_key[72..].copy_from_slice((blob_length).to_be_bytes().as_ref());
blob_key[88..].copy_from_slice((blob_length).to_be_bytes().as_ref());
let blob_key_hash = keccak256(blob_key.as_ref());

kv_write_lock.set(
PreimageKey::new(*blob_key_hash, PreimageKeyType::Keccak256).into(),
blob_key.into(),
)?;
// proof to be done
kv_write_lock.set(
PreimageKey::new(*blob_key_hash, PreimageKeyType::GlobalGeneric).into(),
kzg_proof.to_vec(),
[1,2,3].to_vec(),
)?;


} else {
panic!("Invalid hint type: {hint_type}. FetcherWithEigenDASupport.prefetch only supports EigenDACommitment hints.");
Expand Down
3 changes: 3 additions & 0 deletions crates/eigenda/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@ alloy-primitives.workspace = true
alloy-rlp.workspace = true
tracing.workspace = true
async-trait.workspace = true
thiserror.workspace = true
bytes.workspace = true
rust-kzg-bn254.workspace = true

[features]
default = []
Expand Down
43 changes: 40 additions & 3 deletions crates/eigenda/src/eigenda.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,22 @@
//! Contains the [EigenDADataSource], which is a concrete implementation of the
//! [DataAvailabilityProvider] trait for the EigenDA protocol.
use crate::eigenda_blobs::EigenDABlobSource;
use crate::traits::EigenDABlobProvider;
use crate::errors::CodecError;

use alloc::{boxed::Box, fmt::Debug};
use alloy_primitives::Bytes;
use bytes::buf::Buf;
use async_trait::async_trait;
use kona_derive::{
sources::EthereumDataSource,
traits::{BlobProvider, ChainProvider, DataAvailabilityProvider},
types::PipelineResult,
errors::{PipelineErrorKind, PipelineError, PipelineEncodingError},
};
use op_alloy_protocol::BlockInfo;
use rust_kzg_bn254::helpers::remove_empty_byte_from_padded_bytes_unchecked;

/// A factory for creating an Ethereum data source provider.
#[derive(Debug, Clone)]
Expand Down Expand Up @@ -61,13 +66,45 @@ where
// just dump all the data out
info!(target: "eth-datasource", "next item {:?}", item);

let eigenda_source_result = self.eigenda_source.next(&item).await;
info!(target: "eigenda-datasource", "eigenda_source_result {:?}", eigenda_source_result);
eigenda_source_result
let padded_eigenda_blob = self.eigenda_source.next(&item).await?;
info!(target: "eigenda-datasource", "eigenda_source_result {:?}", padded_eigenda_blob);

// get the actual blob as encoded inside blob
let eigenda_blob = self.default_decode_blob(padded_eigenda_blob)?;

Ok(eigenda_blob)
}

fn clear(&mut self) {
self.eigenda_source.clear();
self.ethereum_source.clear();
}
}

impl<C, B, A> EigenDADataSource<C, B, A>
where
C: ChainProvider + Send + Sync + Clone + Debug,
B: BlobProvider + Send + Sync + Clone + Debug,
A: EigenDABlobProvider + Send + Sync + Clone + Debug,
{
// https://github.com/Layr-Labs/eigenda/blob/1345e77c8a91fed8e5e33f02c3e32c9ed9921670/api/clients/codecs/default_blob_codec.go#L44
fn default_decode_blob(&self, padded_eigenda_blob: Bytes) -> PipelineResult<Bytes> {
if padded_eigenda_blob.len() < 32 {
// ToDo format error better
//return Err(PipelineErrorKind::Temporary(PipelineError::BadEncoding(PipelineEncodingError::SpanBatchError(()))));
unimplemented!()
}

info!(target: "eigenda-datasource", "padded_eigenda_blob {:?}", padded_eigenda_blob);

let content_size = padded_eigenda_blob.slice(2..6).get_u32();
info!(target: "eigenda-datasource", "content_size {:?}", content_size);
let codec_data = padded_eigenda_blob.slice(32..);

let blob_content = remove_empty_byte_from_padded_bytes_unchecked(codec_data.as_ref());
let blob_content: Bytes = blob_content.into();

Ok(blob_content.slice(..content_size as usize))
}

}
7 changes: 7 additions & 0 deletions crates/eigenda/src/errors.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
use thiserror::Error;

/// Errors produced while decoding an EigenDA blob with the default blob codec.
#[derive(Error, Debug)]
pub enum CodecError {
    /// The blob is missing the mandatory 32-byte codec header.
    #[error("blob does not contain 32 header bytes, meaning it is malformed")]
    BlobTooShort,
}
3 changes: 3 additions & 0 deletions crates/eigenda/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,3 +29,6 @@ pub use eigenda_data::EigenDABlobData;

mod certificate;
pub use certificate::BlobInfo;

mod errors;
pub use errors::CodecError;
4 changes: 4 additions & 0 deletions crates/proof/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,10 @@ alloy-primitives.workspace = true
op-alloy-protocol.workspace = true
op-alloy-rpc-types-engine.workspace = true
op-alloy-genesis = { workspace = true, features = ["serde"] }
alloy-rlp.workspace = true


tracing.workspace = true

# General
async-trait.workspace = true
54 changes: 32 additions & 22 deletions crates/proof/src/eigenda_provider.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,14 @@ use alloc::boxed::Box;
use alloc::sync::Arc;
use alloy_primitives::{keccak256, Bytes};
use async_trait::async_trait;
use hokulea_eigenda::EigenDABlobProvider;
use hokulea_eigenda::{EigenDABlobProvider, BlobInfo};
use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType};

use kona_proof::errors::OracleProviderError;

use crate::hint::ExtendedHintType;
use alloy_rlp::Decodable;
use tracing::info;

/// The oracle-backed EigenDA provider for the client program.
#[derive(Debug, Clone)]
Expand All @@ -33,43 +35,51 @@ impl<T: CommsClient + Sync + Send> EigenDABlobProvider for OracleEigenDAProvider
.await
.map_err(OracleProviderError::Preimage)?;

// the fourth because 0x01010000 in the beginnin is metadata
let item_slice = cert.as_ref();
let cert_blob_info = BlobInfo::decode(&mut &item_slice[4..]).unwrap();
info!("cert_blob_info {:?}", cert_blob_info);

// hack - remove later, when cert actually contain length
let data = self
.oracle
.get(PreimageKey::new(
*keccak256(cert),
PreimageKeyType::GlobalGeneric,
))
.await
.map_err(OracleProviderError::Preimage)?;
let blob_size = data.len();
//

let mut blob = vec![0, blob_size];
let mut field_element_key = [0u8; 80];
.oracle
.get(PreimageKey::new(
*keccak256(cert),
PreimageKeyType::GlobalGeneric,
))
.await
.map_err(OracleProviderError::Preimage)?;


let mut blob: Vec<u8> = vec![0; cert_blob_info.blob_header.data_length as usize];

// 96 because our g1 commitment has 64 bytes in v1
let mut field_element_key = [0u8; 96];

field_element_key[..48].copy_from_slice(commitment.as_ref());
for i in 0..FIELD_ELEMENTS_PER_BLOB {
field_element_key[72..].copy_from_slice(i.to_be_bytes().as_ref());
// ToDo data_length should be power of 2, proxy should have returned it with dividing 32
let data_length = cert_blob_info.blob_header.data_length as u64 / 32;

info!("cert_blob_info.blob_header.data_length {:?}", data_length);

field_element_key[..32].copy_from_slice(&cert_blob_info.blob_header.commitment.x);
field_element_key[32..64].copy_from_slice(&cert_blob_info.blob_header.commitment.y);
for i in 0..data_length {
field_element_key[88..].copy_from_slice(i.to_be_bytes().as_ref());

let mut field_element = [0u8; 32];
self.oracle
.get_exact(
PreimageKey::new(*keccak256(field_element_key), PreimageKeyType::Blob),
PreimageKey::new(*keccak256(field_element_key), PreimageKeyType::GlobalGeneric),
&mut field_element,
)
.await
.map_err(OracleProviderError::Preimage)?;
blob[(i as usize) << 5..(i as usize + 1) << 5].copy_from_slice(field_element.as_ref());
}

info!("cert_blob_info blob {:?}", blob);

tracing::info!(target: "client_oracle", "Retrieved blob {blob_hash:?} from the oracle.");



Ok(data.into())
Ok(blob.into())
}

async fn get_element(&mut self, cert: &Bytes, element: &Bytes) -> Result<Bytes, Self::Error> {
Expand Down

0 comments on commit a5b38bc

Please sign in to comment.