feat(eigen-client-extra-features): blob size limit #325

Merged
Changes from 12 commits
2 changes: 2 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 2 additions & 0 deletions core/node/da_clients/Cargo.toml
@@ -57,3 +57,5 @@ pbjson-types.workspace = true
 tokio-stream.workspace = true
 rlp.workspace = true
 kzgpad-rs = { git = "https://github.com/Layr-Labs/kzgpad-rs.git", tag = "v0.1.0" }
+rand.workspace = true
+sha3.workspace = true
119 changes: 91 additions & 28 deletions core/node/da_clients/src/eigen/client.rs
@@ -1,5 +1,6 @@
 use std::{str::FromStr, sync::Arc};
 
+use anyhow::anyhow;
 use async_trait::async_trait;
 use secp256k1::SecretKey;
 use subxt_signer::ExposeSecret;
@@ -9,37 +10,27 @@ use zksync_da_client::{
     DataAvailabilityClient,
 };
 
-use super::sdk::RawEigenClient;
+use super::{memstore::MemStore, sdk::RawEigenClient, Disperser};
 use crate::utils::to_non_retriable_da_error;
 
 #[derive(Debug, Clone)]
 pub struct EigenClient {
-    client: Arc<RawEigenClient>,
+    client: Disperser,
 }
 
 impl EigenClient {
     pub async fn new(config: EigenConfig, secrets: EigenSecrets) -> anyhow::Result<Self> {
         let private_key = SecretKey::from_str(secrets.private_key.0.expose_secret().as_str())
             .map_err(|e| anyhow::anyhow!("Failed to parse private key: {}", e))?;
 
-        match config {
+        let disperser: Disperser = match config.clone() {
             EigenConfig::Disperser(config) => {
-                // TODO: add complete config
-                let client = RawEigenClient::new(
-                    config.disperser_rpc,
-                    config.status_query_interval,
-                    private_key,
-                    config.authenticaded,
-                )
-                .await?;
-                Ok(EigenClient {
-                    client: Arc::new(client),
-                })
+                let client = RawEigenClient::new(private_key, config).await?;
+                Disperser::Remote(Arc::new(client))
             }
-            EigenConfig::MemStore(_) => {
-                todo!()
-            }
-        }
+            EigenConfig::MemStore(config) => Disperser::Memory(MemStore::new(config)),
+        };
+        Ok(Self { client: disperser })
     }
 }

@@ -50,11 +41,26 @@ impl DataAvailabilityClient for EigenClient {
         _: u32, // batch number
         data: Vec<u8>,
     ) -> Result<DispatchResponse, DAError> {
-        let blob_id = self
-            .client
-            .dispatch_blob(data)
-            .await
-            .map_err(to_non_retriable_da_error)?;
+        if let Some(blob_size_limit) = self.blob_size_limit() {
+            if data.len() > blob_size_limit {
+                return Err(DAError {
+                    error: anyhow!("Blob size limit exceeded"),
+                    is_retriable: false,
+                });
+            }
+        }
+
+        let blob_id = match &self.client {
+            Disperser::Remote(remote_disperser) => remote_disperser
+                .dispatch_blob(data)
+                .await
+                .map_err(to_non_retriable_da_error)?,
+            Disperser::Memory(memstore) => memstore
+                .clone()
+                .put_blob(data)
+                .await
+                .map_err(to_non_retriable_da_error)?,
+        };
 
         Ok(DispatchResponse::from(blob_id))
     }
@@ -68,27 +74,32 @@
     }
 
     fn blob_size_limit(&self) -> Option<usize> {
-        Some(1920 * 1024) // 2mb - 128kb as a buffer
+        match self.client.clone() {
+            Disperser::Memory(mem_store) => Some(mem_store.config.max_blob_size_bytes as usize),
+            Disperser::Remote(raw_eigen_client) => {
+                Some(raw_eigen_client.config.blob_size_limit as usize)
+            }
+        }
     }
 }
 
 #[cfg(test)]
 impl EigenClient {
     pub async fn get_blob_data(&self, blob_id: &str) -> anyhow::Result<Option<Vec<u8>>, DAError> {
-        self.client.get_blob_data(blob_id).await
-        /*match &self.disperser {
+        match &self.client {
             Disperser::Remote(remote_client) => remote_client.get_blob_data(blob_id).await,
             Disperser::Memory(memstore) => memstore.clone().get_blob_data(blob_id).await,
-        }*/
+        }
     }
 }
 #[cfg(test)]
 mod tests {
-    use zksync_config::configs::da_client::eigen::DisperserConfig;
+    use zksync_config::configs::da_client::eigen::{DisperserConfig, MemStoreConfig};
     use zksync_types::secrets::PrivateKey;
 
     use super::*;
+    use crate::eigen::blob_info::BlobInfo;
 
     #[tokio::test]
     async fn test_non_auth_dispersal() {
         let config = EigenConfig::Disperser(DisperserConfig {
@@ -147,4 +158,56 @@ mod tests {
         let retrieved_data = client.get_blob_data(&result.blob_id).await.unwrap();
         assert_eq!(retrieved_data.unwrap(), data);
     }
+
+    #[tokio::test]
+    async fn test_eigenda_memory_disperser() {
+        let config = EigenConfig::MemStore(MemStoreConfig {
+            max_blob_size_bytes: 2 * 1024 * 1024, // 2MB
+            blob_expiration: 60 * 2,
+            get_latency: 0,
+            put_latency: 0,
+        });
+        let secrets = EigenSecrets {
+            private_key: PrivateKey::from_str(
+                "d08aa7ae1bb5ddd46c3c2d8cdb5894ab9f54dec467233686ca42629e826ac4c6",
+            )
+            .unwrap(),
+        };
+        let client = EigenClient::new(config, secrets).await.unwrap();
+        let data = vec![1u8; 100];
+        let result = client.dispatch_blob(0, data.clone()).await.unwrap();
+
+        let blob_info: BlobInfo =
+            rlp::decode(&hex::decode(result.blob_id.clone()).unwrap()).unwrap();
+        // TODO: once get inclusion data is added, check it
+
+        let retrieved_data = client.get_blob_data(&result.blob_id).await.unwrap();
+        assert_eq!(retrieved_data.unwrap(), data);
+    }
+
+    #[tokio::test]
+    async fn test_eigenda_dispatch_blob_too_large() {
+        let config = EigenConfig::MemStore(MemStoreConfig {
+            max_blob_size_bytes: 99,
+            blob_expiration: 60 * 2,
+            get_latency: 0,
+            put_latency: 0,
+        });
+        let secrets = EigenSecrets {
+            private_key: PrivateKey::from_str(
+                "d08aa7ae1bb5ddd46c3c2d8cdb5894ab9f54dec467233686ca42629e826ac4c6",
+            )
+            .unwrap(),
+        };
+        let client = EigenClient::new(config, secrets).await.unwrap();
+        let data = vec![1u8; 100];
+        let actual_error = client
+            .dispatch_blob(0, data.clone())
+            .await
+            .err()
+            .unwrap()
+            .error;
+        let expected_error = anyhow!("Blob size limit exceeded");
+        assert_eq!(format!("{}", actual_error), format!("{}", expected_error));
+    }
 }
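
Note for readers of this diff: the `Disperser` enum that `EigenClient` now wraps is imported from `super` and its definition is not among the files shown here. The following is a minimal hypothetical sketch, reconstructed purely from how the enum is constructed and matched on in client.rs above; the actual definition in the PR may differ.

    // Hypothetical reconstruction of the Disperser enum; the real definition
    // lives elsewhere in the eigen module (not shown in this diff).
    use std::sync::Arc;

    #[derive(Debug, Clone)]
    pub enum Disperser {
        // Production path: dispatches blobs to a remote EigenDA disperser.
        Remote(Arc<RawEigenClient>),
        // Test/dev path: an in-memory store mimicking the disperser API.
        Memory(MemStore),
    }

Deriving `Clone` is consistent with `blob_size_limit` calling `self.client.clone()`, and wrapping `RawEigenClient` in an `Arc` keeps that clone cheap for the remote variant.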