Commit

rebased
distractedm1nd committed Sep 22, 2024
1 parent 3793995 commit 4e2521a
Showing 4 changed files with 146 additions and 35 deletions.
24 changes: 23 additions & 1 deletion crates/prism/src/main.rs
@@ -8,13 +8,16 @@ mod webserver;

use cfg::{initialize_da_layer, load_config};
use clap::Parser;
use ed25519_dalek::VerifyingKey;
use keystore_rs::{KeyChain, KeyStore, KeyStoreType};

use crate::cfg::{CommandLineArgs, Commands};
use node_types::{lightclient::LightClient, sequencer::Sequencer, NodeType};
use std::sync::Arc;
use storage::RedisConnection;

use base64::{engine::general_purpose::STANDARD as engine, Engine as _};

#[macro_use]
extern crate log;

@@ -39,7 +42,26 @@ async fn main() -> std::io::Result<()> {
"celestia configuration not found",
)
})?;
Arc::new(LightClient::new(da, celestia_config, config.verifying_key))

let sequencer_pubkey = config.verifying_key.and_then(|s| {
engine
.decode(&s)
.map_err(|e| error!("Failed to decode base64 string: {}", e))
.ok()
.and_then(|bytes| {
bytes
.try_into()
.map_err(|e| error!("Failed to convert bytes into [u8; 32]: {:?}", e))
.ok()
})
.and_then(|array| {
VerifyingKey::from_bytes(&array)
.map_err(|e| error!("Failed to create VerifyingKey: {}", e))
.ok()
})
});

Arc::new(LightClient::new(da, celestia_config, sequencer_pubkey))
}
Commands::Sequencer {} => {
let redis_config = config.clone().redis_config.ok_or_else(|| {
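With this change, config.verifying_key is expected to hold the base64 encoding of a 32-byte Ed25519 verifying key, which the chain above decodes into a VerifyingKey. A minimal sketch of producing such a value with the crates already in use here (the helper name encoded_verifying_key is illustrative, not part of this commit):

use base64::{engine::general_purpose::STANDARD, Engine as _};
use keystore_rs::create_signing_key;

fn encoded_verifying_key() -> String {
    // Generate a signing key and base64-encode its 32-byte verifying key,
    // matching the format decoded in main.rs above.
    let signing_key = create_signing_key();
    STANDARD.encode(signing_key.verifying_key().to_bytes())
}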
7 changes: 1 addition & 6 deletions crates/prism/src/node_types/lightclient.rs
@@ -41,16 +41,11 @@ impl LightClient {
pub fn new(
da: Arc<dyn DataAvailabilityLayer>,
cfg: CelestiaConfig,
sequencer_pubkey: Option<String>,
sequencer_pubkey: Option<VerifyingKey>,
) -> LightClient {
let client = ProverClient::new();
let (_, verifying_key) = client.setup(PRISM_ELF);

let sequencer_pubkey = sequencer_pubkey.map(|s| {
// TODO: Graceful error handling
VerifyingKey::from_bytes(&hex::decode(s).unwrap().try_into().unwrap()).unwrap()
});

LightClient {
da,
verifying_key,
2 changes: 1 addition & 1 deletion crates/prism/src/node_types/sequencer.rs
@@ -22,7 +22,7 @@ use prism_common::{
hash, Batch, Digest, Hasher, KeyDirectoryTree, NonMembershipProof, Proof, SnarkableTree,
},
};
use prism_errors::{DataAvailabilityError, DatabaseError, GeneralError};
use prism_errors::DataAvailabilityError;

pub const PRISM_ELF: &[u8] = include_bytes!("../../../../elf/riscv32im-succinct-zkvm-elf");

148 changes: 121 additions & 27 deletions crates/prism/tests/integration_tests.rs
@@ -4,20 +4,77 @@
extern crate log;

use anyhow::Result;
use ed25519_dalek::{Signer, SigningKey};
use ed25519_dalek::SigningKey;
use keystore_rs::create_signing_key;
use prism_common::operation::{AccountSource, Operation};
use prism_common::operation::{
CreateAccountArgs, KeyOperationArgs, Operation, PublicKey, ServiceChallengeInput,
SignatureBundle,
};
use prism_main::{
cfg::{CelestiaConfig, Config, RedisConfig},
da::{celestia::CelestiaConnection, DataAvailabilityLayer},
node_types::{lightclient::LightClient, sequencer::Sequencer, NodeType},
storage::{Database, RedisConnection},
webserver::OperationInput,
};
use rand::{rngs::StdRng, Rng, SeedableRng};
use std::sync::Arc;
use std::{collections::HashMap, sync::Arc};
use tokio::{spawn, time::Duration};

use base64::{engine::general_purpose::STANDARD as engine, Engine as _};

fn create_random_user(id: &str, signing_key: SigningKey) -> Operation {
let mut op = Operation::CreateAccount(CreateAccountArgs {
id: id.to_string(),
value: PublicKey::Ed25519(signing_key.verifying_key().to_bytes().to_vec()),
service_id: "test_service".to_string(),
signature: Vec::new(),
challenge: ServiceChallengeInput::Signed(vec![]),
});

op.insert_signature(&signing_key)
.expect("Inserting signature into operation should succeed");
op
}

fn add_key(id: &str, key_idx: u64, new_key: PublicKey, signing_key: SigningKey) -> Operation {
let mut op = Operation::AddKey(KeyOperationArgs {
id: id.to_string(),
value: new_key.clone(),
signature: SignatureBundle {
key_idx,
signature: Vec::new(),
},
});

op.insert_signature(&signing_key)
.expect("Inserting signature into operation should succeed");
op
}

fn revoke_key(
id: &str,
key_idx: u64,
key_to_revoke: PublicKey,
signing_key: SigningKey,
) -> Operation {
let mut op = Operation::RevokeKey(KeyOperationArgs {
id: id.to_string(),
value: key_to_revoke.clone(),
signature: SignatureBundle {
key_idx,
signature: Vec::new(),
},
});
op.insert_signature(&signing_key)
.expect("Inserting signature into operation should succeed");
op
}

fn setup_db() -> Arc<Box<dyn Database>> {
let redis_connection = RedisConnection::new(&RedisConfig::default()).unwrap();
Arc::new(Box::new(redis_connection) as Box<dyn Database>)
}
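
// A hedged usage sketch (not part of this commit) of how the helpers above
// compose into a key lifecycle; the account id, helper name, and key indices
// are illustrative, assuming added keys are appended after the initial key.
#[allow(dead_code)]
fn example_key_lifecycle() -> Vec<Operation> {
    // Register an account, signed by its initial key.
    let initial_key = create_signing_key();
    let create = create_random_user("alice@example.com", initial_key.clone());

    // Add a second key, authorized by the initial key at index 0.
    let second_key = create_signing_key();
    let second_pub = PublicKey::Ed25519(second_key.verifying_key().to_bytes().to_vec());
    let add = add_key("alice@example.com", 0, second_pub, initial_key.clone());

    // Revoke the initial key, authorized by the newly added key at index 1.
    let initial_pub = PublicKey::Ed25519(initial_key.verifying_key().to_bytes().to_vec());
    let revoke = revoke_key("alice@example.com", 1, initial_pub, second_key);

    vec![create, add, revoke]
}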

#[tokio::test]
async fn test_light_client_sequencer_talking() -> Result<()> {
std::env::set_var(
@@ -40,7 +97,7 @@ async fn test_light_client_sequencer_talking() -> Result<()> {
let db = setup_db();
let cfg = Config::default();
let signing_key = create_signing_key();
let pubkey = engine.encode(signing_key.verifying_key().to_bytes());
let pubkey = signing_key.verifying_key();

let sequencer = Arc::new(Sequencer::new(
db.clone(),
@@ -67,29 +124,66 @@
lc_clone.start().await.unwrap();
});

loop {
// Create 1 to 10 new accounts
let num_new_accounts = rng.gen_range(1..=3);
for _ in 0..num_new_accounts {
let new_acc = create_new_account_operation(
format!("{}@gmail.com", i),
format!("key_{}", i),
&signing_key,
);
sequencer
.clone()
.validate_and_queue_update(&new_acc)
.await
.unwrap();
accounts.push(format!("{}@gmail.com", i));
i += 1;
}
let mut rx = lc_da_layer.clone().subscribe_to_heights();
while let Ok(height) = rx.recv().await {
debug!("received height {}", height);
if height == 100 {
break;
spawn(async move {
let mut rng = StdRng::from_entropy();
let mut accounts: HashMap<String, Vec<SigningKey>> = HashMap::new();
let mut i = 0;

loop {
// Create 1 to 3 new accounts
let num_new_accounts = rng.gen_range(1..=3);
for _ in 0..num_new_accounts {
let new_key = create_signing_key();
let new_acc =
create_random_user(format!("{}@gmail.com", i).as_str(), new_key.clone());
sequencer
.clone()
.validate_and_queue_update(&new_acc)
.await
.unwrap();
// `insert` returns the previous value for the key (`None` for a brand-new
// account), so it must not be unwrapped here.
accounts.insert(format!("{}@gmail.com", i), vec![new_key]);
i += 1;
}

// Update 5 random existing accounts (if we have at least 5)
if accounts.len() >= 5 {
for _ in 0..5 {
let account_id = accounts
.keys()
.nth(rng.gen_range(0..accounts.len()))
.unwrap();
let signing_keys = accounts.get(account_id).unwrap();
let signing_key = signing_keys.last().unwrap();
let new_key = create_signing_key();
let new_public_key =
PublicKey::Ed25519(new_key.verifying_key().to_bytes().to_vec());
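// Authorize the update with the account's latest tracked key; key_idx is
// that key's position in signing_keys.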
let update_op = add_key(
account_id,
(signing_keys.len() - 1) as u64,
new_public_key,
signing_key.clone(),
);
sequencer
.clone()
.validate_and_queue_update(&update_op)
.await
.unwrap();
}
}

tokio::time::sleep(Duration::from_millis(5000)).await;
}
});

let mut rx = lc_da_layer.clone().subscribe_to_heights();
while let Ok(height) = rx.recv().await {
debug!("received height {}", height);
if height == 100 {
break;
}
}

Ok(())
}
