Skip to content

Commit

Permalink
integration test
Browse files Browse the repository at this point in the history
  • Loading branch information
distractedm1nd committed Sep 18, 2024
1 parent 21116d4 commit 767a07e
Show file tree
Hide file tree
Showing 6 changed files with 163 additions and 144 deletions.
5 changes: 5 additions & 0 deletions crates/prism/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,3 +56,8 @@ sp1-sdk = { workspace = true }
[dev-dependencies]
serial_test = "3.1.1"
criterion = "0.5.1"

[[test]]
name = "integration_tests"
path = "tests/integration_tests.rs"
harness = true
2 changes: 1 addition & 1 deletion crates/prism/src/cfg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,12 @@ use crate::{
consts::{DA_RETRY_COUNT, DA_RETRY_INTERVAL},
da::memory::InMemoryDataAvailabilityLayer,
};
use prism_errors::{DataAvailabilityError, GeneralError, PrismError};
use anyhow::{anyhow, Context, Result};
use clap::{Parser, Subcommand};
use config::{builder::DefaultState, ConfigBuilder, File};
use dirs::home_dir;
use dotenvy::dotenv;
use prism_errors::{DataAvailabilityError, GeneralError, PrismError};
use serde::{Deserialize, Serialize};
use std::{fs, path::Path, sync::Arc};

Expand Down
29 changes: 14 additions & 15 deletions crates/prism/src/node_types/sequencer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,24 +5,12 @@ use ed25519_dalek::{Signer, SigningKey};
use jmt::KeyHash;
use prism_common::tree::{hash, Batch, Digest, Hasher, KeyDirectoryTree, Proof, SnarkableTree};
use std::{self, collections::VecDeque, str::FromStr, sync::Arc};
use tokio::{
sync::{
broadcast,
mpsc::{channel, Receiver, Sender},
Mutex,
},
task::spawn,
time::interval,
};
use tokio::sync::{broadcast, Mutex};

use sp1_sdk::{ProverClient, SP1ProvingKey, SP1Stdin, SP1VerifyingKey};

#[cfg(test)]
use prism_errors::DataAvailabilityError;

use crate::{
cfg::Config,
consts::{CHANNEL_BUFFER_SIZE, DA_RETRY_COUNT, DA_RETRY_INTERVAL},
da::{DataAvailabilityLayer, FinalizedEpoch},
node_types::NodeType,
storage::Database,
Expand Down Expand Up @@ -118,7 +106,14 @@ impl Sequencer {
}

async fn sync_range(&self, start_height: u64, end_height: u64) -> Result<()> {
let saved_epoch = self.db.get_epoch()?;
let saved_epoch = match self.db.get_epoch() {
Ok(epoch) => epoch,
Err(_) => {
debug!("no existing epoch state found, setting epoch to 0");
self.db.set_epoch(&0)?;
0
}
};
let mut current_epoch: u64 = 0;
let mut buffered_operations: VecDeque<Vec<Operation>> = VecDeque::new();
let mut current_height = start_height;
Expand Down Expand Up @@ -311,13 +306,17 @@ impl Sequencer {
let proofs = self.execute_block(operations, tree).await?;

let new_commitment = tree.get_commitment()?;
self.db.set_commitment(&height, &new_commitment);

let finalized_epoch = self
.prove_epoch(height, prev_commitment, new_commitment, proofs)
.await?;

self.da.submit_snark(finalized_epoch).await?;

self.db.set_commitment(&height, &new_commitment)?;
self.db
.set_epoch(&height)
.context("Failed to set new epoch")?;
Ok(())
}

Expand Down
2 changes: 1 addition & 1 deletion crates/prism/src/webserver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ use crate::{
node_types::sequencer::Sequencer,
utils::{verify_signature, SignedContent},
};
use prism_errors::GeneralError;
use anyhow::{Context, Result};
use axum::{
extract::State,
Expand All @@ -18,6 +17,7 @@ use indexed_merkle_tree::{
Hash as TreeHash,
};
use prism_common::{hashchain::Hashchain, operation::Operation};
use prism_errors::GeneralError;
use serde::{Deserialize, Serialize};
use std::{self, str::FromStr, sync::Arc};
use tower_http::cors::CorsLayer;
Expand Down
142 changes: 142 additions & 0 deletions crates/prism/tests/integration_tests.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
#![cfg(test)]

use anyhow::Result;
use ed25519_dalek::{Signer, SigningKey};
use keystore_rs::create_signing_key;
use prism_common::operation::{AccountSource, Operation};
use prism_main::{
cfg::{Config, RedisConfig},
da::{memory::InMemoryDataAvailabilityLayer, DataAvailabilityLayer},
node_types::{lightclient::LightClient, sequencer::Sequencer, NodeType},
storage::{Database, RedisConnection},
webserver::OperationInput,
};
use rand::{rngs::StdRng, Rng, SeedableRng};
use serde_json;
use std::sync::Arc;
use tokio::{spawn, time::Duration};

// base64 standard engine, used to encode verifying keys for `OperationInput`.
use base64::{engine::general_purpose::STANDARD as engine, Engine as _};

/// Builds a signed `CreateAccount` operation for the given id/value pair.
///
/// The sequencer key produces two signatures: one over `id + value` for the
/// `SignedBySequencer` account source, and one over the JSON-serialized
/// operation for the `OperationInput` envelope.
fn create_new_account_operation(id: String, value: String, key: &SigningKey) -> OperationInput {
    let source_signature = key.sign(format!("{}{}", id, value).as_bytes());
    let operation = Operation::CreateAccount {
        id: id.clone(),
        value: value.clone(),
        source: AccountSource::SignedBySequencer {
            signature: source_signature.to_string(),
        },
    };
    let serialized = serde_json::to_string(&operation).unwrap();
    let operation_signature = key.sign(serialized.as_bytes());
    OperationInput {
        operation,
        signed_operation: operation_signature.to_string(),
        public_key: engine.encode(key.verifying_key().to_bytes()),
    }
}

/// Builds a signed `Add` (key update) operation for an existing account.
///
/// NOTE: the operation is signed with a freshly generated key, not the
/// account's original key — this mirrors the original test helper's behavior.
fn create_update_operation(id: String, value: String) -> OperationInput {
    let signing_key = create_signing_key();
    let operation = Operation::Add { id, value };
    let serialized = serde_json::to_string(&operation).unwrap();
    let signature = signing_key.sign(serialized.as_bytes());
    OperationInput {
        operation,
        signed_operation: signature.to_string(),
        public_key: engine.encode(signing_key.verifying_key().to_bytes()),
    }
}

/// Connects to a local Redis instance using the default `RedisConfig` and
/// wraps the connection as the shared `Database` trait object the sequencer
/// expects. Panics if Redis is unreachable (acceptable in test setup).
fn setup_db() -> Arc<Box<dyn Database>> {
    let connection = RedisConnection::new(&RedisConfig::default()).unwrap();
    let database: Box<dyn Database> = Box::new(connection);
    Arc::new(database)
}

#[tokio::test]
/// End-to-end smoke test: runs a sequencer and a light client against an
/// in-memory DA layer, continuously feeds account creations and updates into
/// the sequencer, and waits until the DA layer reaches height 60.
///
/// Requires a local Redis instance (see `setup_db`).
async fn test_light_client_sequencer_talking() -> Result<()> {
    std::env::set_var("RUST_LOG", "DEBUG");
    pretty_env_logger::init();

    let (da_layer, mut height_rx, mut _block_rx) = InMemoryDataAvailabilityLayer::new(10);
    let da_layer = Arc::new(da_layer);
    let db = setup_db();
    let cfg = Config::default();
    let signing_key = create_signing_key();
    let pubkey = engine.encode(signing_key.verifying_key().to_bytes());

    let sequencer = Arc::new(Sequencer::new(
        db.clone(),
        da_layer.clone(),
        cfg.clone(),
        signing_key.clone(),
    )?);

    let lightclient = Arc::new(LightClient::new(
        da_layer,
        cfg.celestia_config.unwrap(),
        Some(pubkey),
    ));

    let seq_clone = sequencer.clone();
    spawn(async move {
        seq_clone.start().await.unwrap();
    });

    let lc_clone = lightclient.clone();
    spawn(async move {
        lc_clone.start().await.unwrap();
    });

    // Background task that keeps feeding operations into the sequencer for
    // the lifetime of the test; it is dropped when the test returns.
    spawn(async move {
        let mut rng = StdRng::from_entropy();
        let mut accounts = Vec::new();
        let mut i = 0;

        loop {
            // Create 1 to 10 new accounts per round.
            let num_new_accounts = rng.gen_range(1..=10);
            for _ in 0..num_new_accounts {
                let new_acc = create_new_account_operation(
                    format!("{}@gmail.com", i),
                    format!("key_{}", i),
                    &signing_key,
                );
                // `sequencer` is already an Arc; no per-call clone is needed.
                sequencer.validate_and_queue_update(&new_acc).await.unwrap();
                accounts.push(format!("{}@gmail.com", i));
                i += 1;
            }

            // Update 5 random existing accounts (if we have at least 5)
            if accounts.len() >= 5 {
                for _ in 0..5 {
                    let account_index = rng.gen_range(0..accounts.len());
                    let account_id = accounts[account_index].clone();
                    let update_op = create_update_operation(
                        account_id,
                        format!("updated_key_{}", rng.gen::<u32>()),
                    );
                    sequencer.validate_and_queue_update(&update_op).await.unwrap();
                }
            }

            tokio::time::sleep(Duration::from_millis(500)).await;
        }
    });

    // Wait until the DA layer reaches height 60. `>=` instead of `==` so the
    // test still terminates if a height notification is skipped or coalesced
    // by the channel (an exact-match check could hang forever).
    while let Ok(height) = height_rx.recv().await {
        if height >= 60 {
            break;
        }
    }

    Ok(())
}
127 changes: 0 additions & 127 deletions tests/integration_tests.rs

This file was deleted.

0 comments on commit 767a07e

Please sign in to comment.