From 30edda404193938fbd55815bed164b5321d7c642 Mon Sep 17 00:00:00 2001 From: Artem Fomiuk <88630083+Artemka374@users.noreply.github.com> Date: Thu, 22 Aug 2024 17:47:12 +0300 Subject: [PATCH] feat: Provide easy prover setup (#2683) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## What ❔ Allow running `zk_inception prover init` without `chain init`. Add docs for running provers and proving a batch. ## Why ❔ To provide an easy way to spin up the prover subsystem locally. ## Checklist - [ ] PR title corresponds to the body of PR (we generate changelog entries from PRs). - [ ] Tests for the changes have been added / updated. - [ ] Documentation comments have been added / updated. - [ ] Code has been formatted via `zk fmt` and `zk lint`. --- etc/env/file_based/general.yaml | 2 +- .../proof_fri_compressor/src/compressor.rs | 9 +- .../bin/proof_fri_compressor/src/main.rs | 5 + prover/crates/bin/prover_cli/src/cli.rs | 7 +- .../prover_cli/src/commands/insert_batch.rs | 43 ++++++ .../prover_cli/src/commands/insert_version.rs | 52 +++++++ .../crates/bin/prover_cli/src/commands/mod.rs | 2 + .../src/gpu_prover_job_processor.rs | 5 +- .../prover_fri/src/prover_job_processor.rs | 5 +- prover/crates/bin/witness_generator/README.md | 73 --------- .../witness_generator/src/leaf_aggregation.rs | 16 +- .../crates/bin/witness_generator/src/main.rs | 14 +- .../witness_generator/src/node_aggregation.rs | 8 +- .../witness_generator/src/recursion_tip.rs | 7 +- .../bin/witness_generator/src/scheduler.rs | 17 +- .../bin/witness_generator/tests/basic_test.rs | 20 ++- prover/docs/05_proving_batch.md | 145 ++++++++++++++++++ zk_toolbox/Cargo.lock | 1 + zk_toolbox/crates/config/src/secrets.rs | 12 ++ zk_toolbox/crates/zk_inception/README.md | 9 +- .../src/commands/prover/args/init.rs | 101 +++++++++++- .../src/commands/prover/args/run.rs | 2 + .../src/commands/prover/generate_sk.rs | 6 +- .../zk_inception/src/commands/prover/init.rs | 78 ++++++++-- .../zk_inception/src/commands/prover/run.rs | 18 ++- .../crates/zk_inception/src/messages.rs | 1 + zk_toolbox/crates/zk_supervisor/Cargo.toml | 1 + zk_toolbox/crates/zk_supervisor/README.md | 7 + .../crates/zk_supervisor/src/commands/mod.rs | 1 + .../src/commands/prover_version.rs | 41 +++++ zk_toolbox/crates/zk_supervisor/src/main.rs | 9 +- .../crates/zk_supervisor/src/messages.rs | 1 + 32 files changed, 589 insertions(+), 129 deletions(-) create mode 100644 prover/crates/bin/prover_cli/src/commands/insert_batch.rs create mode 100644 prover/crates/bin/prover_cli/src/commands/insert_version.rs create mode 100644 prover/docs/05_proving_batch.md create mode 100644 zk_toolbox/crates/zk_supervisor/src/commands/prover_version.rs diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 8e7e6eca428..19921cf536c 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -140,7 +140,7 @@ prover: file_backed: file_backed_base_path: artifacts max_retries: 10 - setup_data_path: vk_setup_data_generator_server_fri/data + setup_data_path: crates/bin/vk_setup_data_generator_server_fri/data prometheus_port: 3315 max_attempts: 10 generation_timeout_in_secs: 600 diff --git a/prover/crates/bin/proof_fri_compressor/src/compressor.rs b/prover/crates/bin/proof_fri_compressor/src/compressor.rs index 34a2c965a31..067114ca5a6 100644 --- a/prover/crates/bin/proof_fri_compressor/src/compressor.rs +++ b/prover/crates/bin/proof_fri_compressor/src/compressor.rs @@ -35,6 +35,7 @@ pub struct ProofCompressor {
compression_mode: u8, max_attempts: u32, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, } impl ProofCompressor { @@ -44,6 +45,7 @@ impl ProofCompressor { compression_mode: u8, max_attempts: u32, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, ) -> Self { Self { blob_store, @@ -51,6 +53,7 @@ impl ProofCompressor { compression_mode, max_attempts, protocol_version, + setup_data_path, } } @@ -59,8 +62,9 @@ impl ProofCompressor { l1_batch: L1BatchNumber, proof: ZkSyncRecursionLayerProof, _compression_mode: u8, + setup_data_path: String, ) -> anyhow::Result<FinalProof> { - let keystore = Keystore::default(); + let keystore = Keystore::new_with_setup_data_path(setup_data_path); let scheduler_vk = keystore .load_recursive_layer_verification_key( ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8, @@ -174,8 +178,9 @@ impl JobProcessor for ProofCompressor { ) -> JoinHandle<anyhow::Result<Self::JobArtifacts>> { let compression_mode = self.compression_mode; let block_number = *job_id; + let setup_data_path = self.setup_data_path.clone(); tokio::task::spawn_blocking(move || { - Self::compress_proof(block_number, job, compression_mode) + Self::compress_proof(block_number, job, compression_mode, setup_data_path) }) } diff --git a/prover/crates/bin/proof_fri_compressor/src/main.rs b/prover/crates/bin/proof_fri_compressor/src/main.rs index a1a8ac90253..e2086b228b6 100644 --- a/prover/crates/bin/proof_fri_compressor/src/main.rs +++ b/prover/crates/bin/proof_fri_compressor/src/main.rs @@ -59,6 +59,7 @@ async fn main() -> anyhow::Result<()> { let object_store_config = ProverObjectStoreConfig( general_config .prover_config + .clone() .expect("ProverConfig") .prover_object_store .context("ProverObjectStoreConfig")?, @@ -75,6 +76,10 @@ async fn main() -> anyhow::Result<()> { config.compression_mode, config.max_attempts, protocol_version, + general_config + .prover_config + .expect("ProverConfig doesn't exist") + .setup_data_path, ); let (stop_sender, stop_receiver) = watch::channel(false); diff --git a/prover/crates/bin/prover_cli/src/cli.rs b/prover/crates/bin/prover_cli/src/cli.rs index 0c7022cae29..41ef9498005 100644 --- a/prover/crates/bin/prover_cli/src/cli.rs +++ b/prover/crates/bin/prover_cli/src/cli.rs @@ -2,7 +2,8 @@ use clap::{command, Args, Parser, Subcommand}; use zksync_types::url::SensitiveUrl; use crate::commands::{ - config, debug_proof, delete, get_file_info, requeue, restart, stats, status::StatusCommand, + config, debug_proof, delete, get_file_info, insert_batch, insert_version, requeue, restart, + stats, status::StatusCommand, }; pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION"); @@ -27,6 +28,8 @@ impl ProverCLI { ProverCommand::Restart(args) => restart::run(args).await?, ProverCommand::DebugProof(args) => debug_proof::run(args).await?, ProverCommand::Stats(args) => stats::run(args, self.config).await?, + ProverCommand::InsertVersion(args) => insert_version::run(args, self.config).await?, + ProverCommand::InsertBatch(args) => insert_batch::run(args, self.config).await?, }; Ok(()) } @@ -55,4 +58,6 @@ pub enum ProverCommand { Restart(restart::Args), #[command(about = "Displays L1 Batch proving stats for a given period")] Stats(stats::Options), + InsertVersion(insert_version::Args), + InsertBatch(insert_batch::Args), } diff --git a/prover/crates/bin/prover_cli/src/commands/insert_batch.rs b/prover/crates/bin/prover_cli/src/commands/insert_batch.rs new file mode 100644 index 00000000000..add1474633d --- /dev/null +++ b/prover/crates/bin/prover_cli/src/commands/insert_batch.rs @@ -0,0
+1,43 @@ +use anyhow::Context as _; +use clap::Args as ClapArgs; +use zksync_basic_types::{ + protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch}, + L1BatchNumber, +}; +use zksync_db_connection::connection_pool::ConnectionPool; +use zksync_prover_dal::{Prover, ProverDal}; + +use crate::cli::ProverCLIConfig; + +#[derive(ClapArgs)] +pub struct Args { + #[clap(short, long)] + pub number: L1BatchNumber, + #[clap(short, long)] + pub version: u16, + #[clap(short, long)] + pub patch: u32, +} + +pub async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<()> { + let connection = ConnectionPool::<Prover>::singleton(config.db_url) + .build() + .await + .context("failed to build a prover_connection_pool")?; + let mut conn = connection.connection().await.unwrap(); + + let protocol_version = ProtocolVersionId::try_from(args.version) + .map_err(|_| anyhow::anyhow!("Invalid protocol version"))?; + + let protocol_version_patch = VersionPatch(args.patch); + + conn.fri_witness_generator_dal() + .save_witness_inputs( + args.number, + &format!("witness_inputs_{}", args.number.0), + ProtocolSemanticVersion::new(protocol_version, protocol_version_patch), + ) + .await; + + Ok(()) +} diff --git a/prover/crates/bin/prover_cli/src/commands/insert_version.rs b/prover/crates/bin/prover_cli/src/commands/insert_version.rs new file mode 100644 index 00000000000..7f30719a713 --- /dev/null +++ b/prover/crates/bin/prover_cli/src/commands/insert_version.rs @@ -0,0 +1,52 @@ +use std::str::FromStr; + +use anyhow::Context as _; +use clap::Args as ClapArgs; +use zksync_basic_types::{ + protocol_version::{ + L1VerifierConfig, ProtocolSemanticVersion, ProtocolVersionId, VersionPatch, + }, + H256, +}; +use zksync_db_connection::connection_pool::ConnectionPool; +use zksync_prover_dal::{Prover, ProverDal}; + +use crate::cli::ProverCLIConfig; + +#[derive(ClapArgs)] +pub struct Args { + #[clap(short, long)] + pub version: u16, + #[clap(short, long)] + pub patch: u32, + #[clap(short, long)] + pub snark_wrapper: String, +} + +pub async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<()> { + let connection = ConnectionPool::<Prover>::singleton(config.db_url) + .build() + .await + .context("failed to build a prover_connection_pool")?; + let mut conn = connection.connection().await.unwrap(); + + let protocol_version = ProtocolVersionId::try_from(args.version) + .map_err(|_| anyhow::anyhow!("Invalid protocol version"))?; + + let protocol_version_patch = VersionPatch(args.patch); + + let snark_wrapper = H256::from_str(&args.snark_wrapper).unwrap_or_else(|_| { + panic!("Invalid snark wrapper hash"); + }); + + conn.fri_protocol_versions_dal() + .save_prover_protocol_version( + ProtocolSemanticVersion::new(protocol_version, protocol_version_patch), + L1VerifierConfig { + recursion_scheduler_level_vk_hash: snark_wrapper, + }, + ) + .await; + + Ok(()) +} diff --git a/prover/crates/bin/prover_cli/src/commands/mod.rs b/prover/crates/bin/prover_cli/src/commands/mod.rs index d9dde52284b..bafe229884b 100644 --- a/prover/crates/bin/prover_cli/src/commands/mod.rs +++ b/prover/crates/bin/prover_cli/src/commands/mod.rs @@ -2,6 +2,8 @@ pub(crate) mod config; pub(crate) mod debug_proof; pub(crate) mod delete; pub(crate) mod get_file_info; +pub(crate) mod insert_batch; +pub(crate) mod insert_version; pub(crate) mod requeue; pub(crate) mod restart; pub(crate) mod stats; diff --git a/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs b/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs index
4407dbcd852..dc8594cbdc1 100644 --- a/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs +++ b/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs @@ -112,7 +112,8 @@ pub mod gpu_prover { .clone(), SetupLoadMode::FromDisk => { let started_at = Instant::now(); - let keystore = Keystore::default(); + let keystore = + Keystore::new_with_setup_data_path(self.config.setup_data_path.clone()); let artifact: GoldilocksGpuProverSetupData = keystore .load_gpu_setup_data_for_circuit_type(key.clone()) .context("load_gpu_setup_data_for_circuit_type()")?; @@ -347,7 +348,7 @@ pub mod gpu_prover { &config.specialized_group_id, prover_setup_metadata_list ); - let keystore = Keystore::default(); + let keystore = Keystore::new_with_setup_data_path(config.setup_data_path.clone()); for prover_setup_metadata in prover_setup_metadata_list { let key = setup_metadata_to_setup_data_key(&prover_setup_metadata); let setup_data = keystore diff --git a/prover/crates/bin/prover_fri/src/prover_job_processor.rs b/prover/crates/bin/prover_fri/src/prover_job_processor.rs index 09c9d38348f..2df1b626497 100644 --- a/prover/crates/bin/prover_fri/src/prover_job_processor.rs +++ b/prover/crates/bin/prover_fri/src/prover_job_processor.rs @@ -85,7 +85,8 @@ impl Prover { .clone(), SetupLoadMode::FromDisk => { let started_at = Instant::now(); - let keystore = Keystore::default(); + let keystore = + Keystore::new_with_setup_data_path(self.config.setup_data_path.clone()); let artifact: GoldilocksProverSetupData = keystore .load_cpu_setup_data_for_circuit_type(key.clone()) .context("get_cpu_setup_data_for_circuit_type()")?; @@ -298,7 +299,7 @@ pub fn load_setup_data_cache(config: &FriProverConfig) -> anyhow::Result<SetupLoadMode> { - let keystore = Keystore::default(); + let keystore = Keystore::new_with_setup_data_path(config.setup_data_path.clone()); diff --git a/prover/crates/bin/witness_generator/README.md b/prover/crates/bin/witness_generator/README.md deleted file mode 100644 --- a/prover/crates/bin/witness_generator/README.md +++ /dev/null @@ -1,73 +0,0 @@ [...] -First of all, you need to have the input file (`witness_inputs_<l1_batch_number>.bin` generated by different core -components). - -This file is stored by prover gateway in GCS (or your choice of object storage -- check config). To access it from GCS -(assuming you have access to the bucket), run: - -```shell -gsutil cp gs://your_bucket/witness_inputs/witness_inputs_<l1_batch_number>.bin <local_path> -``` - -Note, that you need to have `gsutil` installed, and you need to have access to the bucket. - -Now, database needs to know about the batch and the protocol version it should use. Check the latest protocol version in -the codebase by checking const `PROVER_PROTOCOL_SEMANTIC_VERSION` or run the binary in `prover` workspace: - -```console -cargo run --bin prover_version -``` - -It will give you the latest prover protocol version in a semver format, like `0.24.2`, you need to know only minor and -patch versions. Now, go to the `prover/crates/bin/vk_setup_data_generator_server_fri/data/commitments.json` and get -`snark_wrapper` value from it. Then, you need to insert the info about protocol version into the database. First, -connect to the database, e.g. locally you can do it like that: - -```shell -psql postgres://postgres:notsecurepassword@localhost/prover_local -``` - -And run the following query: - -```shell -INSERT INTO -prover_fri_protocol_versions ( -id, -recursion_scheduler_level_vk_hash, -created_at, -protocol_version_patch -) -VALUES -(<version>, '<snark_wrapper>'::bytea, NOW(), <patch>) -ON CONFLICT (id, protocol_version_patch) DO NOTHING - -``` - -Now, you need to insert the batch into the database.
Run the following query: - -```shell -INSERT INTO -witness_inputs_fri ( -l1_batch_number, -witness_inputs_blob_url, -protocol_version, -status, -created_at, -updated_at, -protocol_version_patch -) -VALUES -(<l1_batch_number>, 'witness_inputs_<l1_batch_number>.bin', <version>, 'queued', NOW(), NOW(), <patch>) -ON CONFLICT (l1_batch_number) DO NOTHING -``` - -Finally, run the basic witness generator itself: - -```shell -API_PROMETHEUS_LISTENER_PORT=3116 zk f cargo run --release --bin zksync_witness_generator -- --round=basic_circuits -``` - -And you are good to go! diff --git a/prover/crates/bin/witness_generator/src/leaf_aggregation.rs b/prover/crates/bin/witness_generator/src/leaf_aggregation.rs index d8cad84e777..2f449418797 100644 --- a/prover/crates/bin/witness_generator/src/leaf_aggregation.rs +++ b/prover/crates/bin/witness_generator/src/leaf_aggregation.rs @@ -72,6 +72,7 @@ pub struct LeafAggregationWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, } impl LeafAggregationWitnessGenerator { @@ -80,12 +81,14 @@ impl LeafAggregationWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, ) -> Self { Self { config, object_store, prover_connection_pool, protocol_version, + setup_data_path, } } @@ -131,9 +134,13 @@ impl JobProcessor for LeafAggregationWitnessGenerator { tracing::info!("Processing leaf aggregation job {:?}", metadata.id); Ok(Some(( metadata.id, - prepare_leaf_aggregation_job(metadata, &*self.object_store) - .await - .context("prepare_leaf_aggregation_job()")?, + prepare_leaf_aggregation_job( + metadata, + &*self.object_store, + self.setup_data_path.clone(), + ) + .await + .context("prepare_leaf_aggregation_job()")?, ))) } @@ -219,6 +226,7 @@ impl JobProcessor for LeafAggregationWitnessGenerator { pub async fn prepare_leaf_aggregation_job( metadata: LeafAggregationJobMetadata, object_store: &dyn ObjectStore, + setup_data_path: String, ) -> anyhow::Result<LeafAggregationWitnessGeneratorJob> { let started_at = Instant::now(); let closed_form_input = get_artifacts(&metadata, object_store).await; @@ -227,7 +235,7 @@ pub async fn prepare_leaf_aggregation_job( .observe(started_at.elapsed()); let started_at = Instant::now(); - let keystore = Keystore::default(); + let keystore = Keystore::new_with_setup_data_path(setup_data_path); let base_vk = keystore .load_base_layer_verification_key(metadata.circuit_id) .context("get_base_layer_vk_for_circuit_type()")?; diff --git a/prover/crates/bin/witness_generator/src/main.rs b/prover/crates/bin/witness_generator/src/main.rs index a88dd8726d3..50c95516860 100644 --- a/prover/crates/bin/witness_generator/src/main.rs +++ b/prover/crates/bin/witness_generator/src/main.rs @@ -80,9 +80,10 @@ async fn main() -> anyhow::Result<()> { let store_factory = ObjectStoreFactory::new(object_store_config.0); let config = general_config .witness_generator_config - .context("witness generator config")?; + .context("witness generator config")? + .clone(); - let prometheus_config = general_config.prometheus_config; + let prometheus_config = general_config.prometheus_config.clone(); // If the prometheus listener port is not set in the witness generator config, use the one from the prometheus config.
let prometheus_listener_port = if let Some(port) = config.prometheus_listener_port { @@ -158,6 +159,8 @@ async fn main() -> anyhow::Result<()> { let mut tasks = Vec::new(); tasks.push(tokio::spawn(prometheus_task)); + let setup_data_path = prover_config.setup_data_path.clone(); + for round in rounds { tracing::info!( "initializing the {:?} witness generator, batch size: {:?} with protocol_version: {:?}", @@ -168,8 +171,7 @@ async fn main() -> anyhow::Result<()> { let witness_generator_task = match round { AggregationRound::BasicCircuits => { - let setup_data_path = prover_config.setup_data_path.clone(); - let vk_commitments = get_cached_commitments(Some(setup_data_path)); + let vk_commitments = get_cached_commitments(Some(setup_data_path.clone())); assert_eq!( vk_commitments, vk_commitments_in_db, @@ -204,6 +206,7 @@ async fn main() -> anyhow::Result<()> { store_factory.create_store().await?, prover_connection_pool.clone(), protocol_version, + setup_data_path.clone(), ); generator.run(stop_receiver.clone(), opt.batch_size) } @@ -213,6 +216,7 @@ async fn main() -> anyhow::Result<()> { store_factory.create_store().await?, prover_connection_pool.clone(), protocol_version, + setup_data_path.clone(), ); generator.run(stop_receiver.clone(), opt.batch_size) } @@ -222,6 +226,7 @@ async fn main() -> anyhow::Result<()> { store_factory.create_store().await?, prover_connection_pool.clone(), protocol_version, + setup_data_path.clone(), ); generator.run(stop_receiver.clone(), opt.batch_size) } @@ -231,6 +236,7 @@ async fn main() -> anyhow::Result<()> { store_factory.create_store().await?, prover_connection_pool.clone(), protocol_version, + setup_data_path.clone(), ); generator.run(stop_receiver.clone(), opt.batch_size) } diff --git a/prover/crates/bin/witness_generator/src/node_aggregation.rs b/prover/crates/bin/witness_generator/src/node_aggregation.rs index a7dce2a513d..b6fc6b8f7c6 100644 --- a/prover/crates/bin/witness_generator/src/node_aggregation.rs +++ b/prover/crates/bin/witness_generator/src/node_aggregation.rs @@ -70,6 +70,7 @@ pub struct NodeAggregationWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, } impl NodeAggregationWitnessGenerator { @@ -78,12 +79,14 @@ impl NodeAggregationWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, ) -> Self { Self { config, object_store, prover_connection_pool, protocol_version, + setup_data_path, } } @@ -241,7 +244,7 @@ impl JobProcessor for NodeAggregationWitnessGenerator { tracing::info!("Processing node aggregation job {:?}", metadata.id); Ok(Some(( metadata.id, - prepare_job(metadata, &*self.object_store) + prepare_job(metadata, &*self.object_store, self.setup_data_path.clone()) .await .context("prepare_job()")?, ))) @@ -326,6 +329,7 @@ impl JobProcessor for NodeAggregationWitnessGenerator { pub async fn prepare_job( metadata: NodeAggregationJobMetadata, object_store: &dyn ObjectStore, + setup_data_path: String, ) -> anyhow::Result<NodeAggregationWitnessGeneratorJob> { let started_at = Instant::now(); let artifacts = get_artifacts(&metadata, object_store).await; @@ -334,7 +338,7 @@ pub async fn prepare_job( .observe(started_at.elapsed()); let started_at = Instant::now(); - let keystore = Keystore::default(); + let keystore = Keystore::new_with_setup_data_path(setup_data_path); let leaf_vk = keystore .load_recursive_layer_verification_key(metadata.circuit_id)
.context("get_recursive_layer_vk_for_circuit_type")?; diff --git a/prover/crates/bin/witness_generator/src/recursion_tip.rs b/prover/crates/bin/witness_generator/src/recursion_tip.rs index 2a57ffff85f..e05a0cc38cf 100644 --- a/prover/crates/bin/witness_generator/src/recursion_tip.rs +++ b/prover/crates/bin/witness_generator/src/recursion_tip.rs @@ -75,6 +75,7 @@ pub struct RecursionTipWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, } impl RecursionTipWitnessGenerator { @@ -83,12 +84,14 @@ impl RecursionTipWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, ) -> Self { Self { config, object_store, prover_connection_pool, protocol_version, + setup_data_path, } } @@ -172,6 +175,7 @@ impl JobProcessor for RecursionTipWitnessGenerator { l1_batch_number, final_node_proof_job_ids, &*self.object_store, + self.setup_data_path.clone(), ) .await .context("prepare_job()")?, @@ -284,6 +288,7 @@ pub async fn prepare_job( l1_batch_number: L1BatchNumber, final_node_proof_job_ids: Vec<(u8, u32)>, object_store: &dyn ObjectStore, + setup_data_path: String, ) -> anyhow::Result<RecursionTipWitnessGeneratorJob> { let started_at = Instant::now(); let recursion_tip_proofs = @@ -291,7 +296,7 @@ WITNESS_GENERATOR_METRICS.blob_fetch_time[&AggregationRound::RecursionTip.into()] .observe(started_at.elapsed()); - let keystore = Keystore::default(); + let keystore = Keystore::new_with_setup_data_path(setup_data_path); let node_vk = keystore .load_recursive_layer_verification_key( ZkSyncRecursionLayerStorageType::NodeLayerCircuit as u8, diff --git a/prover/crates/bin/witness_generator/src/scheduler.rs b/prover/crates/bin/witness_generator/src/scheduler.rs index f69d338061e..c389e037ffa 100644 --- a/prover/crates/bin/witness_generator/src/scheduler.rs +++ b/prover/crates/bin/witness_generator/src/scheduler.rs @@ -57,6 +57,7 @@ pub struct SchedulerWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, } impl SchedulerWitnessGenerator { @@ -65,12 +66,14 @@ impl SchedulerWitnessGenerator { object_store: Arc<dyn ObjectStore>, prover_connection_pool: ConnectionPool<Prover>, protocol_version: ProtocolSemanticVersion, + setup_data_path: String, ) -> Self { Self { config, object_store, prover_connection_pool, protocol_version, + setup_data_path, } } @@ -147,9 +150,14 @@ impl JobProcessor for SchedulerWitnessGenerator { Ok(Some(( l1_batch_number, - prepare_job(l1_batch_number, recursion_tip_job_id, &*self.object_store) - .await - .context("prepare_job()")?, + prepare_job( + l1_batch_number, + recursion_tip_job_id, + &*self.object_store, + self.setup_data_path.clone(), + ) + .await + .context("prepare_job()")?, ))) } @@ -258,6 +266,7 @@ pub async fn prepare_job( l1_batch_number: L1BatchNumber, recursion_tip_job_id: u32, object_store: &dyn ObjectStore, + setup_data_path: String, ) -> anyhow::Result<SchedulerWitnessGeneratorJob> { let started_at = Instant::now(); let wrapper = object_store.get(recursion_tip_job_id).await?; @@ -271,7 +280,7 @@ pub async fn prepare_job( .observe(started_at.elapsed()); let started_at = Instant::now(); - let keystore = Keystore::default(); + let keystore = Keystore::new_with_setup_data_path(setup_data_path); let node_vk = keystore .load_recursive_layer_verification_key( ZkSyncRecursionLayerStorageType::NodeLayerCircuit as u8, diff --git
a/prover/crates/bin/witness_generator/tests/basic_test.rs b/prover/crates/bin/witness_generator/tests/basic_test.rs index f8a21179adb..b034ab57d82 100644 --- a/prover/crates/bin/witness_generator/tests/basic_test.rs +++ b/prover/crates/bin/witness_generator/tests/basic_test.rs @@ -50,9 +50,13 @@ async fn test_leaf_witness_gen() { .await .unwrap(); - let job = prepare_leaf_aggregation_job(leaf_aggregation_job_metadata, &*object_store) - .await - .unwrap(); + let job = prepare_leaf_aggregation_job( + leaf_aggregation_job_metadata, + &*object_store, + "crates/bin/vk_setup_data_generator/data".to_string(), + ) + .await + .unwrap(); let artifacts = LeafAggregationWitnessGenerator::process_job_impl( job, @@ -139,9 +143,13 @@ async fn test_node_witness_gen() { prover_job_ids_for_proofs: vec![5211320], }; - let job = node_aggregation::prepare_job(node_aggregation_job_metadata, &*object_store) - .await - .unwrap(); + let job = node_aggregation::prepare_job( + node_aggregation_job_metadata, + &*object_store, + "crates/bin/vk_setup_data_generator/data".to_string(), + ) + .await + .unwrap(); let artifacts = NodeAggregationWitnessGenerator::process_job_impl( job, diff --git a/prover/docs/05_proving_batch.md b/prover/docs/05_proving_batch.md new file mode 100644 index 00000000000..441a8225f86 --- /dev/null +++ b/prover/docs/05_proving_batch.md @@ -0,0 +1,145 @@ +# Proving a batch + +If you got to this section, then most likely you are wondering how to prove and verify a batch by yourself. After releases `prover-v15.1.0` and `core-v24.9.0`, the prover subsystem doesn't need access to the core database anymore, which means you can run only the prover subsystem and prove batches without running the whole core system. This guide will help you with that. + +## Requirements + +### Hardware + +Setup for running the whole process should be the same as described [here](./01_gcp_vm.md), except you need 48 GB of GPU memory, which requires an NVIDIA A100 80GB GPU. + +### Prerequisites + +First of all, you need to install CUDA drivers; all other things will be dealt with by the `zk_inception` and `prover_cli` tools. For that, check the following [guide](./02_setup.md) (you can skip the bellman-cuda step). + +Install the prerequisites, which you can find [here](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/setup-dev.md). Note that if you are not using a Google VM instance, you also need to install [gcloud](https://cloud.google.com/sdk/docs/install#deb). + +Now, you can use the `zk_inception` and `prover_cli` tools for setting up the env and running the prover subsystem. + +```shell +cargo +nightly-2024-08-01 install --git https://github.com/matter-labs/zksync-era/ --locked zk_inception zk_supervisor prover_cli --force +``` + +## Initializing system + +After you have installed the tools, you can create the ecosystem (you need to run this only if you are outside of the `zksync-era` repo) by running: + +```shell +zk_inception ecosystem create --l1-network=localhost --prover-mode=gpu --wallet-creation=localhost --l1-batch-commit-data-generator-mode=rollup --start-containers=true +``` + +The command will create the ecosystem and all the necessary components for the prover subsystem. You can leave default values for all the prompts you will see. Now, you need to initialize the prover subsystem by running: + +```shell +zk_inception prover init --shall-save-to-public-bucket=false --setup-database=true --use-default=true --dont-drop=false +``` + +For the prompts you can leave default values as well.
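+To sanity-check the initialization before moving on, you can confirm that the prover database was created and migrated. A minimal check, assuming the default local Postgres credentials used elsewhere in this guide (the database name below is illustrative -- replace it with the one you chose or accepted during init): + +```shell +psql postgres://postgres:notsecurepassword@localhost:5432/zksync_prover_localhost_era -c '\dt' +``` + +The output should list the prover tables, e.g. `witness_inputs_fri` and `prover_jobs_fri`.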
+ +## Proving the batch + +### Getting data needed for proving + +At this step, we need to get the witness inputs data for the batch you want to prove. Database information now lives in an input file called `witness_inputs_<l1_batch_number>.bin`, generated by different core components. + +- If the batch was produced by your system, the file is stored by the prover gateway in GCS (or your choice of object storage -- check config). At the point of getting it, most likely there is no artifacts directory created. If you have cloned the zksync-era repo, then it is in the root of the ecosystem directory. Create the artifacts directory by running: + + ```shell + mkdir -p <path/to/artifacts> + ``` + + To access it from GCS (assuming you have access to the bucket), run: + + ```shell + gsutil cp gs://your_bucket/witness_inputs/witness_inputs_<l1_batch_number>.bin <path/to/artifacts> + ``` + +- If you want to prove a batch produced by zkSync, you can get the data from the `ExternalProofIntegrationAPI` using the `{address}/proof_generation_data` endpoint. You need to replace `{address}` with the address of the API and provide the batch number as query data to get the data for a specific batch; otherwise, you will receive the latest data for a batch that was already proven. Example: + + ```shell + curl -H "Content-Type: application/json" -X POST {address}/proof_generation_data -d 'null' + ``` + + or + + ```shell + curl -H "Content-Type: application/json" -X POST {address}/proof_generation_data -d '1000' + ``` + +### Preparing database + +After you have the data, you need to prepare the system to run the batch: the database needs to know about the batch and the protocol version it should use. First, find out the protocol version by running + +```shell +zk_supervisor prover-version +``` + +Example output: + +```shell +Current protocol version found in zksync-era: 0.24.2, snark_wrapper: "0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2" +``` + +This command will provide you with the information about the semantic protocol version (you need to know only the minor and patch versions) and the snark wrapper value. In the example, `MINOR_VERSION` is 24, `PATCH_VERSION` is 2, and `SNARK_WRAPPER` is `0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2`. + +Now, with the `prover_cli` tool, you can insert the data about the batch and the protocol version into the database. First, get the database URL (you can find it in `<path_to_ecosystem>/chains/<chain_name>/configs/secrets.yaml` - it is the `prover_url` value). Then insert the information about the protocol version into the database: + +```shell +prover_cli insert-version --version=<MINOR_VERSION> --patch=<PATCH_VERSION> --snark-wrapper=<SNARK_WRAPPER> +``` + +And finally, provide the data about the batch: + +```shell +prover_cli insert-batch --number=<BATCH_NUMBER> --version=<MINOR_VERSION> --patch=<PATCH_VERSION> +``` + +Also, provers need to know which setup keys they should use. It may take some time, but you can generate them with: + +```shell +zk_inception prover generate-sk +``` + +## Running prover subsystem + +At this step, all the data is prepared and you can run the prover subsystem. To do that, run the following commands: + +```shell +zk_inception prover run --component=prover +zk_inception prover run --component=witness-generator --round=all-rounds +zk_inception prover run --component=witness-vector-generator --threads=10 +zk_inception prover run --component=compressor +zk_inception prover run --component=prover-job-monitor +``` + +And you are good to go! The prover subsystem will prove the batch and you can check the results in the database.
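+Since proving progress is tracked in the prover database, you can follow it with plain SQL. A minimal sketch (connect with the `prover_url` from your `secrets.yaml`; `witness_inputs_fri` is the table used in the insert step above, while `prover_jobs_fri` is assumed here to be the per-circuit job table): + +```shell +psql <prover_url> -c "SELECT l1_batch_number, status FROM witness_inputs_fri;" +psql <prover_url> -c "SELECT status, count(*) FROM prover_jobs_fri GROUP BY status;" +``` + +Once all stages, including compression, are finished, the final proof appears in your artifacts storage.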
+ +## Verifying zkSync batch + +Now, assuming the proof is already generated, you can verify it using the `ExternalProofIntegrationAPI`. Usually the proof is stored in a GCS bucket (you can fetch it with the same steps as for getting the witness inputs data [here](#getting-data-needed-for-proving); locally you can find it in the `/artifacts/proofs_fri` directory). Now, simply send the data to the endpoint `{address}/verify_batch/{batch_number}`. Note that you need to pass the generated proof as serialized JSON data when calling the endpoint. The API will respond with status 200 if the proof is valid and with an error message otherwise. diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock index c76556272e8..7682b92a4f2 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -6298,6 +6298,7 @@ dependencies = [ "futures", "human-panic", "serde", + "serde_json", "strum", "tokio", "url", diff --git a/zk_toolbox/crates/config/src/secrets.rs b/zk_toolbox/crates/config/src/secrets.rs index 5bcad19ad33..f0a39148b03 100644 --- a/zk_toolbox/crates/config/src/secrets.rs +++ b/zk_toolbox/crates/config/src/secrets.rs @@ -26,6 +26,18 @@ pub fn set_databases( Ok(()) } +pub fn set_prover_database( + secrets: &mut SecretsConfig, + prover_db_config: &DatabaseConfig, +) -> anyhow::Result<()> { + let database = secrets + .database + .as_mut() + .context("Databases must be presented")?; + database.prover_url = Some(SensitiveUrl::from(prover_db_config.full_url())); + Ok(()) +} + pub fn set_l1_rpc_url(secrets: &mut SecretsConfig, l1_rpc_url: String) -> anyhow::Result<()> { secrets .l1 diff --git a/zk_toolbox/crates/zk_inception/README.md b/zk_toolbox/crates/zk_inception/README.md index 4cb6d213688..8b6368ce8c2 100644 --- a/zk_toolbox/crates/zk_inception/README.md +++ b/zk_toolbox/crates/zk_inception/README.md @@ -428,7 +428,7 @@ Initialize prover - `--project-id <PROJECT_ID>` - `--shall-save-to-public-bucket <SHALL_SAVE_TO_PUBLIC_BUCKET>` - Possible values: `true`, `false` +Possible values: `true`, `false` - `--public-store-dir <PUBLIC_STORE_DIR>` - `--public-bucket-base-url <PUBLIC_BUCKET_BASE_URL>` @@ -438,8 +438,13 @@ Initialize prover - `--public-project-id <PUBLIC_PROJECT_ID>` - `--bellman-cuda-dir <BELLMAN_CUDA_DIR>` - `--download-key <DOWNLOAD_KEY>` +- `--setup-database` +- `--use-default` - use default database +- `--dont-drop` - don't drop database +- `--prover-db-url` - URL of database to use +- `--prover-db-name` - Name of database to use - Possible values: `true`, `false` +Possible values: `true`, `false` - `--setup-key-path <SETUP_KEY_PATH>` - `--cloud-type <CLOUD_TYPE>` diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs index cef43562571..e8c9cf1888d 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/args/init.rs @@ -1,7 +1,10 @@ use clap::{Parser, ValueEnum}; -use common::{logger, Prompt, PromptConfirm, PromptSelect}; +use common::{db::DatabaseConfig, logger, Prompt, PromptConfirm, PromptSelect}; +use config::ChainConfig; use serde::{Deserialize, Serialize}; +use slugify_rs::slugify; use strum::{EnumIter, IntoEnumIterator}; +use url::Url; use xshell::Shell; use zksync_config::configs::fri_prover::CloudConnectionMode; @@ -9,15 +12,18 @@ use super::init_bellman_cuda::InitBellmanCudaArgs; use crate::{ commands::prover::gcs::get_project_ids, consts::{DEFAULT_CREDENTIALS_FILE, DEFAULT_PROOF_STORE_DIR}, + defaults::{generate_db_names, DBNames, DATABASE_PROVER_URL}, messages::{ - MSG_CLOUD_TYPE_PROMPT, MSG_CREATE_GCS_BUCKET_LOCATION_PROMPT, - MSG_CREATE_GCS_BUCKET_NAME_PROMTP,
MSG_CREATE_GCS_BUCKET_PROJECT_ID_NO_PROJECTS_PROMPT, + msg_prover_db_name_prompt, msg_prover_db_url_prompt, MSG_CLOUD_TYPE_PROMPT, + MSG_CREATE_GCS_BUCKET_LOCATION_PROMPT, MSG_CREATE_GCS_BUCKET_NAME_PROMTP, + MSG_CREATE_GCS_BUCKET_PROJECT_ID_NO_PROJECTS_PROMPT, MSG_CREATE_GCS_BUCKET_PROJECT_ID_PROMPT, MSG_CREATE_GCS_BUCKET_PROMPT, MSG_DOWNLOAD_SETUP_KEY_PROMPT, MSG_GETTING_PROOF_STORE_CONFIG, MSG_GETTING_PUBLIC_STORE_CONFIG, MSG_PROOF_STORE_CONFIG_PROMPT, MSG_PROOF_STORE_DIR_PROMPT, MSG_PROOF_STORE_GCS_BUCKET_BASE_URL_ERR, MSG_PROOF_STORE_GCS_BUCKET_BASE_URL_PROMPT, - MSG_PROOF_STORE_GCS_CREDENTIALS_FILE_PROMPT, MSG_SAVE_TO_PUBLIC_BUCKET_PROMPT, - MSG_SETUP_KEY_PATH_PROMPT, + MSG_PROOF_STORE_GCS_CREDENTIALS_FILE_PROMPT, MSG_PROVER_DB_NAME_HELP, + MSG_PROVER_DB_URL_HELP, MSG_SAVE_TO_PUBLIC_BUCKET_PROMPT, MSG_SETUP_KEY_PATH_PROMPT, + MSG_USE_DEFAULT_DATABASES_HELP, }, }; @@ -54,6 +60,17 @@ pub struct ProverInitArgs { #[serde(flatten)] pub setup_key_config: SetupKeyConfigTmp, + #[clap(long)] + pub setup_database: Option<bool>, + #[clap(long, help = MSG_PROVER_DB_URL_HELP)] + pub prover_db_url: Option<Url>, + #[clap(long, help = MSG_PROVER_DB_NAME_HELP)] + pub prover_db_name: Option<String>, + #[clap(long, short, help = MSG_USE_DEFAULT_DATABASES_HELP)] + pub use_default: Option<bool>, + #[clap(long, short, action)] + pub dont_drop: Option<bool>, + #[clap(long)] cloud_type: Option<InternalCloudConnectionMode>, } @@ -160,6 +177,12 @@ pub struct SetupKeyConfig { pub setup_key_path: String, } +#[derive(Debug, Clone)] +pub struct ProverDatabaseConfig { + pub database_config: DatabaseConfig, + pub dont_drop: bool, +} + #[derive(Debug, Clone)] pub struct ProverInitArgsFinal { pub proof_store: ProofStorageConfig, @@ -167,6 +190,7 @@ pub struct ProverInitArgsFinal { pub setup_key_config: SetupKeyConfig, pub bellman_cuda_config: InitBellmanCudaArgs, pub cloud_type: CloudConnectionMode, + pub database_config: Option<ProverDatabaseConfig>, } impl ProverInitArgs { @@ -174,12 +198,14 @@ impl ProverInitArgs { &self, shell: &Shell, setup_key_path: &str, + chain_config: &ChainConfig, ) -> anyhow::Result<ProverInitArgsFinal> { let proof_store = self.fill_proof_storage_values_with_prompt(shell)?; let public_store = self.fill_public_storage_values_with_prompt(shell)?; let setup_key_config = self.fill_setup_key_values_with_prompt(setup_key_path); let bellman_cuda_config = self.fill_bellman_cuda_values_with_prompt()?; let cloud_type = self.get_cloud_type_with_prompt(); + let database_config = self.fill_database_values_with_prompt(chain_config); Ok(ProverInitArgsFinal { proof_store, @@ -187,6 +213,7 @@ impl ProverInitArgs { setup_key_config, bellman_cuda_config, cloud_type, + database_config, }) } @@ -314,7 +341,11 @@ impl ProverInitArgs { .clone() .setup_key_config .download_key - .unwrap_or_else(|| PromptConfirm::new(MSG_DOWNLOAD_SETUP_KEY_PROMPT).ask()); + .unwrap_or_else(|| { + PromptConfirm::new(MSG_DOWNLOAD_SETUP_KEY_PROMPT) + .default(true) + .ask() + }); let setup_key_path = self .clone() .setup_key_config @@ -435,9 +466,65 @@ impl ProverInitArgs { fn get_cloud_type_with_prompt(&self) -> CloudConnectionMode { let cloud_type = self.cloud_type.clone().unwrap_or_else(|| { - PromptSelect::new(MSG_CLOUD_TYPE_PROMPT, InternalCloudConnectionMode::iter()).ask() + PromptSelect::new( + MSG_CLOUD_TYPE_PROMPT, + InternalCloudConnectionMode::iter().rev(), + ) + .ask() }); cloud_type.into() } + + fn fill_database_values_with_prompt( + &self, + config: &ChainConfig, + ) -> Option<ProverDatabaseConfig> { + let setup_database = self + .setup_database + .unwrap_or_else(|| PromptConfirm::new("Do you want to setup the database?").ask()); + + if
setup_database { + let DBNames { prover_name, .. } = generate_db_names(config); + let chain_name = config.name.clone(); + + let dont_drop = self.dont_drop.unwrap_or_else(|| { + !PromptConfirm::new("Do you want to drop the database?") + .default(true) + .ask() + }); + + if self.use_default.unwrap_or_else(|| { + PromptConfirm::new(MSG_USE_DEFAULT_DATABASES_HELP) + .default(true) + .ask() + }) { + Some(ProverDatabaseConfig { + database_config: DatabaseConfig::new(DATABASE_PROVER_URL.clone(), prover_name), + dont_drop, + }) + } else { + let prover_db_url = self.prover_db_url.clone().unwrap_or_else(|| { + Prompt::new(&msg_prover_db_url_prompt(&chain_name)) + .default(DATABASE_PROVER_URL.as_str()) + .ask() + }); + + let prover_db_name: String = self.prover_db_name.clone().unwrap_or_else(|| { + Prompt::new(&msg_prover_db_name_prompt(&chain_name)) + .default(&prover_name) + .ask() + }); + + let prover_db_name = slugify!(&prover_db_name, separator = "_"); + + Some(ProverDatabaseConfig { + database_config: DatabaseConfig::new(prover_db_url, prover_db_name), + dont_drop, + }) + } + } else { + None + } + } } diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs index c2d5cef26ad..6bdd62c1d48 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/args/run.rs @@ -28,6 +28,8 @@ pub enum ProverComponent { Prover, #[strum(to_string = "Compressor")] Compressor, + #[strum(to_string = "ProverJobMonitor")] + ProverJobMonitor, } #[derive(Debug, Clone, Parser, Default)] diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/generate_sk.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/generate_sk.rs index 1657ab2c99f..7f678470d17 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/generate_sk.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/generate_sk.rs @@ -17,9 +17,9 @@ pub(crate) async fn run(shell: &Shell) -> anyhow::Result<()> { let cmd = Cmd::new(cmd!( shell, "cargo run --features gpu --release --bin key_generator -- - generate-sk all --recompute-if-missing - --setup-path=vk_setup_data_generator_server_fri/data - --path={link_to_prover}/vk_setup_data_generator_server_fri/data" + generate-sk-gpu all --recompute-if-missing + --setup-path=crates/bin/vk_setup_data_generator_server_fri/data + --path={link_to_prover}/crates/bin/vk_setup_data_generator_server_fri/data" )); cmd.run()?; spinner.finish(); diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs index a27e5f1b0be..803ef56df83 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs @@ -1,6 +1,15 @@ +use std::path::PathBuf; + use anyhow::Context; -use common::{check_prover_prequisites, cmd::Cmd, logger, spinner::Spinner}; -use config::EcosystemConfig; +use common::{ + check_prover_prequisites, + cmd::Cmd, + config::global_config, + db::{drop_db_if_exists, init_db, migrate_db, DatabaseConfig}, + logger, + spinner::Spinner, +}; +use config::{copy_configs, set_prover_database, traits::SaveConfigWithBasePath, EcosystemConfig}; use xshell::{cmd, Shell}; use zksync_config::{ configs::{object_store::ObjectStoreMode, GeneralConfig}, @@ -14,28 +23,36 @@ use super::{ utils::get_link_to_prover, }; use crate::{ - consts::PROVER_STORE_MAX_RETRIES, + consts::{PROVER_MIGRATIONS, PROVER_STORE_MAX_RETRIES}, 
messages::{ MSG_CHAIN_NOT_FOUND_ERR, MSG_DOWNLOADING_SETUP_KEY_SPINNER, - MSG_GENERAL_CONFIG_NOT_FOUND_ERR, MSG_PROOF_COMPRESSOR_CONFIG_NOT_FOUND_ERR, - MSG_PROVER_CONFIG_NOT_FOUND_ERR, MSG_PROVER_INITIALIZED, MSG_SETUP_KEY_PATH_ERROR, + MSG_FAILED_TO_DROP_PROVER_DATABASE_ERR, MSG_GENERAL_CONFIG_NOT_FOUND_ERR, + MSG_INITIALIZING_DATABASES_SPINNER, MSG_INITIALIZING_PROVER_DATABASE, + MSG_PROOF_COMPRESSOR_CONFIG_NOT_FOUND_ERR, MSG_PROVER_CONFIG_NOT_FOUND_ERR, + MSG_PROVER_INITIALIZED, MSG_SETUP_KEY_PATH_ERROR, }, }; pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<()> { check_prover_prequisites(shell); + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + let setup_key_path = get_default_setup_key_path(&ecosystem_config)?; + let chain_config = ecosystem_config .load_chain(Some(ecosystem_config.default_chain.clone())) .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let args = args.fill_values_with_prompt(shell, &setup_key_path, &chain_config)?; + + if chain_config.get_general_config().is_err() || chain_config.get_secrets_config().is_err() { + copy_configs(shell, &ecosystem_config.link_to_code, &chain_config.configs)?; + } + let mut general_config = chain_config .get_general_config() .context(MSG_GENERAL_CONFIG_NOT_FOUND_ERR)?; - let setup_key_path = get_default_setup_key_path(&ecosystem_config)?; - - let args = args.fill_values_with_prompt(shell, &setup_key_path)?; - let proof_object_store_config = get_object_store_config(shell, Some(args.proof_store))?; let public_object_store_config = get_object_store_config(shell, args.public_store)?; @@ -72,6 +89,23 @@ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<( init_bellman_cuda(shell, args.bellman_cuda_config).await?; + if let Some(prover_db) = &args.database_config { + let spinner = Spinner::new(MSG_INITIALIZING_DATABASES_SPINNER); + + let mut secrets = chain_config.get_secrets_config()?; + set_prover_database(&mut secrets, &prover_db.database_config)?; + secrets.save_with_base_path(shell, &chain_config.configs)?; + initialize_prover_database( + shell, + &prover_db.database_config, + ecosystem_config.link_to_code.clone(), + prover_db.dont_drop, + ) + .await?; + + spinner.finish(); + } + logger::outro(MSG_PROVER_INITIALIZED); Ok(()) } @@ -138,3 +172,29 @@ fn get_object_store_config( Ok(object_store) } + +async fn initialize_prover_database( + shell: &Shell, + prover_db_config: &DatabaseConfig, + link_to_code: PathBuf, + dont_drop: bool, +) -> anyhow::Result<()> { + if global_config().verbose { + logger::debug(MSG_INITIALIZING_PROVER_DATABASE) + } + if !dont_drop { + drop_db_if_exists(prover_db_config) + .await + .context(MSG_FAILED_TO_DROP_PROVER_DATABASE_ERR)?; + init_db(prover_db_config).await?; + } + let path_to_prover_migration = link_to_code.join(PROVER_MIGRATIONS); + migrate_db( + shell, + path_to_prover_migration, + &prover_db_config.full_url(), + ) + .await?; + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs index 5497db8a21e..05672383666 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs @@ -13,9 +13,10 @@ use super::{ use crate::messages::{ MSG_BELLMAN_CUDA_DIR_ERR, MSG_CHAIN_NOT_FOUND_ERR, MSG_MISSING_COMPONENT_ERR, MSG_RUNNING_COMPRESSOR, MSG_RUNNING_COMPRESSOR_ERR, MSG_RUNNING_PROVER, MSG_RUNNING_PROVER_ERR, - MSG_RUNNING_PROVER_GATEWAY, MSG_RUNNING_PROVER_GATEWAY_ERR, 
MSG_RUNNING_WITNESS_GENERATOR, - MSG_RUNNING_WITNESS_GENERATOR_ERR, MSG_RUNNING_WITNESS_VECTOR_GENERATOR, - MSG_RUNNING_WITNESS_VECTOR_GENERATOR_ERR, MSG_WITNESS_GENERATOR_ROUND_ERR, + MSG_RUNNING_PROVER_GATEWAY, MSG_RUNNING_PROVER_GATEWAY_ERR, MSG_RUNNING_PROVER_JOB_MONITOR, + MSG_RUNNING_WITNESS_GENERATOR, MSG_RUNNING_WITNESS_GENERATOR_ERR, + MSG_RUNNING_WITNESS_VECTOR_GENERATOR, MSG_RUNNING_WITNESS_VECTOR_GENERATOR_ERR, + MSG_WITNESS_GENERATOR_ROUND_ERR, }; pub(crate) async fn run(args: ProverRunArgs, shell: &Shell) -> anyhow::Result<()> { @@ -39,6 +40,7 @@ pub(crate) async fn run(args: ProverRunArgs, shell: &Shell) -> anyhow::Result<() } Some(ProverComponent::Prover) => run_prover(shell, &chain)?, Some(ProverComponent::Compressor) => run_compressor(shell, &chain, &ecosystem_config)?, + Some(ProverComponent::ProverJobMonitor) => run_prover_job_monitor(shell, &chain)?, None => anyhow::bail!(MSG_MISSING_COMPONENT_ERR), } @@ -127,3 +129,13 @@ fn run_compressor( cmd = cmd.with_force_run(); cmd.run().context(MSG_RUNNING_COMPRESSOR_ERR) } + +fn run_prover_job_monitor(shell: &Shell, chain: &ChainConfig) -> anyhow::Result<()> { + logger::info(MSG_RUNNING_PROVER_JOB_MONITOR); + let config_path = chain.path_to_general_config(); + let secrets_path = chain.path_to_secrets_config(); + + let mut cmd = Cmd::new(cmd!(shell, "cargo run --release --bin zksync_prover_job_monitor -- --config-path={config_path} --secrets-path={secrets_path}")); + cmd = cmd.with_force_run(); + cmd.run().context(MSG_RUNNING_PROVER_JOB_MONITOR) +} diff --git a/zk_toolbox/crates/zk_inception/src/messages.rs b/zk_toolbox/crates/zk_inception/src/messages.rs index f0e46aaf486..1ec2b006452 100644 --- a/zk_toolbox/crates/zk_inception/src/messages.rs +++ b/zk_toolbox/crates/zk_inception/src/messages.rs @@ -259,6 +259,7 @@ pub(super) const MSG_GENERATING_SK_SPINNER: &str = "Generating setup keys..."; pub(super) const MSG_SK_GENERATED: &str = "Setup keys generated successfully"; pub(super) const MSG_MISSING_COMPONENT_ERR: &str = "Missing component"; pub(super) const MSG_RUNNING_PROVER_GATEWAY: &str = "Running gateway"; +pub(super) const MSG_RUNNING_PROVER_JOB_MONITOR: &str = "Running prover job monitor"; pub(super) const MSG_RUNNING_WITNESS_GENERATOR: &str = "Running witness generator"; pub(super) const MSG_RUNNING_WITNESS_VECTOR_GENERATOR: &str = "Running witness vector generator"; pub(super) const MSG_RUNNING_PROVER: &str = "Running prover"; diff --git a/zk_toolbox/crates/zk_supervisor/Cargo.toml b/zk_toolbox/crates/zk_supervisor/Cargo.toml index e1225de96d3..e24c88f3ec2 100644 --- a/zk_toolbox/crates/zk_supervisor/Cargo.toml +++ b/zk_toolbox/crates/zk_supervisor/Cargo.toml @@ -23,3 +23,4 @@ xshell.workspace = true serde.workspace = true clap-markdown.workspace = true futures.workspace = true +serde_json.workspace = true diff --git a/zk_toolbox/crates/zk_supervisor/README.md b/zk_toolbox/crates/zk_supervisor/README.md index 4648fe6cb36..1f880cdcb30 100644 --- a/zk_toolbox/crates/zk_supervisor/README.md +++ b/zk_toolbox/crates/zk_supervisor/README.md @@ -5,6 +5,7 @@ This document contains the help content for the `zk_supervisor` command-line pro **Command Overview:** - [`zk_supervisor`↴](#zk_supervisor) +- [`zk_supervisor prover-version`↴](#zk_supervisor-prover-version) - [`zk_supervisor database`↴](#zk_supervisor-database) - [`zk_supervisor database check-sqlx-data`↴](#zk_supervisor-database-check-sqlx-data) - [`zk_supervisor database drop`↴](#zk_supervisor-database-drop) @@ -44,6 +45,12 @@ ZK Toolbox is a set of tools for working 
with zk stack. - `--chain <CHAIN>` — Chain to use - `--ignore-prerequisites` — Ignores prerequisites checks +## `zk_supervisor prover-version` + +Gets information about the current protocol version of provers in `zksync-era` and the snark wrapper hash. + +**Usage:** `zk_supervisor prover-version` + ## `zk_supervisor database` Database related commands diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs index 99a8fa5e0a5..181ce50c213 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs @@ -3,5 +3,6 @@ pub mod database; pub mod fmt; pub mod lint; pub(crate) mod lint_utils; +pub mod prover_version; pub mod snapshot; pub mod test; diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/prover_version.rs b/zk_toolbox/crates/zk_supervisor/src/commands/prover_version.rs new file mode 100644 index 00000000000..479f796294f --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/prover_version.rs @@ -0,0 +1,41 @@ +use std::{fs, path::Path}; + +use common::logger; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +pub async fn run(shell: &Shell) -> anyhow::Result<()> { + let link_to_code = EcosystemConfig::from_file(shell)?.link_to_code; + let link_to_prover = link_to_code.join("prover"); + + let protocol_version = get_protocol_version(shell, &link_to_prover).await?; + let snark_wrapper = get_snark_wrapper(&link_to_prover).await?; + + logger::info(format!( + "Current protocol version found in zksync-era: {}, snark_wrapper: {}", + protocol_version, snark_wrapper + )); + + Ok(()) +} + +async fn get_protocol_version(shell: &Shell, link_to_prover: &Path) -> anyhow::Result<String> { + shell.change_dir(link_to_prover); + let protocol_version = cmd!(shell, "cargo run --release --bin prover_version").read()?; + + Ok(protocol_version) +} + +async fn get_snark_wrapper(link_to_prover: &Path) -> anyhow::Result<String> { + let path = + link_to_prover.join("crates/bin/vk_setup_data_generator_server_fri/data/commitments.json"); + let file = fs::File::open(path).expect("Could not find commitments file in zksync-era"); + let json: serde_json::Value = + serde_json::from_reader(file).expect("Could not parse commitments.json"); + + let snark_wrapper = json + .get("snark_wrapper") + .expect("Could not find snark_wrapper in commitments.json"); + + Ok(snark_wrapper.to_string()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/main.rs b/zk_toolbox/crates/zk_supervisor/src/main.rs index 965def9263a..9a1c1ad74bc 100644 --- a/zk_toolbox/crates/zk_supervisor/src/main.rs +++ b/zk_toolbox/crates/zk_supervisor/src/main.rs @@ -10,9 +10,9 @@ use common::{ }; use config::EcosystemConfig; use messages::{ - msg_global_chain_does_not_exist, MSG_SUBCOMMAND_CLEAN, MSG_SUBCOMMAND_DATABASE_ABOUT, - MSG_SUBCOMMAND_FMT_ABOUT, MSG_SUBCOMMAND_LINT_ABOUT, MSG_SUBCOMMAND_SNAPSHOTS_CREATOR_ABOUT, - MSG_SUBCOMMAND_TESTS_ABOUT, + msg_global_chain_does_not_exist, MSG_PROVER_VERSION_ABOUT, MSG_SUBCOMMAND_CLEAN, + MSG_SUBCOMMAND_DATABASE_ABOUT, MSG_SUBCOMMAND_FMT_ABOUT, MSG_SUBCOMMAND_LINT_ABOUT, + MSG_SUBCOMMAND_SNAPSHOTS_CREATOR_ABOUT, MSG_SUBCOMMAND_TESTS_ABOUT, }; use xshell::Shell; @@ -47,6 +47,8 @@ enum SupervisorSubcommands { Fmt(FmtArgs), #[command(hide = true)] Markdown, + #[command(about = MSG_PROVER_VERSION_ABOUT)] + ProverVersion, } #[derive(Parser, Debug)] @@ -103,6 +105,7 @@ async fn run_subcommand(args: Supervisor, shell: &Shell) -> anyhow::Result<()> { } SupervisorSubcommands::Lint(args) =>
commands::lint::run(shell, args)?, SupervisorSubcommands::Fmt(args) => commands::fmt::run(shell.clone(), args).await?, + SupervisorSubcommands::ProverVersion => commands::prover_version::run(shell).await?, } Ok(()) } diff --git a/zk_toolbox/crates/zk_supervisor/src/messages.rs b/zk_toolbox/crates/zk_supervisor/src/messages.rs index df0cf0c311d..de25be28132 100644 --- a/zk_toolbox/crates/zk_supervisor/src/messages.rs +++ b/zk_toolbox/crates/zk_supervisor/src/messages.rs @@ -8,6 +8,7 @@ pub(super) fn msg_global_chain_does_not_exist(chain: &str, available_chains: &st } // Subcommands help +pub(super) const MSG_PROVER_VERSION_ABOUT: &str = "Protocol version used by provers"; pub(super) const MSG_SUBCOMMAND_DATABASE_ABOUT: &str = "Database related commands"; pub(super) const MSG_SUBCOMMAND_TESTS_ABOUT: &str = "Run tests"; pub(super) const MSG_SUBCOMMAND_CLEAN: &str = "Clean artifacts";
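Putting the new commands together, the flow this patch enables looks like the following sketch (the version and hash values are the ones from the example output in `prover/docs/05_proving_batch.md`; substitute your own):

```shell
# Print the protocol version and snark wrapper hash currently used by the provers
zk_supervisor prover-version
# Register that protocol version in the prover database
prover_cli insert-version --version=24 --patch=2 --snark-wrapper=0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2
# Queue batch 1000 for proving under that version
prover_cli insert-batch --number=1000 --version=24 --patch=2
```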