Skip to content

Commit

Permalink
feat: Provide easy prover setup (#2683)
Browse files Browse the repository at this point in the history
## What ❔

Allow running `zk_inception prover init` without `chain init`.
Add docs for running provers and proving the batch.

## Why ❔

To provide an easy way to spin up the prover subsystem locally.

## Checklist

<!-- Check your PR fulfills the following items. -->
<!-- For draft PRs check the boxes as you complete them. -->

- [ ] PR title corresponds to the body of PR (we generate changelog
entries from PRs).
- [ ] Tests for the changes have been added / updated.
- [ ] Documentation comments have been added / updated.
- [ ] Code has been formatted via `zk fmt` and `zk lint`.
  • Loading branch information
Artemka374 authored Aug 22, 2024
1 parent 8776875 commit 30edda4
Show file tree
Hide file tree
Showing 32 changed files with 589 additions and 129 deletions.
2 changes: 1 addition & 1 deletion etc/env/file_based/general.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ prover:
file_backed:
file_backed_base_path: artifacts
max_retries: 10
setup_data_path: vk_setup_data_generator_server_fri/data
setup_data_path: crates/bin/vk_setup_data_generator_server_fri/data
prometheus_port: 3315
max_attempts: 10
generation_timeout_in_secs: 600
Expand Down
9 changes: 7 additions & 2 deletions prover/crates/bin/proof_fri_compressor/src/compressor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ pub struct ProofCompressor {
compression_mode: u8,
max_attempts: u32,
protocol_version: ProtocolSemanticVersion,
setup_data_path: String,
}

impl ProofCompressor {
Expand All @@ -44,13 +45,15 @@ impl ProofCompressor {
compression_mode: u8,
max_attempts: u32,
protocol_version: ProtocolSemanticVersion,
setup_data_path: String,
) -> Self {
Self {
blob_store,
pool,
compression_mode,
max_attempts,
protocol_version,
setup_data_path,
}
}

Expand All @@ -59,8 +62,9 @@ impl ProofCompressor {
l1_batch: L1BatchNumber,
proof: ZkSyncRecursionLayerProof,
_compression_mode: u8,
setup_data_path: String,
) -> anyhow::Result<FinalProof> {
let keystore = Keystore::default();
let keystore = Keystore::new_with_setup_data_path(setup_data_path);
let scheduler_vk = keystore
.load_recursive_layer_verification_key(
ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8,
Expand Down Expand Up @@ -174,8 +178,9 @@ impl JobProcessor for ProofCompressor {
) -> JoinHandle<anyhow::Result<Self::JobArtifacts>> {
let compression_mode = self.compression_mode;
let block_number = *job_id;
let setup_data_path = self.setup_data_path.clone();
tokio::task::spawn_blocking(move || {
Self::compress_proof(block_number, job, compression_mode)
Self::compress_proof(block_number, job, compression_mode, setup_data_path)
})
}

Expand Down
5 changes: 5 additions & 0 deletions prover/crates/bin/proof_fri_compressor/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ async fn main() -> anyhow::Result<()> {
let object_store_config = ProverObjectStoreConfig(
general_config
.prover_config
.clone()
.expect("ProverConfig")
.prover_object_store
.context("ProverObjectStoreConfig")?,
Expand All @@ -75,6 +76,10 @@ async fn main() -> anyhow::Result<()> {
config.compression_mode,
config.max_attempts,
protocol_version,
general_config
.prover_config
.expect("ProverConfig doesn't exist")
.setup_data_path,
);

let (stop_sender, stop_receiver) = watch::channel(false);
Expand Down
7 changes: 6 additions & 1 deletion prover/crates/bin/prover_cli/src/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ use clap::{command, Args, Parser, Subcommand};
use zksync_types::url::SensitiveUrl;

use crate::commands::{
config, debug_proof, delete, get_file_info, requeue, restart, stats, status::StatusCommand,
config, debug_proof, delete, get_file_info, insert_batch, insert_version, requeue, restart,
stats, status::StatusCommand,
};

pub const VERSION_STRING: &str = env!("CARGO_PKG_VERSION");
Expand All @@ -27,6 +28,8 @@ impl ProverCLI {
ProverCommand::Restart(args) => restart::run(args).await?,
ProverCommand::DebugProof(args) => debug_proof::run(args).await?,
ProverCommand::Stats(args) => stats::run(args, self.config).await?,
ProverCommand::InsertVersion(args) => insert_version::run(args, self.config).await?,
ProverCommand::InsertBatch(args) => insert_batch::run(args, self.config).await?,
};
Ok(())
}
Expand Down Expand Up @@ -55,4 +58,6 @@ pub enum ProverCommand {
Restart(restart::Args),
#[command(about = "Displays L1 Batch proving stats for a given period")]
Stats(stats::Options),
InsertVersion(insert_version::Args),
InsertBatch(insert_batch::Args),
}
43 changes: 43 additions & 0 deletions prover/crates/bin/prover_cli/src/commands/insert_batch.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
use anyhow::Context as _;
use clap::Args as ClapArgs;
use zksync_basic_types::{
protocol_version::{ProtocolSemanticVersion, ProtocolVersionId, VersionPatch},
L1BatchNumber,
};
use zksync_db_connection::connection_pool::ConnectionPool;
use zksync_prover_dal::{Prover, ProverDal};

use crate::cli::ProverCLIConfig;

// CLI arguments for `insert-batch`: identifies the L1 batch to queue and the
// prover protocol (semantic) version it should be proven with.
// NOTE: plain `//` comments are used on purpose — `///` doc comments on clap
// derive fields become CLI help text and would change the command's output.
#[derive(ClapArgs)]
pub struct Args {
    // Number of the L1 batch to insert into `witness_inputs_fri`.
    #[clap(short, long)]
    pub number: L1BatchNumber,
    // Minor component of the prover protocol version (e.g. the `24` in `0.24.2`).
    #[clap(short, long)]
    pub version: u16,
    // Patch component of the prover protocol version (e.g. the `2` in `0.24.2`).
    #[clap(short, long)]
    pub patch: u32,
}

/// Queues an L1 batch for proving by inserting it into the prover database.
///
/// Validates the requested protocol version, then saves a `witness_inputs_fri`
/// row pointing at the conventional `witness_inputs_<batch>` blob URL.
///
/// # Errors
/// Returns an error if the connection pool cannot be built, a connection
/// cannot be acquired, or `version` is not a known protocol version.
pub async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<()> {
    let connection = ConnectionPool::<Prover>::singleton(config.db_url)
        .build()
        .await
        .context("failed to build a prover_connection_pool")?;
    // Propagate connection-acquisition failures instead of panicking: a DB
    // outage is a recoverable runtime condition, not a bug in this tool.
    let mut conn = connection
        .connection()
        .await
        .context("failed to acquire a prover DB connection")?;

    // Reject unknown protocol versions before touching the database.
    let protocol_version = ProtocolVersionId::try_from(args.version)
        .map_err(|_| anyhow::anyhow!("Invalid protocol version"))?;

    let protocol_version_patch = VersionPatch(args.patch);

    conn.fri_witness_generator_dal()
        .save_witness_inputs(
            args.number,
            // Blob URL follows the naming convention used for witness input artifacts.
            &format!("witness_inputs_{}", args.number.0),
            ProtocolSemanticVersion::new(protocol_version, protocol_version_patch),
        )
        .await;

    Ok(())
}
52 changes: 52 additions & 0 deletions prover/crates/bin/prover_cli/src/commands/insert_version.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
use std::str::FromStr;

use anyhow::Context as _;
use clap::Args as ClapArgs;
use zksync_basic_types::{
protocol_version::{
L1VerifierConfig, ProtocolSemanticVersion, ProtocolVersionId, VersionPatch,
},
H256,
};
use zksync_db_connection::connection_pool::ConnectionPool;
use zksync_prover_dal::{Prover, ProverDal};

use crate::cli::ProverCLIConfig;

// CLI arguments for `insert-version`: the prover protocol (semantic) version
// to register and the snark wrapper VK hash associated with it.
// NOTE: plain `//` comments are used on purpose — `///` doc comments on clap
// derive fields become CLI help text and would change the command's output.
#[derive(ClapArgs)]
pub struct Args {
    // Minor component of the prover protocol version (e.g. the `24` in `0.24.2`).
    #[clap(short, long)]
    pub version: u16,
    // Patch component of the prover protocol version (e.g. the `2` in `0.24.2`).
    #[clap(short, long)]
    pub patch: u32,
    // Hex-encoded snark wrapper verification key hash (the `snark_wrapper`
    // value from `vk_setup_data_generator_server_fri/data/commitments.json`).
    #[clap(short, long)]
    pub snark_wrapper: String,
}

/// Registers a prover protocol version in the prover database.
///
/// Validates the version and the snark wrapper VK hash, then saves them via
/// `fri_protocol_versions_dal`.
///
/// # Errors
/// Returns an error if the connection pool cannot be built, a connection
/// cannot be acquired, `version` is not a known protocol version, or
/// `snark_wrapper` is not a valid hex-encoded `H256`.
pub async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<()> {
    let connection = ConnectionPool::<Prover>::singleton(config.db_url)
        .build()
        .await
        .context("failed to build a prover_connection_pool")?;
    // Propagate connection-acquisition failures instead of panicking: a DB
    // outage is a recoverable runtime condition, not a bug in this tool.
    let mut conn = connection
        .connection()
        .await
        .context("failed to acquire a prover DB connection")?;

    let protocol_version = ProtocolVersionId::try_from(args.version)
        .map_err(|_| anyhow::anyhow!("Invalid protocol version"))?;

    let protocol_version_patch = VersionPatch(args.patch);

    // A malformed hash is user input error, not a program invariant violation:
    // return an error the CLI can report instead of panicking.
    let snark_wrapper = H256::from_str(&args.snark_wrapper)
        .map_err(|_| anyhow::anyhow!("Invalid snark wrapper hash"))?;

    conn.fri_protocol_versions_dal()
        .save_prover_protocol_version(
            ProtocolSemanticVersion::new(protocol_version, protocol_version_patch),
            L1VerifierConfig {
                recursion_scheduler_level_vk_hash: snark_wrapper,
            },
        )
        .await;

    Ok(())
}
2 changes: 2 additions & 0 deletions prover/crates/bin/prover_cli/src/commands/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ pub(crate) mod config;
pub(crate) mod debug_proof;
pub(crate) mod delete;
pub(crate) mod get_file_info;
pub(crate) mod insert_batch;
pub(crate) mod insert_version;
pub(crate) mod requeue;
pub(crate) mod restart;
pub(crate) mod stats;
Expand Down
5 changes: 3 additions & 2 deletions prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,8 @@ pub mod gpu_prover {
.clone(),
SetupLoadMode::FromDisk => {
let started_at = Instant::now();
let keystore = Keystore::default();
let keystore =
Keystore::new_with_setup_data_path(self.config.setup_data_path.clone());
let artifact: GoldilocksGpuProverSetupData = keystore
.load_gpu_setup_data_for_circuit_type(key.clone())
.context("load_gpu_setup_data_for_circuit_type()")?;
Expand Down Expand Up @@ -347,7 +348,7 @@ pub mod gpu_prover {
&config.specialized_group_id,
prover_setup_metadata_list
);
let keystore = Keystore::default();
let keystore = Keystore::new_with_setup_data_path(config.setup_data_path.clone());
for prover_setup_metadata in prover_setup_metadata_list {
let key = setup_metadata_to_setup_data_key(&prover_setup_metadata);
let setup_data = keystore
Expand Down
5 changes: 3 additions & 2 deletions prover/crates/bin/prover_fri/src/prover_job_processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,8 @@ impl Prover {
.clone(),
SetupLoadMode::FromDisk => {
let started_at = Instant::now();
let keystore = Keystore::default();
let keystore =
Keystore::new_with_setup_data_path(self.config.setup_data_path.clone());
let artifact: GoldilocksProverSetupData = keystore
.load_cpu_setup_data_for_circuit_type(key.clone())
.context("get_cpu_setup_data_for_circuit_type()")?;
Expand Down Expand Up @@ -298,7 +299,7 @@ pub fn load_setup_data_cache(config: &FriProverConfig) -> anyhow::Result<SetupLo
&config.specialized_group_id,
prover_setup_metadata_list
);
let keystore = Keystore::default();
let keystore = Keystore::new_with_setup_data_path(config.setup_data_path.clone());
for prover_setup_metadata in prover_setup_metadata_list {
let key = setup_metadata_to_setup_data_key(&prover_setup_metadata);
let setup_data = keystore
Expand Down
73 changes: 0 additions & 73 deletions prover/crates/bin/witness_generator/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -50,76 +50,3 @@ One round of prover generation consists of:

Note that the very first input table (`witness_inputs`) is populated by the tree (as the input artifact for the
`WitnessGeneratorJobType::BasicCircuits` is the merkle proofs)

## Running BWG for custom batch

After releases `prover-v15.1.0` and `core-v24.9.0` basic witness generator doesn't need access to core database anymore.
Database information now lives in an input file, called `witness_inputs_<batch>.bin`, generated by different core
components.

This file is stored by prover gateway in GCS (or your choice of object storage -- check config). To access it from GCS
(assuming you have access to the bucket), run:

```shell
gsutil cp gs://your_bucket/witness_inputs/witness_inputs_<batch>.bin <path/to/era/prover/artifacts/witness_inputs>
```

Note, that you need to have `gsutil` installed, and you need to have access to the bucket.

Now, database needs to know about the batch and the protocol version it should use. Check the latest protocol version in
the codebase by checking const `PROVER_PROTOCOL_SEMANTIC_VERSION` or run the binary in `prover` workspace:

```console
cargo run --bin prover_version
```

It will give you the latest prover protocol version in a semver format, like `0.24.2`, you need to know only minor and
patch versions. Now, go to the `prover/crates/bin/vk_setup_data_generator_server_fri/data/commitments.json` and get
`snark_wrapper` value from it. Then, you need to insert the info about protocol version into the database. First,
connect to the database, e.g. locally you can do it like that:

```shell
psql postgres://postgres:notsecurepassword@localhost/prover_local
```

And run the following query:

```shell
INSERT INTO
prover_fri_protocol_versions (
id,
recursion_scheduler_level_vk_hash,
created_at,
protocol_version_patch
)
VALUES
(<minor version>, '<snark wrapper value>'::bytea, NOW(), <patch version>)
ON CONFLICT (id, protocol_version_patch) DO NOTHING

```
Now, you need to insert the batch into the database. Run the following query:
```shell
INSERT INTO
witness_inputs_fri (
l1_batch_number,
witness_inputs_blob_url,
protocol_version,
status,
created_at,
updated_at,
protocol_version_patch
)
VALUES
(<batch number>, 'witness_inputs_<batch_number>.bin', <minor version>, 'queued', NOW(), NOW(), <patch version>)
ON CONFLICT (l1_batch_number) DO NOTHING
```
Finally, run the basic witness generator itself:
```shell
API_PROMETHEUS_LISTENER_PORT=3116 zk f cargo run --release --bin zksync_witness_generator -- --round=basic_circuits
```
And you are good to go!
16 changes: 12 additions & 4 deletions prover/crates/bin/witness_generator/src/leaf_aggregation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ pub struct LeafAggregationWitnessGenerator {
object_store: Arc<dyn ObjectStore>,
prover_connection_pool: ConnectionPool<Prover>,
protocol_version: ProtocolSemanticVersion,
setup_data_path: String,
}

impl LeafAggregationWitnessGenerator {
Expand All @@ -80,12 +81,14 @@ impl LeafAggregationWitnessGenerator {
object_store: Arc<dyn ObjectStore>,
prover_connection_pool: ConnectionPool<Prover>,
protocol_version: ProtocolSemanticVersion,
setup_data_path: String,
) -> Self {
Self {
config,
object_store,
prover_connection_pool,
protocol_version,
setup_data_path,
}
}

Expand Down Expand Up @@ -131,9 +134,13 @@ impl JobProcessor for LeafAggregationWitnessGenerator {
tracing::info!("Processing leaf aggregation job {:?}", metadata.id);
Ok(Some((
metadata.id,
prepare_leaf_aggregation_job(metadata, &*self.object_store)
.await
.context("prepare_leaf_aggregation_job()")?,
prepare_leaf_aggregation_job(
metadata,
&*self.object_store,
self.setup_data_path.clone(),
)
.await
.context("prepare_leaf_aggregation_job()")?,
)))
}

Expand Down Expand Up @@ -219,6 +226,7 @@ impl JobProcessor for LeafAggregationWitnessGenerator {
pub async fn prepare_leaf_aggregation_job(
metadata: LeafAggregationJobMetadata,
object_store: &dyn ObjectStore,
setup_data_path: String,
) -> anyhow::Result<LeafAggregationWitnessGeneratorJob> {
let started_at = Instant::now();
let closed_form_input = get_artifacts(&metadata, object_store).await;
Expand All @@ -227,7 +235,7 @@ pub async fn prepare_leaf_aggregation_job(
.observe(started_at.elapsed());

let started_at = Instant::now();
let keystore = Keystore::default();
let keystore = Keystore::new_with_setup_data_path(setup_data_path);
let base_vk = keystore
.load_base_layer_verification_key(metadata.circuit_id)
.context("get_base_layer_vk_for_circuit_type()")?;
Expand Down
Loading

0 comments on commit 30edda4

Please sign in to comment.