Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: arkworks groth16 conversion #1783

Merged
merged 6 commits into from
Nov 13, 2024
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
213 changes: 176 additions & 37 deletions Cargo.lock

Large diffs are not rendered by default.

12 changes: 10 additions & 2 deletions crates/verifier/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,23 @@ categories = { workspace = true }
[dependencies]
bn = { version = "0.6.0", package = "substrate-bn-succinct" }
sha2 = { version = "0.10.8", default-features = false }
thiserror-no-std = "2.0.2"
thiserror = { version = "2", default-features = false }
hex = { version = "0.4.3", default-features = false, features = ["alloc"] }
lazy_static = { version = "1.5.0", default-features = false }

# arkworks
ark-bn254 = { version = "0.4.0", optional = true }
ark-serialize = { version = "0.4.2", optional = true }
ark-ff = { version = "0.4.2", optional = true }
ark-groth16 = { version = "0.4.0", optional = true }
ark-ec = { version = "0.4.0", optional = true }

[dev-dependencies]
sp1-sdk = { workspace = true }
num-bigint = "0.4.6"
num-traits = "0.2.19"

[features]
default = ["std"]
std = ["thiserror-no-std/std"]
std = ["thiserror/std"]
ark = ["ark-bn254", "ark-serialize", "ark-ff", "ark-groth16", "ark-ec"]
2 changes: 1 addition & 1 deletion crates/verifier/src/error.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use bn::{CurveError, FieldError, GroupError};
use thiserror_no_std::Error;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum Error {
Expand Down
193 changes: 193 additions & 0 deletions crates/verifier/src/groth16/ark_converter.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,193 @@
use ark_bn254::{Bn254, Fr, G1Affine, G2Affine};
use ark_ec::AffineRepr;
use ark_ff::PrimeField;
use ark_groth16::{Proof, VerifyingKey};
use ark_serialize::{CanonicalDeserialize, Compress, Validate};
use thiserror::Error;

// Both gnark and arkworks store point-compression metadata in the two
// most-significant bits of one byte of a serialized point, but they use
// different bit patterns for the same three states. The constants below are
// used by `gnark_flag_to_ark_flag` to translate between the two encodings.
//
// NOTE(review): "POSITVE" is a pre-existing misspelling of "POSITIVE"; the
// names are kept as-is because they are referenced elsewhere in this file.
const GNARK_MASK: u8 = 0b11 << 6;
const GNARK_COMPRESSED_POSITVE: u8 = 0b10 << 6;
const GNARK_COMPRESSED_NEGATIVE: u8 = 0b11 << 6;
const GNARK_COMPRESSED_INFINITY: u8 = 0b01 << 6;

// Equivalent flag values in arkworks' compressed-point encoding.
const ARK_MASK: u8 = 0b11 << 6;
const ARK_COMPRESSED_POSITVE: u8 = 0b00 << 6;
const ARK_COMPRESSED_NEGATIVE: u8 = 0b10 << 6;
const ARK_COMPRESSED_INFINITY: u8 = 0b01 << 6;

/// Errors that can occur while converting gnark-serialized Groth16 material
/// (proofs, verifying keys, points) into arkworks types.
#[derive(Error, Debug)]
pub enum ArkGroth16Error {
    /// A G1 point failed to deserialize via arkworks.
    #[error("G1 compression error")]
    G1CompressionError,
    /// A G2 point failed to deserialize via arkworks.
    #[error("G2 compression error")]
    G2CompressionError,
    /// The input had an unexpected length or an invalid flag byte.
    #[error("Invalid input")]
    InvalidInput,
}

/// Convert the endianness of a byte array, chunk by chunk.
///
/// Taken from https://github.com/anza-xyz/agave/blob/c54d840/curves/bn254/src/compression.rs#L176-L189
/// Convert the endianness of a byte array, chunk by chunk.
///
/// Each `CHUNK_SIZE`-byte chunk of `bytes` is reversed independently; the
/// chunks themselves keep their original order. Any trailing bytes beyond the
/// last full chunk are left as zero in the output.
///
/// Taken from https://github.com/anza-xyz/agave/blob/c54d840/curves/bn254/src/compression.rs#L176-L189
fn convert_endianness<const CHUNK_SIZE: usize, const ARRAY_SIZE: usize>(
    bytes: &[u8; ARRAY_SIZE],
) -> [u8; ARRAY_SIZE] {
    let mut out = [0u8; ARRAY_SIZE];
    // Walk input and output in lockstep, one chunk at a time, copying each
    // input chunk into the output in reversed byte order.
    for (dst, src) in out.chunks_exact_mut(CHUNK_SIZE).zip(bytes.chunks_exact(CHUNK_SIZE)) {
        for (d, s) in dst.iter_mut().zip(src.iter().rev()) {
            *d = *s;
        }
    }
    out
}

/// Decompress a G1 point.
///
/// Taken from https://github.com/anza-xyz/agave/blob/c54d840/curves/bn254/src/compression.rs#L219
/// Decompress a G1 point.
///
/// The gnark-compressed x-coordinate is rewritten into arkworks' layout
/// (flag bits translated, bytes reversed) and then deserialized.
///
/// Adapted from https://github.com/anza-xyz/agave/blob/c54d840/curves/bn254/src/compression.rs#L219
fn decompress_g1(g1_bytes: &[u8; 32]) -> Result<G1Affine, ArkGroth16Error> {
    let g1_bytes = gnark_compressed_x_to_ark_compressed_x(g1_bytes)?;
    // The previous version applied `convert_endianness::<32, 32>` twice in a
    // row here; two full 32-byte reversals cancel out, so the converted bytes
    // are passed to the deserializer directly.
    let decompressed_g1 =
        G1Affine::deserialize_with_mode(g1_bytes.as_slice(), Compress::Yes, Validate::No)
            .map_err(|_| ArkGroth16Error::G1CompressionError)?;
    Ok(decompressed_g1)
}

/// Decompress a G2 point.
///
/// Adapted from https://github.com/anza-xyz/agave/blob/c54d840/curves/bn254/src/compression.rs#L255
/// Decompress a G2 point.
///
/// The gnark-compressed x-coordinate is rewritten into arkworks' layout
/// (flag bits translated, bytes reversed) and then deserialized.
///
/// Adapted from https://github.com/anza-xyz/agave/blob/c54d840/curves/bn254/src/compression.rs#L255
fn decompress_g2(g2_bytes: &[u8; 64]) -> Result<G2Affine, ArkGroth16Error> {
    let g2_bytes = gnark_compressed_x_to_ark_compressed_x(g2_bytes)?;
    // The previous version applied `convert_endianness::<64, 64>` twice in a
    // row here; two full 64-byte reversals cancel out, so the converted bytes
    // are passed to the deserializer directly.
    let decompressed_g2 =
        G2Affine::deserialize_with_mode(g2_bytes.as_slice(), Compress::Yes, Validate::No)
            .map_err(|_| ArkGroth16Error::G2CompressionError)?;
    Ok(decompressed_g2)
}

/// Translate the gnark compression flag held in the two most-significant bits
/// of `msb` into the equivalent arkworks flag, leaving the lower six bits
/// untouched.
///
/// Returns `ArkGroth16Error::InvalidInput` when the two bits are not a valid
/// gnark flag pattern.
fn gnark_flag_to_ark_flag(msb: u8) -> Result<u8, ArkGroth16Error> {
    let ark_flag = match msb & GNARK_MASK {
        GNARK_COMPRESSED_POSITVE => ARK_COMPRESSED_POSITVE,
        GNARK_COMPRESSED_NEGATIVE => ARK_COMPRESSED_NEGATIVE,
        GNARK_COMPRESSED_INFINITY => ARK_COMPRESSED_INFINITY,
        _ => return Err(ArkGroth16Error::InvalidInput),
    };
    // Clear the flag bits, then install the translated arkworks flag.
    Ok((msb & !ARK_MASK) | ark_flag)
}

/// Rewrite a gnark compressed x-coordinate (32 bytes for G1, 64 bytes for G2)
/// into arkworks' layout: the flag bits in the leading byte are translated,
/// then the whole buffer is byte-reversed.
///
/// Returns `ArkGroth16Error::InvalidInput` for any other input length.
fn gnark_compressed_x_to_ark_compressed_x(x: &[u8]) -> Result<Vec<u8>, ArkGroth16Error> {
    if !matches!(x.len(), 32 | 64) {
        return Err(ArkGroth16Error::InvalidInput);
    }

    let mut out = x.to_vec();
    // The flag byte is the first byte in gnark's ordering; translate it
    // before reversing so it ends up in the position arkworks expects.
    out[0] = gnark_flag_to_ark_flag(out[0])?;
    out.reverse();
    Ok(out)
}

/// Deserialize a gnark decompressed affine G1 point to an arkworks decompressed affine G1 point.
fn gnark_decompressed_g1_to_ark_decompressed_g1(
buf: &[u8; 64],
) -> Result<G1Affine, ArkGroth16Error> {
let buf = convert_endianness::<32, 64>(buf);
if buf == [0u8; 64] {
return Ok(G1Affine::zero());
}
let g1 = G1Affine::deserialize_with_mode(
&*[&buf[..], &[0u8][..]].concat(),
Compress::No,
Validate::Yes,
)
.map_err(|_| ArkGroth16Error::G1CompressionError)?;
Ok(g1)
}

/// Deserialize a gnark decompressed affine G2 point to an arkworks decompressed affine G2 point.
fn gnark_decompressed_g2_to_ark_decompressed_g2(
buf: &[u8; 128],
) -> Result<G2Affine, ArkGroth16Error> {
let buf = convert_endianness::<64, 128>(buf);
if buf == [0u8; 128] {
return Ok(G2Affine::zero());
}
let g2 = G2Affine::deserialize_with_mode(
&*[&buf[..], &[0u8][..]].concat(),
Compress::No,
Validate::Yes,
)
.map_err(|_| ArkGroth16Error::G2CompressionError)?;
Ok(g2)
}

/// Load a Groth16 proof from bytes in the arkworks format.
pub fn load_ark_proof_from_bytes(buffer: &[u8]) -> Result<Proof<Bn254>, ArkGroth16Error> {
Ok(Proof::<Bn254> {
a: gnark_decompressed_g1_to_ark_decompressed_g1(buffer[..64].try_into().unwrap())?,
b: gnark_decompressed_g2_to_ark_decompressed_g2(buffer[64..192].try_into().unwrap())?,
c: gnark_decompressed_g1_to_ark_decompressed_g1(&buffer[192..256].try_into().unwrap())?,
})
}

/// Load a Groth16 verifying key from bytes in the arkworks format.
pub fn load_ark_groth16_verifying_key_from_bytes(
buffer: &[u8],
) -> Result<VerifyingKey<Bn254>, ArkGroth16Error> {
// Note that g1_beta and g1_delta are not used in the verification process.
let alpha_g1 = decompress_g1(buffer[..32].try_into().unwrap())?;
let beta_g2 = decompress_g2(buffer[64..128].try_into().unwrap())?;
let gamma_g2 = decompress_g2(buffer[128..192].try_into().unwrap())?;
let delta_g2 = decompress_g2(buffer[224..288].try_into().unwrap())?;

let num_k = u32::from_be_bytes([buffer[288], buffer[289], buffer[290], buffer[291]]);
let mut k = Vec::new();
let mut offset = 292;
for _ in 0..num_k {
let point = decompress_g1(&buffer[offset..offset + 32].try_into().unwrap())?;
k.push(point);
offset += 32;
}

let num_of_array_of_public_and_commitment_committed = u32::from_be_bytes([
buffer[offset],
buffer[offset + 1],
buffer[offset + 2],
buffer[offset + 3],
]);
offset += 4;
for _ in 0..num_of_array_of_public_and_commitment_committed {
let num = u32::from_be_bytes([
buffer[offset],
buffer[offset + 1],
buffer[offset + 2],
buffer[offset + 3],
]);
offset += 4;
for _ in 0..num {
offset += 4;
}
}

Ok(VerifyingKey { alpha_g1, beta_g2, gamma_g2, delta_g2, gamma_abc_g1: k })
}

/// Build the two arkworks public inputs for an SP1 Groth16 proof from the
/// SP1 vkey hash and the committed-values digest.
///
/// Each 32-byte value is interpreted as a big-endian integer and reduced
/// modulo the BN254 scalar field order via `Fr::from_be_bytes_mod_order`.
pub fn load_ark_public_inputs_from_bytes(
    vkey_hash: &[u8; 32],
    committed_values_digest: &[u8; 32],
) -> [Fr; 2] {
    [Fr::from_be_bytes_mod_order(vkey_hash), Fr::from_be_bytes_mod_order(committed_values_digest)]
}
2 changes: 1 addition & 1 deletion crates/verifier/src/groth16/error.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use thiserror_no_std::Error;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum Groth16Error {
Expand Down
3 changes: 3 additions & 0 deletions crates/verifier/src/groth16/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,9 @@ use error::Groth16Error;

use crate::{bn254_public_values, decode_sp1_vkey_hash, error::Error};

#[cfg(feature = "ark")]
pub mod ark_converter;

/// A verifier for Groth16 zero-knowledge proofs.
#[derive(Debug)]
pub struct Groth16Verifier;
Expand Down
3 changes: 3 additions & 0 deletions crates/verifier/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ pub use groth16::error::Groth16Error;
pub use groth16::Groth16Verifier;
mod groth16;

#[cfg(feature = "ark")]
pub use groth16::ark_converter::*;

pub use plonk::error::PlonkError;
pub use plonk::PlonkVerifier;
mod plonk;
Expand Down
2 changes: 1 addition & 1 deletion crates/verifier/src/plonk/error.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use thiserror_no_std::Error;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum PlonkError {
Expand Down
35 changes: 34 additions & 1 deletion crates/verifier/src/tests.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use sp1_sdk::{install::try_install_circuit_artifacts, SP1ProofWithPublicValues};

extern crate std;
use crate::hash_public_inputs;

Check warning on line 3 in crates/verifier/src/tests.rs

View workflow job for this annotation

GitHub Actions / Test (ARM)

unused import: `crate::hash_public_inputs`

Check warning on line 3 in crates/verifier/src/tests.rs

View workflow job for this annotation

GitHub Actions / Test (x86-64)

unused import: `crate::hash_public_inputs`

#[test]
fn test_verify_groth16() {
Expand Down Expand Up @@ -50,3 +50,36 @@
let s3_vkey_bytes = std::fs::read(s3_vkey_path).unwrap();
assert_eq!(s3_vkey_bytes, *crate::PLONK_VK_BYTES);
}

/// End-to-end check of the gnark → arkworks conversion path: load a saved
/// SP1 Groth16 proof, convert proof/vkey/public inputs to arkworks types,
/// and verify with `ark_groth16`.
///
/// (This span also removes GitHub review-widget text that the page scrape
/// interleaved into the function body, and moves the proof-file comment to
/// the line it describes, per the review nit.)
#[test]
#[cfg(feature = "ark")]
fn test_ark_groth16() {
    use ark_bn254::Bn254;
    use ark_groth16::{r1cs_to_qap::LibsnarkReduction, Groth16};

    use crate::decode_sp1_vkey_hash;
    use crate::groth16::ark_converter::*;

    // Location of the serialized SP1ProofWithPublicValues. See README.md for more information.
    let proof_file = "test_binaries/fibonacci-groth16.bin";

    // Load the saved proof and extract the proof and public inputs.
    let sp1_proof_with_public_values = SP1ProofWithPublicValues::load(proof_file).unwrap();

    let proof = sp1_proof_with_public_values.bytes();
    let public_inputs = sp1_proof_with_public_values.public_values.to_vec();

    // This vkey hash was derived by calling `vk.bytes32()` on the verifying key.
    let vkey_hash = "0x00e60860c07bfc6e4c480286c0ddbb879674eb47f84b4ef041cf858b17aa0ed1";

    // The first 4 bytes of the proof are skipped — presumably a vkey-hash
    // prefix/selector; TODO confirm against the SP1 proof byte layout.
    let proof = load_ark_proof_from_bytes(&proof[4..]).unwrap();
    let vkey = load_ark_groth16_verifying_key_from_bytes(&crate::GROTH16_VK_BYTES).unwrap();

    let public_inputs = load_ark_public_inputs_from_bytes(
        &decode_sp1_vkey_hash(vkey_hash).unwrap(),
        &hash_public_inputs(&public_inputs),
    );

    Groth16::<Bn254, LibsnarkReduction>::verify_proof(&vkey.into(), &proof, &public_inputs)
        .unwrap();
}
Loading