Skip to content

Commit

Permalink
Stark: Make batch commit of trace columns compatible with SHARP (#581)
Browse files Browse the repository at this point in the history
* add test

* make trace commitment SHARP compatible

* wip

* use powers of a single challenge for the boundary and transition coefficients

* add permutation to match sharp compatible commitments on the trace

* change trait bound from ByteConversion to Serializable

* minor refactor

* fmt, clippy

* move std feature to inner trait function in Serializable
Authored by schouhy on Oct 2, 2023
1 parent 314fafc commit c606714
Show file tree
Hide file tree
Showing 11 changed files with 300 additions and 71 deletions.
6 changes: 3 additions & 3 deletions crypto/src/merkle_tree/backends/field_element.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use crate::merkle_tree::traits::IsMerkleTreeBackend;
use lambdaworks_math::{
field::{element::FieldElement, traits::IsField},
traits::ByteConversion,
traits::Serializable,
};
use sha3::{
digest::{generic_array::GenericArray, OutputSizeUser},
Expand All @@ -28,15 +28,15 @@ impl<F, D: Digest, const NUM_BYTES: usize> IsMerkleTreeBackend
for FieldElementBackend<F, D, NUM_BYTES>
where
F: IsField,
FieldElement<F>: ByteConversion,
FieldElement<F>: Serializable,
[u8; NUM_BYTES]: From<GenericArray<u8, <D as OutputSizeUser>::OutputSize>>,
{
type Node = [u8; NUM_BYTES];
type Data = FieldElement<F>;

fn hash_data(&self, input: &FieldElement<F>) -> [u8; NUM_BYTES] {
let mut hasher = D::new();
hasher.update(input.to_bytes_be());
hasher.update(input.serialize());
hasher.finalize().into()
}

Expand Down
6 changes: 3 additions & 3 deletions crypto/src/merkle_tree/backends/field_element_vector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use std::marker::PhantomData;
use crate::merkle_tree::traits::IsMerkleTreeBackend;
use lambdaworks_math::{
field::{element::FieldElement, traits::IsField},
traits::ByteConversion,
traits::Serializable,
};
use sha3::{
digest::{generic_array::GenericArray, OutputSizeUser},
Expand All @@ -29,7 +29,7 @@ impl<F, D: Digest, const NUM_BYTES: usize> IsMerkleTreeBackend
for FieldElementVectorBackend<F, D, NUM_BYTES>
where
F: IsField,
FieldElement<F>: ByteConversion,
FieldElement<F>: Serializable,
[u8; NUM_BYTES]: From<GenericArray<u8, <D as OutputSizeUser>::OutputSize>>,
{
type Node = [u8; NUM_BYTES];
Expand All @@ -38,7 +38,7 @@ where
fn hash_data(&self, input: &Vec<FieldElement<F>>) -> [u8; NUM_BYTES] {
let mut hasher = D::new();
for element in input.iter() {
hasher.update(element.to_bytes_be());
hasher.update(element.serialize());
}
let mut result_hash = [0_u8; NUM_BYTES];
result_hash.copy_from_slice(&hasher.finalize());
Expand Down
12 changes: 11 additions & 1 deletion math/src/field/fields/montgomery_backed_prime_fields.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use crate::field::element::FieldElement;
use crate::field::errors::FieldError;
use crate::field::traits::IsPrimeField;
use crate::traits::ByteConversion;
use crate::traits::{ByteConversion, Serializable};
use crate::{
field::traits::IsField, unsigned_integer::element::UnsignedInteger,
unsigned_integer::montgomery::MontgomeryAlgorithms,
Expand Down Expand Up @@ -350,6 +350,16 @@ where
}
}

/// Serialization of a prime-field element backed by the Montgomery representation.
/// NOTE(review): `self.value()` presumably returns the canonical (non-Montgomery)
/// representative before the big-endian byte conversion — confirm against the
/// `FieldElement::value` implementation, since hashing the raw Montgomery form
/// would produce different (non-SHARP-compatible) commitments.
impl<M, const NUM_LIMBS: usize> Serializable
for FieldElement<MontgomeryBackendPrimeField<M, NUM_LIMBS>>
where
M: IsModulus<UnsignedInteger<NUM_LIMBS>> + Clone + Debug,
{
// Gated on `std` because the method allocates and returns a `Vec<u8>`.
#[cfg(feature = "std")]
fn serialize(&self) -> Vec<u8> {
self.value().to_bytes_be()
}
}
#[cfg(test)]
mod tests_u384_prime_fields {
use crate::field::element::FieldElement;
Expand Down
2 changes: 1 addition & 1 deletion math/src/traits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@ pub trait ByteConversion {

/// Serialize function without args
/// Used for serialization when formatting options are not relevant
#[cfg(feature = "std")]
pub trait Serializable {
/// Serialize `self` into a byte vector, taking no formatting arguments.
/// The method (rather than the trait) is gated on `std` because it returns
/// an allocated `Vec<u8>`; this lets the trait itself remain usable as a
/// bound in no-std builds.
#[cfg(feature = "std")]
fn serialize(&self) -> Vec<u8>;
}

Expand Down
4 changes: 2 additions & 2 deletions provers/stark/src/constraints/evaluator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use lambdaworks_math::{
fft::cpu::roots_of_unity::get_powers_of_primitive_root_coset,
field::{element::FieldElement, traits::IsFFTField},
polynomial::Polynomial,
traits::ByteConversion,
traits::Serializable,
};

#[cfg(feature = "parallel")]
Expand Down Expand Up @@ -44,7 +44,7 @@ impl<F: IsFFTField, A: AIR + AIR<Field = F>> ConstraintEvaluator<F, A> {
rap_challenges: &A::RAPChallenges,
) -> ConstraintEvaluationTable<F>
where
FieldElement<F>: ByteConversion + Send + Sync,
FieldElement<F>: Serializable + Send + Sync,
A: Send + Sync,
A::RAPChallenges: Send + Sync,
{
Expand Down
4 changes: 2 additions & 2 deletions provers/stark/src/domain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,15 @@ impl<F: IsFFTField> Domain<F> {
{
// Initial definitions
let blowup_factor = air.options().blowup_factor as usize;
let coset_offset = FieldElement::<F>::from(air.options().coset_offset);
let coset_offset = FieldElement::from(air.options().coset_offset);
let interpolation_domain_size = air.trace_length();
let root_order = air.trace_length().trailing_zeros();
// * Generate Coset
let trace_primitive_root = F::get_primitive_root_of_unity(root_order as u64).unwrap();
let trace_roots_of_unity = get_powers_of_primitive_root_coset(
root_order as u64,
interpolation_domain_size,
&FieldElement::<F>::one(),
&FieldElement::one(),
)
.unwrap();

Expand Down
6 changes: 3 additions & 3 deletions provers/stark/src/fri/fri_commitment.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use lambdaworks_math::{
traits::{IsFFTField, IsField},
},
polynomial::Polynomial,
traits::ByteConversion,
traits::Serializable,
};

use crate::config::FriMerkleTree;
Expand All @@ -14,7 +14,7 @@ use crate::config::FriMerkleTree;
pub struct FriLayer<F>
where
F: IsField,
FieldElement<F>: ByteConversion,
FieldElement<F>: Serializable,
{
pub evaluation: Vec<FieldElement<F>>,
pub merkle_tree: FriMerkleTree<F>,
Expand All @@ -25,7 +25,7 @@ where
impl<F> FriLayer<F>
where
F: IsField + IsFFTField,
FieldElement<F>: ByteConversion,
FieldElement<F>: Serializable,
{
pub fn new(
poly: &Polynomial<FieldElement<F>>,
Expand Down
6 changes: 3 additions & 3 deletions provers/stark/src/fri/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ pub mod fri_decommit;
mod fri_functions;

use lambdaworks_math::field::traits::{IsFFTField, IsField};
use lambdaworks_math::traits::ByteConversion;
use lambdaworks_math::traits::Serializable;
pub use lambdaworks_math::{
field::{element::FieldElement, fields::u64_prime_field::U64PrimeField},
polynomial::Polynomial,
Expand All @@ -25,7 +25,7 @@ pub fn fri_commit_phase<F: IsField + IsFFTField>(
domain_size: usize,
) -> (FieldElement<F>, Vec<FriLayer<F>>)
where
FieldElement<F>: ByteConversion,
FieldElement<F>: Serializable,
{
let mut domain_size = domain_size;

Expand Down Expand Up @@ -80,7 +80,7 @@ pub fn fri_query_phase<F, A>(
where
F: IsFFTField,
A: AIR<Field = F>,
FieldElement<F>: ByteConversion,
FieldElement<F>: Serializable,
{
if !fri_layers.is_empty() {
let number_of_queries = air.options().fri_number_of_queries;
Expand Down
Loading

0 comments on commit c606714

Please sign in to comment.