Merge branch 'master' into faster-benchmarks-and-starknet-field
mmagician authored Sep 11, 2023
2 parents ff56bc5 + 2369347 commit 1ff0844
Showing 11 changed files with 136 additions and 60 deletions.
2 changes: 1 addition & 1 deletion bench-templates/Cargo.toml
@@ -16,7 +16,7 @@ rust-version = "1.63"
################################# Dependencies ################################

[dependencies]
criterion = { version = "0.4.0", features = [ "html_reports" ] }
criterion = { version = "0.5.1", features = [ "html_reports" ] }
ark-std = { version = "0.4.0", default-features = false }
ark-ec = { version = "0.4.2", path = "../ec", default-features = false }
ark-ff = { version = "0.4.2", path = "../ff", default-features = false }
27 changes: 21 additions & 6 deletions bench-templates/src/macros/pairing.rs
@@ -73,12 +73,27 @@ macro_rules! pairing_bench {
})
},
);
pairing.bench_function(&format!("Full Pairing for {}", stringify!($curve)), |b| {
b.iter(|| {
i = (i + 1) % SAMPLES;
<$curve as Pairing>::multi_pairing([g1s[i]], [g2s[i]])
})
});

const NUM_PAIRS: usize = 10;

for pairs in 1..=NUM_PAIRS {
pairing.bench_function(
&format!(
"Multi Pairing for {} with {} pairs",
stringify!($curve),
pairs
),
|b| {
b.iter(|| {
i = (i + 1) % (SAMPLES - NUM_PAIRS);
<$curve as Pairing>::multi_pairing(
g1s[(i)..(i + pairs)].to_vec(),
g2s[(i)..(i + pairs)].to_vec(),
)
})
},
);
}
}

$crate::criterion_group!(benches, pairing);
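
The loop above replaces the single "Full Pairing" benchmark with one `multi_pairing` benchmark per input size from 1 to `NUM_PAIRS`. As context (not part of the diff), the sketch below, generic over any `Pairing` engine, shows the identity those benchmarks exercise: a multi-pairing over n pairs equals the product of the n individual pairings but shares a single final exponentiation, so the per-pair cost should be dominated by the Miller loop. The helper name is illustrative only.

```rust
// Sketch only (not part of this commit), generic over any pairing engine.
use ark_ec::{
    pairing::{Pairing, PairingOutput},
    CurveGroup,
};
use ark_std::{test_rng, UniformRand};

fn multi_pairing_matches_product<E: Pairing>(n: usize) {
    let mut rng = test_rng();
    let g1s: Vec<E::G1Affine> = (0..n).map(|_| E::G1::rand(&mut rng).into_affine()).collect();
    let g2s: Vec<E::G2Affine> = (0..n).map(|_| E::G2::rand(&mut rng).into_affine()).collect();

    // One Miller loop per pair, but a single shared final exponentiation.
    let multi: PairingOutput<E> = E::multi_pairing(g1s.clone(), g2s.clone());

    // n full pairings: n Miller loops and n final exponentiations.
    let product = g1s
        .iter()
        .zip(&g2s)
        .map(|(a, b)| E::pairing(*a, *b).0)
        .product::<E::TargetField>();

    assert_eq!(multi.0, product);
}
```
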
39 changes: 30 additions & 9 deletions ec/src/models/bw6/g2.rs
@@ -46,6 +46,16 @@ impl<P: BW6Config> Default for G2Prepared<P> {
}
}

// Conversion from `G2HomProjective` into `G2Affine`.
impl<P: BW6Config> From<G2HomProjective<P>> for G2Affine<P> {
fn from(q: G2HomProjective<P>) -> Self {
let z_inv = q.z.inverse().unwrap();
let x = q.x * &z_inv;
let y = q.y * &z_inv;
G2Affine::<P>::new_unchecked(x, y)
}
}

impl<P: BW6Config> From<G2Affine<P>> for G2Prepared<P> {
fn from(q: G2Affine<P>) -> Self {
if q.infinity {
@@ -56,7 +66,7 @@ impl<P: BW6Config> From<G2Affine<P>> for G2Prepared<P> {
};
}

// f_{u+1,Q}(P)
// f_{u,Q}(P)
let mut ell_coeffs_1 = vec![];
let mut r = G2HomProjective::<P> {
x: q.x,
@@ -71,23 +81,34 @@ impl<P: BW6Config> From<G2Affine<P>> for G2Prepared<P> {
ell_coeffs_1.push(r.add_in_place(&q));
}
}
// TODO: this is probably the slowest part
// While G2 preparation is overall faster due to shortened 2nd loop,
// The inversion could probably be avoided by using Hom(P) + Hom(Q) addition,
// instead of mixed addition as is currently done.
let r_affine: G2Affine<P> = r.into();
// Swap the signs of `qu`, `r` & `neg_qu` if the loop count is negative.
let (qu, neg_qu) = if P::ATE_LOOP_COUNT_1_IS_NEGATIVE {
(-r_affine, r_affine)
} else {
(r_affine, -r_affine)
};

// f_{u^3-u^2-u,Q}(P)
let mut ell_coeffs_2 = vec![];
let mut r = G2HomProjective::<P> {
x: q.x,
y: q.y,
r = G2HomProjective::<P> {
x: qu.x,
y: qu.y,
z: P::Fp::one(),
};
ell_coeffs_1.push(r.clone().add_in_place(&q));

let negq = -q;
let mut ell_coeffs_2 = vec![];

// f_{u^2-u-1,[u]Q}(P)
for bit in P::ATE_LOOP_COUNT_2.iter().rev().skip(1) {
ell_coeffs_2.push(r.double_in_place());

match bit {
1 => ell_coeffs_2.push(r.add_in_place(&q)),
-1 => ell_coeffs_2.push(r.add_in_place(&negq)),
1 => ell_coeffs_2.push(r.add_in_place(&qu)),
-1 => ell_coeffs_2.push(r.add_in_place(&neg_qu)),
_ => continue,
}
}
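
For context (not part of the diff): `qu` is the affine form of [u]Q, i.e. the point accumulated by the first loop, and the second batch of coefficients is now prepared with respect to [u]Q instead of Q. With the usual notation f_{n,Q} for a length-n Miller function (vertical-line factors dropped, since they vanish under the final exponentiation), the decomposition being used is, as a sketch:

```latex
% Miller-function composition: f_{mn,Q} = f_{m,Q}^{\,n} \cdot f_{n,\,[m]Q}.
% With m = u and n = u^2 - u - 1 this gives
f_{u^3 - u^2 - u,\,Q}(P)
  \;=\; f_{u,\,Q}(P)^{\,u^2 - u - 1} \cdot f_{u^2 - u - 1,\,[u]Q}(P),
% so the second loop only has to walk the shorter exponent u^2 - u - 1,
% using the precomputed qu = [u]Q and neg_qu = -[u]Q as its base points.
```
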
41 changes: 31 additions & 10 deletions ec/src/models/bw6/mod.rs
@@ -30,6 +30,7 @@ pub trait BW6Config: 'static + Eq + Sized {
const X_MINUS_1_DIV_3: <Self::Fp as PrimeField>::BigInt;
const ATE_LOOP_COUNT_1: &'static [u64];
const ATE_LOOP_COUNT_1_IS_NEGATIVE: bool;
// X^2 - X - 1
const ATE_LOOP_COUNT_2: &'static [i8];
const ATE_LOOP_COUNT_2_IS_NEGATIVE: bool;
const TWIST_TYPE: TwistType;
@@ -96,7 +97,8 @@ pub trait BW6Config: 'static + Eq + Sized {
})
.unzip::<_, _, Vec<_>, Vec<_>>();

let mut f_1 = cfg_chunks_mut!(pairs_1, 4)
// compute f_u which we can later re-use for the 2nd loop
let mut f_u = cfg_chunks_mut!(pairs_1, 4)
.map(|pairs| {
let mut f = <BW6<Self> as Pairing>::TargetField::one();
for i in BitIteratorBE::without_leading_zeros(Self::ATE_LOOP_COUNT_1).skip(1) {
@@ -114,26 +116,45 @@
})
.product::<<BW6<Self> as Pairing>::TargetField>();

let f_u_inv;

if Self::ATE_LOOP_COUNT_1_IS_NEGATIVE {
f_1.cyclotomic_inverse_in_place();
f_u_inv = f_u;
f_u.cyclotomic_inverse_in_place();
} else {
f_u_inv = f_u.cyclotomic_inverse().unwrap();
}

// f_1(P) = f_(u+1)(P) = f_u(P) * l([u]q, q)(P)
let mut f_1 = cfg_chunks_mut!(pairs_1, 4)
.map(|pairs| {
pairs.iter_mut().fold(f_u, |mut f, (p, coeffs)| {
BW6::<Self>::ell(&mut f, &coeffs.next().unwrap(), &p.0);
f
})
})
.product::<<BW6<Self> as Pairing>::TargetField>();

let mut f_2 = cfg_chunks_mut!(pairs_2, 4)
.map(|pairs| {
let mut f = <<BW6<Self> as Pairing>::TargetField>::one();
let mut f = f_u;
for i in (1..Self::ATE_LOOP_COUNT_2.len()).rev() {
if i != Self::ATE_LOOP_COUNT_2.len() - 1 {
f.square_in_place();
}
f.square_in_place();

for (p, ref mut coeffs) in pairs.iter_mut() {
BW6::<Self>::ell(&mut f, &coeffs.next().unwrap(), &p.0);
}

let bit = Self::ATE_LOOP_COUNT_2[i - 1];
if bit == 1 || bit == -1 {
for &mut (p, ref mut coeffs) in pairs.iter_mut() {
BW6::<Self>::ell(&mut f, &coeffs.next().unwrap(), &p.0);
}
if bit == 1 {
f *= &f_u;
} else if bit == -1 {
f *= &f_u_inv;
} else {
continue;
}
for &mut (p, ref mut coeffs) in pairs.iter_mut() {
BW6::<Self>::ell(&mut f, &coeffs.next().unwrap(), &p.0);
}
}
f
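
For context (not part of the diff), this is the other half of the same decomposition, sketched below: `f_u` is completed to f_{u+1} with one extra line evaluation (the coefficient pushed by `ell_coeffs_1.push(r.clone().add_in_place(&q))` in `g2.rs`), and the second loop folds the exponentiation of `f_u` into its own square-and-multiply pass over `ATE_LOOP_COUNT_2`, multiplying in `f_u` or `f_u_inv` at the signed bits.

```latex
% First loop:   f_{u+1,\,Q}(P) = f_{u,\,Q}(P) \cdot \ell_{[u]Q,\,Q}(P).
% Second loop:  seeding the accumulator with f_u and squaring it alongside the
% Miller accumulation for f_{u^2-u-1,\,[u]Q} yields
f_2 \;=\; f_{u,\,Q}(P)^{\,u^2 - u - 1} \cdot f_{u^2 - u - 1,\,[u]Q}(P).
```
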
2 changes: 1 addition & 1 deletion ff/src/fields/field_hashers/mod.rs
@@ -81,7 +81,7 @@ impl<F: Field, H: Default + DynDigest + Clone, const SEC_PARAM: usize> HashToFie
);
base_prime_field_elems.push(val);
}
let f = F::from_base_prime_field_elems(&base_prime_field_elems).unwrap();
let f = F::from_base_prime_field_elems(base_prime_field_elems.drain(..)).unwrap();
output.push(f);
}

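
Aside (not part of the diff): `drain(..)` suits the new iterator-based `from_base_prime_field_elems` because it yields the buffered elements by value and leaves the Vec empty, with its capacity intact, for the next iteration of the surrounding loop. A minimal standalone illustration of that behaviour:

```rust
// Standalone illustration of Vec::drain; unrelated to field arithmetic.
fn main() {
    let mut buf = vec![1u64, 2, 3];
    let cap_before = buf.capacity();

    // `drain(..)` yields owned elements, like `into_iter()`, but keeps `buf` usable.
    let sum: u64 = buf.drain(..).sum();
    assert_eq!(sum, 6);

    // The buffer is empty afterwards and can be refilled without reallocating.
    assert!(buf.is_empty());
    assert!(buf.capacity() >= cap_before);
}
```
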
5 changes: 4 additions & 1 deletion ff/src/fields/mod.rs
@@ -8,6 +8,7 @@ use ark_serialize::{
use ark_std::{
fmt::{Debug, Display},
hash::Hash,
iter::IntoIterator,
ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign},
vec::Vec,
};
@@ -224,7 +225,9 @@ pub trait Field:

/// Convert an iterator of base prime field elements into a field element.
/// If the number of elements != Self::extension_degree(), must return None.
fn from_base_prime_field_elems(elems: &[Self::BasePrimeField]) -> Option<Self>;
fn from_base_prime_field_elems(
elems: impl IntoIterator<Item = Self::BasePrimeField>,
) -> Option<Self>;

/// Constructs a field element from a single base prime field element.
/// ```
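
A minimal usage sketch of the new signature (not part of the diff; it assumes the `bls12_381` test-curve types `Fq` and `Fq2` from `ark-test-curves`, which the extension-field tests below already use): callers may now pass any `IntoIterator` of base prime field elements, such as a `Vec` by value or an array, and the constructor still returns `None` when the element count does not match the extension degree.

```rust
// Sketch only (not part of this commit).
use ark_ff::Field;
use ark_test_curves::bls12_381::{Fq, Fq2};

fn main() {
    // Exactly `extension_degree()` elements (2 for Fq2) succeed.
    let x = Fq2::from_base_prime_field_elems(vec![Fq::from(3u64), Fq::from(5u64)]).unwrap();
    assert_eq!(x, Fq2::new(Fq::from(3u64), Fq::from(5u64)));

    // Too few or too many elements yield None.
    assert!(Fq2::from_base_prime_field_elems([Fq::from(3u64)]).is_none());
    assert!(Fq2::from_base_prime_field_elems([Fq::from(1u64); 3]).is_none());
}
```
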
34 changes: 20 additions & 14 deletions ff/src/fields/models/cubic_extension.rs
@@ -6,7 +6,7 @@ use ark_std::{
cmp::{Ord, Ordering, PartialOrd},
fmt,
io::{Read, Write},
iter::Chain,
iter::{Chain, IntoIterator},
ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign},
vec::Vec,
};
@@ -216,17 +216,22 @@ impl<P: CubicExtConfig> Field for CubicExtField<P> {
)
}

fn from_base_prime_field_elems(elems: &[Self::BasePrimeField]) -> Option<Self> {
if elems.len() != (Self::extension_degree() as usize) {
return None;
}
fn from_base_prime_field_elems(
elems: impl IntoIterator<Item = Self::BasePrimeField>,
) -> Option<Self> {
let mut elems = elems.into_iter();
let elems = elems.by_ref();
let base_ext_deg = P::BaseField::extension_degree() as usize;
Some(Self::new(
P::BaseField::from_base_prime_field_elems(&elems[0..base_ext_deg]).unwrap(),
P::BaseField::from_base_prime_field_elems(&elems[base_ext_deg..2 * base_ext_deg])
.unwrap(),
P::BaseField::from_base_prime_field_elems(&elems[2 * base_ext_deg..]).unwrap(),
))
let element = Some(Self::new(
P::BaseField::from_base_prime_field_elems(elems.take(base_ext_deg))?,
P::BaseField::from_base_prime_field_elems(elems.take(base_ext_deg))?,
P::BaseField::from_base_prime_field_elems(elems.take(base_ext_deg))?,
));
if elems.next().is_some() {
None
} else {
element
}
}

#[inline]
@@ -734,7 +739,7 @@ mod cube_ext_tests {
for _ in 0..d {
random_coeffs.push(Fq::rand(&mut test_rng()));
}
let res = Fq6::from_base_prime_field_elems(&random_coeffs);
let res = Fq6::from_base_prime_field_elems(random_coeffs);
assert_eq!(res, None);
}
// Test on slice lengths that are equal to the extension degree
@@ -745,12 +750,13 @@
for _ in 0..ext_degree {
random_coeffs.push(Fq::rand(&mut test_rng()));
}
let actual = Fq6::from_base_prime_field_elems(&random_coeffs).unwrap();

let expected_0 = Fq2::new(random_coeffs[0], random_coeffs[1]);
let expected_1 = Fq2::new(random_coeffs[2], random_coeffs[3]);
let expected_2 = Fq2::new(random_coeffs[4], random_coeffs[5]);
let expected = Fq6::new(expected_0, expected_1, expected_2);

let actual = Fq6::from_base_prime_field_elems(random_coeffs).unwrap();
assert_eq!(actual, expected);
}
}
Expand All @@ -766,7 +772,7 @@ mod cube_ext_tests {
random_coeffs[0] = random_coeff;
assert_eq!(
res,
Fq6::from_base_prime_field_elems(&random_coeffs).unwrap()
Fq6::from_base_prime_field_elems(random_coeffs).unwrap()
);
}
}
10 changes: 7 additions & 3 deletions ff/src/fields/models/fp/mod.rs
@@ -232,11 +232,15 @@ impl<P: FpConfig<N>, const N: usize> Field for Fp<P, N> {
iter::once(*self)
}

fn from_base_prime_field_elems(elems: &[Self::BasePrimeField]) -> Option<Self> {
if elems.len() != (Self::extension_degree() as usize) {
fn from_base_prime_field_elems(
elems: impl IntoIterator<Item = Self::BasePrimeField>,
) -> Option<Self> {
let mut elems = elems.into_iter();
let elem = elems.next()?;
if elems.next().is_some() {
return None;
}
Some(elems[0])
Some(elem)
}

#[inline]
30 changes: 18 additions & 12 deletions ff/src/fields/models/quadratic_extension.rs
@@ -6,7 +6,7 @@ use ark_std::{
cmp::{Ord, Ordering, PartialOrd},
fmt,
io::{Read, Write},
iter::Chain,
iter::{Chain, IntoIterator},
ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign},
vec::Vec,
};
@@ -242,15 +242,21 @@ impl<P: QuadExtConfig> Field for QuadExtField<P> {
.chain(self.c1.to_base_prime_field_elements())
}

fn from_base_prime_field_elems(elems: &[Self::BasePrimeField]) -> Option<Self> {
if elems.len() != (Self::extension_degree() as usize) {
return None;
}
fn from_base_prime_field_elems(
elems: impl IntoIterator<Item = Self::BasePrimeField>,
) -> Option<Self> {
let mut elems = elems.into_iter();
let elems = elems.by_ref();
let base_ext_deg = P::BaseField::extension_degree() as usize;
Some(Self::new(
P::BaseField::from_base_prime_field_elems(&elems[0..base_ext_deg]).unwrap(),
P::BaseField::from_base_prime_field_elems(&elems[base_ext_deg..]).unwrap(),
))
let element = Some(Self::new(
P::BaseField::from_base_prime_field_elems(elems.take(base_ext_deg))?,
P::BaseField::from_base_prime_field_elems(elems.take(base_ext_deg))?,
));
if elems.next().is_some() {
None
} else {
element
}
}

fn square(&self) -> Self {
@@ -794,7 +800,7 @@ mod quad_ext_tests {
for _ in 0..d {
random_coeffs.push(Fq::rand(&mut test_rng()));
}
let res = Fq2::from_base_prime_field_elems(&random_coeffs);
let res = Fq2::from_base_prime_field_elems(random_coeffs);
assert_eq!(res, None);
}
// Test on slice lengths that are equal to the extension degree
@@ -805,8 +811,8 @@
for _ in 0..ext_degree {
random_coeffs.push(Fq::rand(&mut test_rng()));
}
let actual = Fq2::from_base_prime_field_elems(&random_coeffs).unwrap();
let expected = Fq2::new(random_coeffs[0], random_coeffs[1]);
let actual = Fq2::from_base_prime_field_elems(random_coeffs).unwrap();
assert_eq!(actual, expected);
}
}
Expand All @@ -822,7 +828,7 @@ mod quad_ext_tests {
random_coeffs[0] = random_coeff;
assert_eq!(
res,
Fq2::from_base_prime_field_elems(&random_coeffs).unwrap()
Fq2::from_base_prime_field_elems(random_coeffs).unwrap()
);
}
}
2 changes: 1 addition & 1 deletion poly/Cargo.toml
@@ -23,7 +23,7 @@ hashbrown = { version = "0.14.0"}

[dev-dependencies]
ark-test-curves = { path = "../test-curves", default-features = false, features = [ "bls12_381_curve", "bn384_small_two_adicity_curve", "mnt4_753_curve"] }
criterion = "0.4.0"
criterion = "0.5.1"


[features]
4 changes: 2 additions & 2 deletions test-templates/src/h2c/mod.rs
@@ -63,8 +63,8 @@ macro_rules! test_h2c {
let y = read_fq_vec(&v.p.y);
let got = g1_mapper.hash(&v.msg.as_bytes()).unwrap();
let want = Affine::<$group>::new_unchecked(
<$field>::from_base_prime_field_elems(&x[..]).unwrap(),
<$field>::from_base_prime_field_elems(&y[..]).unwrap(),
<$field>::from_base_prime_field_elems(x).unwrap(),
<$field>::from_base_prime_field_elems(y).unwrap(),
);
assert!(got.is_on_curve());
assert!(want.is_on_curve());
