Skip to content

Commit

Permalink
removing reading g2, fixing g2 tau points. clean up
Browse files Browse the repository at this point in the history
  • Loading branch information
anupsv committed Jan 12, 2025
1 parent 4ad14ea commit 844ac2b
Show file tree
Hide file tree
Showing 13 changed files with 106 additions and 355 deletions.
9 changes: 1 addition & 8 deletions benches/bench_g1_ifft.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,7 @@ fn generate_powers_of_2(limit: u64) -> Vec<usize> {

fn bench_g1_ifft(c: &mut Criterion) {
c.bench_function("bench_g1_ifft", |b| {
let kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
3000,
3000,
)
.unwrap();
let kzg = KZG::setup("tests/test-files/mainnet-data/g1.131072.point", 3000, 3000).unwrap();
b.iter(|| {
for power in &generate_powers_of_2(3000) {
kzg.g1_ifft(black_box(*power)).unwrap();
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_commit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_commit(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_commit_large_blobs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_commit(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.32mb.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
524288,
)
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_proof(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
13 changes: 1 addition & 12 deletions benches/bench_kzg_setup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,22 +4,11 @@ use std::time::Duration;

fn bench_kzg_setup(c: &mut Criterion) {
c.bench_function("bench_kzg_setup", |b| {
b.iter(|| {
KZG::setup(
"tests/test-files/g1.point",
"tests/test-files/g2.point",
"tests/test-files/g2.point.powerOf2",
3000,
3000,
)
.unwrap()
});
b.iter(|| KZG::setup("tests/test-files/g1.point", 3000, 3000).unwrap());

b.iter(|| {
KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
2 changes: 0 additions & 2 deletions benches/bench_kzg_verify.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@ fn bench_kzg_verify(c: &mut Criterion) {
let mut rng = rand::thread_rng();
let mut kzg = KZG::setup(
"tests/test-files/mainnet-data/g1.131072.point",
"",
"tests/test-files/mainnet-data/g2.point.powerOf2",
268435456,
131072,
)
Expand Down
59 changes: 58 additions & 1 deletion src/consts.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
pub const BYTES_PER_FIELD_ELEMENT: usize = 32;
pub const SIZE_OF_G1_AFFINE_COMPRESSED: usize = 32; // in bytes
pub const SIZE_OF_G2_AFFINE_COMPRESSED: usize = 64; // in bytes

/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#blob
pub const FIAT_SHAMIR_PROTOCOL_DOMAIN: &[u8] = b"EIGENDA_FSBLOBVERIFY_V1_"; // Adapted from 4844
Expand All @@ -15,3 +14,61 @@ pub enum Endianness {
Big,
Little,
}

// G2 tau (the SRS secret tau multiplied into the G2 generator) for the test
// SRS of size 3000. Only meant for testing purposes.
// Layout: four [u64; 4] limb arrays in the order x.c0, x.c1, y.c0, y.c1
// (see the trailing comments on each array).
// NOTE(review): the limb encoding (endianness / Montgomery vs. canonical
// form) is not visible here — confirm against the code that reconstructs
// the G2 point from these limbs.
pub const G2_TAU_FOR_TEST_SRS_3000: [[u64; 4]; 4] = [
[
6210180350256028851,
1155870131248430255,
5195628682048044774,
1260504166784820003,
], // x_c0
[
5796639583410086988,
1670781852330703136,
9975496901009692568,
3351822507251002947,
], // x_c1
[
11145494475421916991,
4671284253524040022,
18315320503610857882,
2978668873662892197,
], // y_c0
[
6336249489527546243,
1821275851175057403,
15993261854023940214,
1208597503336813826,
], // y_c1
];

// G2 tau for the MAINNET SRS points.
// Layout: four [u64; 4] limb arrays in the order x.c0, x.c1, y.c0, y.c1.
// NOTE(review): every limb here is byte-identical to
// G2_TAU_FOR_TEST_SRS_3000 above. A mainnet tau equal to a size-3000 test
// tau looks wrong — verify these values against the actual mainnet SRS
// before relying on them for verification.
pub const G2_TAU_FOR_MAINNET_SRS: [[u64; 4]; 4] = [
[
6210180350256028851,
1155870131248430255,
5195628682048044774,
1260504166784820003,
], // x_c0
[
5796639583410086988,
1670781852330703136,
9975496901009692568,
3351822507251002947,
], // x_c1
[
11145494475421916991,
4671284253524040022,
18315320503610857882,
2978668873662892197,
], // y_c0
[
6336249489527546243,
1821275851175057403,
15993261854023940214,
1208597503336813826,
], // y_c1
];

// Number of G1 points in the mainnet SRS (131072 = 2^17).
pub const MAINNET_SRS_G1_SIZE: usize = 131072;
76 changes: 3 additions & 73 deletions src/helpers.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,13 @@
use ark_bn254::{Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};
use ark_bn254::{Fq, Fq2, Fr, G1Affine, G1Projective, G2Projective};
use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::{sbb, BigInt, BigInteger, Field, LegendreSymbol, PrimeField};
use ark_ff::{sbb, BigInt, BigInteger, Field, PrimeField};
use ark_std::{str::FromStr, vec::Vec, One, Zero};
use crossbeam_channel::Receiver;
use std::cmp;

use crate::{
arith,
consts::{
Endianness, BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS, SIZE_OF_G1_AFFINE_COMPRESSED,
SIZE_OF_G2_AFFINE_COMPRESSED,
},
consts::{Endianness, BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS, SIZE_OF_G1_AFFINE_COMPRESSED},
errors::KzgError,
traits::ReadPointFromBytes,
};
Expand Down Expand Up @@ -140,8 +137,6 @@ pub fn to_fr_array(data: &[u8]) -> Vec<Fr> {
///
/// let mut kzg = KZG::setup(
/// "tests/test-files/mainnet-data/g1.131072.point",
/// "",
/// "tests/test-files/mainnet-data/g2.point.powerOf2",
/// 268435456,
/// 131072,
/// ).unwrap();
Expand Down Expand Up @@ -247,71 +242,6 @@ pub fn lexicographically_largest(z: &Fq) -> bool {
borrow == 0
}

/// Decodes a compressed BN254 G2 point from 64 big-endian bytes
/// (gnark-crypto style encoding).
///
/// The two most-significant bits of byte 0 carry the compression flags:
/// `01` = point at infinity, `10` = lexicographically smallest y,
/// `11` = lexicographically largest y. The remaining bits encode
/// x = c0 + c1*u with c1 serialized first.
///
/// # Errors
/// Returns an error if the slice is not exactly
/// `SIZE_OF_G2_AFFINE_COMPRESSED` bytes long, the infinity encoding
/// carries non-zero payload bits, x^3 + b has no square root, or the
/// decoded point is off-curve or outside the correct subgroup.
pub fn read_g2_point_from_bytes_be(g2_bytes_be: &[u8]) -> Result<G2Affine, &str> {
    if g2_bytes_be.len() != SIZE_OF_G2_AFFINE_COMPRESSED {
        return Err("not enough bytes for g2 point");
    }

    // Compression flags occupy the top two bits of the first byte.
    let m_mask: u8 = 0b11 << 6;
    let m_compressed_infinity: u8 = 0b01 << 6;
    let m_compressed_smallest: u8 = 0b10 << 6;
    let m_compressed_largest: u8 = 0b11 << 6;

    let m_data = g2_bytes_be[0] & m_mask;

    if m_data == m_compressed_infinity {
        // A well-formed infinity encoding has every payload bit zero.
        if !is_zeroed(
            g2_bytes_be[0] & !m_mask,
            g2_bytes_be[1..SIZE_OF_G2_AFFINE_COMPRESSED].to_vec(),
        ) {
            return Err("point at infinity not coded properly for g2");
        }
        return Ok(G2Affine::zero());
    }

    // Strip the flag bits and deserialize x; gnark stores c1 before c0.
    let mut x_bytes = [0u8; SIZE_OF_G2_AFFINE_COMPRESSED];
    x_bytes.copy_from_slice(g2_bytes_be);
    x_bytes[0] &= !m_mask;
    let half_size = SIZE_OF_G2_AFFINE_COMPRESSED / 2;

    let c1 = Fq::from_be_bytes_mod_order(&x_bytes[..half_size]);
    let c0 = Fq::from_be_bytes_mod_order(&x_bytes[half_size..]);
    let x = Fq2::new(c0, c1);

    // Recover y from the twist curve equation y^2 = x^3 + b'.
    let y_squared = x * x * x + get_b_twist_curve_coeff();
    if y_squared.legendre() == LegendreSymbol::QuadraticNonResidue {
        return Err("invalid compressed coordinate: square root doesn't exist");
    }
    // FIX: propagate the (theoretically unreachable after the legendre
    // check) missing-root case as an Err instead of panicking via unwrap().
    let mut y_sqrt = y_squared.sqrt().ok_or("no square root found")?;

    // Select the root matching the encoded flag. Lexicographic order is
    // decided on c1, falling back to c0 when c1 is zero.
    let is_largest = if y_sqrt.c1.0.is_zero() {
        lexicographically_largest(&y_sqrt.c0)
    } else {
        lexicographically_largest(&y_sqrt.c1)
    };
    if is_largest {
        if m_data == m_compressed_smallest {
            y_sqrt.neg_in_place();
        }
    } else if m_data == m_compressed_largest {
        y_sqrt.neg_in_place();
    }

    let point = G2Affine::new_unchecked(x, y_sqrt);
    // BUG FIX: the original condition (`!in_subgroup && on_curve`) only
    // rejected on-curve points outside the subgroup and silently ACCEPTED
    // off-curve points. Reject anything that is off-curve or outside the
    // r-torsion subgroup.
    if !is_on_curve_g2(&G2Projective::from(point))
        || !point.is_in_correct_subgroup_assuming_on_curve()
    {
        return Err("point couldn't be created");
    }
    Ok(point)
}

pub fn read_g1_point_from_bytes_be(g1_bytes_be: &[u8]) -> Result<G1Affine, &str> {
if g1_bytes_be.len() != SIZE_OF_G1_AFFINE_COMPRESSED {
return Err("not enough bytes for g1 point");
Expand Down
Loading

0 comments on commit 844ac2b

Please sign in to comment.