Skip to content

Commit

Permalink
fixing endianness to big across functions
Browse files Browse the repository at this point in the history
  • Loading branch information
anupsv committed Jan 12, 2025
1 parent 4ad14ea commit fe7383a
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 33 deletions.
8 changes: 0 additions & 8 deletions src/consts.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,3 @@ pub const FIAT_SHAMIR_PROTOCOL_DOMAIN: &[u8] = b"EIGENDA_FSBLOBVERIFY_V1_"; // A

/// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#blob
pub const RANDOM_CHALLENGE_KZG_BATCH_DOMAIN: &[u8] = b"EIGENDA_RCKZGBATCH___V1_"; // Adapted from 4844

pub const KZG_ENDIANNESS: Endianness = Endianness::Big; // Choose between Big or Little.

#[derive(Debug, Clone, Copy)]
pub enum Endianness {
Big,
Little,
}
11 changes: 2 additions & 9 deletions src/helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,7 @@ use std::cmp;

use crate::{
arith,
consts::{
Endianness, BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS, SIZE_OF_G1_AFFINE_COMPRESSED,
SIZE_OF_G2_AFFINE_COMPRESSED,
},
consts::{BYTES_PER_FIELD_ELEMENT, SIZE_OF_G1_AFFINE_COMPRESSED, SIZE_OF_G2_AFFINE_COMPRESSED},
errors::KzgError,
traits::ReadPointFromBytes,
};
Expand Down Expand Up @@ -160,11 +157,7 @@ pub fn to_byte_array(data_fr: &[Fr], max_output_size: usize) -> Vec<u8> {
// Using enumerate().take(n) to process elements up to n
for (i, element) in data_fr.iter().enumerate().take(n) {
// Convert field element to big-endian bytes
// Big-endian is now the fixed default. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
let v: Vec<u8> = match KZG_ENDIANNESS {
Endianness::Big => element.into_bigint().to_bytes_be(), // Big-endian conversion
Endianness::Little => element.into_bigint().to_bytes_le(), // Little-endian conversion
};
let v: Vec<u8> = element.into_bigint().to_bytes_be();

// Calculate start and end indices for this element in output buffer
let start = i * BYTES_PER_FIELD_ELEMENT;
Expand Down
20 changes: 4 additions & 16 deletions src/kzg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,7 @@ use crate::{
traits::ReadPointFromBytes,
};

use crate::consts::{
Endianness, FIAT_SHAMIR_PROTOCOL_DOMAIN, KZG_ENDIANNESS, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN,
};
use crate::consts::{FIAT_SHAMIR_PROTOCOL_DOMAIN, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN};
use crate::helpers::is_on_curve_g1;
use ark_bn254::{Bn254, Fr, G1Affine, G1Projective, G2Affine, G2Projective};
use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM};
Expand Down Expand Up @@ -916,11 +914,7 @@ impl KZG {
let msg_digest = Sha256::digest(msg);
let hash_elements = msg_digest.as_slice();

// Big-endian is now the fixed default. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
let fr_element: Fr = match KZG_ENDIANNESS {
Endianness::Big => Fr::from_be_bytes_mod_order(hash_elements),
Endianness::Little => Fr::from_le_bytes_mod_order(hash_elements),
};
let fr_element: Fr = Fr::from_be_bytes_mod_order(hash_elements);

fr_element
}
Expand Down Expand Up @@ -965,10 +959,7 @@ impl KZG {
// Step 2: Copy the number of field elements (blob polynomial length)
// Convert to big-endian bytes
// Big-endian is now the fixed default. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
let number_of_field_elements = match KZG_ENDIANNESS {
Endianness::Big => blob_poly.len().to_be_bytes(),
Endianness::Little => blob_poly.len().to_le_bytes(),
};
let number_of_field_elements = blob_poly.len().to_be_bytes();
digest_bytes[offset..offset + 8].copy_from_slice(&number_of_field_elements);
offset += 8;

Expand Down Expand Up @@ -1222,10 +1213,7 @@ impl KZG {
// Convert number of commitments to bytes and copy to buffer
// Uses big-endian byte order
// Big-endian is now the fixed default. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
let n_bytes: [u8; 8] = match KZG_ENDIANNESS {
Endianness::Big => n.to_be_bytes(),
Endianness::Little => n.to_le_bytes(),
};
let n_bytes: [u8; 8] = n.to_be_bytes();
data_to_be_hashed[32..40].copy_from_slice(&n_bytes);

let target_slice = &mut data_to_be_hashed[24..24 + (n * 8)];
Expand Down

0 comments on commit fe7383a

Please sign in to comment.