From fe7383a25ac9d3d8f058642e43c33ca24fd65b2d Mon Sep 17 00:00:00 2001
From: anupsv
Date: Sun, 12 Jan 2025 09:19:56 +0530
Subject: [PATCH] fixing endianess to big across functions

---
 src/consts.rs  |  8 --------
 src/helpers.rs | 11 ++---------
 src/kzg.rs     | 20 ++++----------------
 3 files changed, 6 insertions(+), 33 deletions(-)

diff --git a/src/consts.rs b/src/consts.rs
index 31dfb6d..e1cecca 100644
--- a/src/consts.rs
+++ b/src/consts.rs
@@ -7,11 +7,3 @@ pub const FIAT_SHAMIR_PROTOCOL_DOMAIN: &[u8] = b"EIGENDA_FSBLOBVERIFY_V1_"; // A
 
 /// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#blob
 pub const RANDOM_CHALLENGE_KZG_BATCH_DOMAIN: &[u8] = b"EIGENDA_RCKZGBATCH___V1_"; // Adapted from 4844
-
-pub const KZG_ENDIANNESS: Endianness = Endianness::Big; // Choose between Big or Little.
-
-#[derive(Debug, Clone, Copy)]
-pub enum Endianness {
-    Big,
-    Little,
-}
diff --git a/src/helpers.rs b/src/helpers.rs
index db8b300..13f31e6 100644
--- a/src/helpers.rs
+++ b/src/helpers.rs
@@ -7,10 +7,7 @@ use std::cmp;
 
 use crate::{
     arith,
-    consts::{
-        Endianness, BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS, SIZE_OF_G1_AFFINE_COMPRESSED,
-        SIZE_OF_G2_AFFINE_COMPRESSED,
-    },
+    consts::{BYTES_PER_FIELD_ELEMENT, SIZE_OF_G1_AFFINE_COMPRESSED, SIZE_OF_G2_AFFINE_COMPRESSED},
     errors::KzgError,
     traits::ReadPointFromBytes,
 };
@@ -160,11 +157,7 @@ pub fn to_byte_array(data_fr: &[Fr], max_output_size: usize) -> Vec<u8> {
     // Using enumerate().take(n) to process elements up to n
     for (i, element) in data_fr.iter().enumerate().take(n) {
         // Convert field element to bytes based on configured endianness
-        // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let v: Vec<u8> = match KZG_ENDIANNESS {
-            Endianness::Big => element.into_bigint().to_bytes_be(), // Big-endian conversion
-            Endianness::Little => element.into_bigint().to_bytes_le(), // Little-endian conversion
-        };
+        let v: Vec<u8> = element.into_bigint().to_bytes_be();
 
         // Calculate start and end indices for this element in output buffer
         let start = i * BYTES_PER_FIELD_ELEMENT;
diff --git a/src/kzg.rs b/src/kzg.rs
index cda9495..4b809d7 100644
--- a/src/kzg.rs
+++ b/src/kzg.rs
@@ -7,9 +7,7 @@ use crate::{
     traits::ReadPointFromBytes,
 };
 
-use crate::consts::{
-    Endianness, FIAT_SHAMIR_PROTOCOL_DOMAIN, KZG_ENDIANNESS, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN,
-};
+use crate::consts::{FIAT_SHAMIR_PROTOCOL_DOMAIN, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN};
 use crate::helpers::is_on_curve_g1;
 use ark_bn254::{Bn254, Fr, G1Affine, G1Projective, G2Affine, G2Projective};
 use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM};
@@ -916,11 +914,7 @@ impl KZG {
         let msg_digest = Sha256::digest(msg);
         let hash_elements = msg_digest.as_slice();
 
-        // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let fr_element: Fr = match KZG_ENDIANNESS {
-            Endianness::Big => Fr::from_be_bytes_mod_order(hash_elements),
-            Endianness::Little => Fr::from_le_bytes_mod_order(hash_elements),
-        };
+        let fr_element: Fr = Fr::from_be_bytes_mod_order(hash_elements);
 
         fr_element
     }
@@ -965,10 +959,7 @@ impl KZG {
         // Step 2: Copy the number of field elements (blob polynomial length)
         // Convert to bytes using the configured endianness
         // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let number_of_field_elements = match KZG_ENDIANNESS {
-            Endianness::Big => blob_poly.len().to_be_bytes(),
-            Endianness::Little => blob_poly.len().to_le_bytes(),
-        };
+        let number_of_field_elements = blob_poly.len().to_be_bytes();
         digest_bytes[offset..offset + 8].copy_from_slice(&number_of_field_elements);
         offset += 8;
 
@@ -1222,10 +1213,7 @@ impl KZG {
         // Convert number of commitments to bytes and copy to buffer
         // Uses configured endianness (Big or Little)
        // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let n_bytes: [u8; 8] = match KZG_ENDIANNESS {
-            Endianness::Big => n.to_be_bytes(),
-            Endianness::Little => n.to_le_bytes(),
-        };
+        let n_bytes: [u8; 8] = n.to_be_bytes();
         data_to_be_hashed[32..40].copy_from_slice(&n_bytes);
 
         let target_slice = &mut data_to_be_hashed[24..24 + (n * 8)];