Commit
fixing endianness to big across functions (#39)
* fixing endianness to big across functions

* removing TODOs
anupsv committed Jan 15, 2025
1 parent 736f7c2 commit 993f67c
Showing 3 changed files with 6 additions and 32 deletions.
src/consts.rs: 8 changes (0 additions, 8 deletions)
@@ -7,14 +7,6 @@ pub const FIAT_SHAMIR_PROTOCOL_DOMAIN: &[u8] = b"EIGENDA_FSBLOBVERIFY_V1_"; // A
 /// Ref: https://github.com/ethereum/consensus-specs/blob/master/specs/deneb/polynomial-commitments.md#blob
 pub const RANDOM_CHALLENGE_KZG_BATCH_DOMAIN: &[u8] = b"EIGENDA_RCKZGBATCH___V1_"; // Adapted from 4844
 
-pub const KZG_ENDIANNESS: Endianness = Endianness::Big; // Choose between Big or Little.
-
-#[derive(Debug, Clone, Copy)]
-pub enum Endianness {
-    Big,
-    Little,
-}
-
 // This is the G2 Tau for the SRS of size 3000. These is only meant for testing purposes.
 pub const G2_TAU_FOR_TEST_SRS_3000: [[u64; 4]; 4] = [
     [
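With the `Endianness` enum and the `KZG_ENDIANNESS` switch removed, the crate's serialization convention is fixed to big-endian. A minimal sketch (illustrative, not part of the crate) of the invariant this implies for a BN254 scalar, using the same arkworks calls the diff switches to:

```rust
use ark_bn254::Fr;
use ark_ff::{BigInteger, PrimeField};

fn main() {
    let x = Fr::from(42u64);

    // Big-endian serialization, now used unconditionally across the crate.
    let be_bytes: Vec<u8> = x.into_bigint().to_bytes_be();
    assert_eq!(be_bytes.len(), 32); // a BN254 scalar occupies 32 bytes

    // Round trip: reading the same bytes back as big-endian recovers the element.
    let y = Fr::from_be_bytes_mod_order(&be_bytes);
    assert_eq!(x, y);
}
```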
src/helpers.rs: 8 changes (2 additions, 6 deletions)
@@ -7,7 +7,7 @@ use std::cmp;
 
 use crate::{
     arith,
-    consts::{Endianness, BYTES_PER_FIELD_ELEMENT, KZG_ENDIANNESS, SIZE_OF_G1_AFFINE_COMPRESSED},
+    consts::{BYTES_PER_FIELD_ELEMENT, SIZE_OF_G1_AFFINE_COMPRESSED},
     errors::KzgError,
     traits::ReadPointFromBytes,
 };
@@ -189,11 +189,7 @@ pub fn to_byte_array(data_fr: &[Fr], max_output_size: usize) -> Vec<u8> {
     // Using enumerate().take(n) to process elements up to n
     for (i, element) in data_fr.iter().enumerate().take(n) {
         // Convert field element to bytes based on configured endianness
-        // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let v: Vec<u8> = match KZG_ENDIANNESS {
-            Endianness::Big => element.into_bigint().to_bytes_be(), // Big-endian conversion
-            Endianness::Little => element.into_bigint().to_bytes_le(), // Little-endian conversion
-        };
+        let v: Vec<u8> = element.into_bigint().to_bytes_be();
 
         // Calculate start and end indices for this element in output buffer
         let start = i * BYTES_PER_FIELD_ELEMENT;
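For reference, a simplified stand-in for the serialization loop after this change (not the crate's exact `to_byte_array`; the buffer sizing and the assumption `BYTES_PER_FIELD_ELEMENT = 32` are illustrative):

```rust
use ark_bn254::Fr;
use ark_ff::{BigInteger, PrimeField};
use std::cmp;

const BYTES_PER_FIELD_ELEMENT: usize = 32; // assumed width of a BN254 scalar

// Simplified sketch: every field element is written big-endian, no endianness switch.
fn to_byte_array(data_fr: &[Fr], max_output_size: usize) -> Vec<u8> {
    let n = cmp::min(data_fr.len(), max_output_size / BYTES_PER_FIELD_ELEMENT);
    let mut out = vec![0u8; n * BYTES_PER_FIELD_ELEMENT];

    for (i, element) in data_fr.iter().enumerate().take(n) {
        // to_bytes_be() yields exactly BYTES_PER_FIELD_ELEMENT bytes for a BN254 scalar.
        let v = element.into_bigint().to_bytes_be();
        let start = i * BYTES_PER_FIELD_ELEMENT;
        out[start..start + BYTES_PER_FIELD_ELEMENT].copy_from_slice(&v);
    }
    out
}

fn main() {
    let bytes = to_byte_array(&[Fr::from(1u64), Fr::from(2u64)], 64);
    assert_eq!(bytes.len(), 64);
}
```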
src/kzg.rs: 22 changes (4 additions, 18 deletions)
@@ -9,9 +9,7 @@ use crate::{
     polynomial::{PolynomialCoeffForm, PolynomialEvalForm},
 };
 
-use crate::consts::{
-    Endianness, FIAT_SHAMIR_PROTOCOL_DOMAIN, KZG_ENDIANNESS, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN,
-};
+use crate::consts::{FIAT_SHAMIR_PROTOCOL_DOMAIN, RANDOM_CHALLENGE_KZG_BATCH_DOMAIN};
 use crate::helpers::is_on_curve_g1;
 use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};
 use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, VariableBaseMSM};
@@ -746,11 +744,7 @@ impl KZG {
         let msg_digest = Sha256::digest(msg);
         let hash_elements = msg_digest.as_slice();
 
-        // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let fr_element: Fr = match KZG_ENDIANNESS {
-            Endianness::Big => Fr::from_be_bytes_mod_order(hash_elements),
-            Endianness::Little => Fr::from_le_bytes_mod_order(hash_elements),
-        };
+        let fr_element: Fr = Fr::from_be_bytes_mod_order(hash_elements);
 
         fr_element
     }
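The hash-to-field path above now always interprets the SHA-256 digest as big-endian. A hedged sketch of the same idea in isolation (the helper name is illustrative, not the crate's API):

```rust
use ark_bn254::Fr;
use ark_ff::PrimeField;
use sha2::{Digest, Sha256};

// Illustrative helper: hash a message and reduce the 32-byte digest,
// read as a big-endian integer, into the BN254 scalar field.
fn hash_to_fr(msg: &[u8]) -> Fr {
    let digest = Sha256::digest(msg);
    Fr::from_be_bytes_mod_order(digest.as_slice())
}

fn main() {
    let challenge = hash_to_fr(b"example transcript bytes");
    println!("{}", challenge);
}
```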
@@ -794,11 +788,7 @@ impl KZG {
 
         // Step 2: Copy the number of field elements (blob polynomial length)
         // Convert to bytes using the configured endianness
-        // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let number_of_field_elements = match KZG_ENDIANNESS {
-            Endianness::Big => blob_poly.len().to_be_bytes(),
-            Endianness::Little => blob_poly.len().to_le_bytes(),
-        };
+        let number_of_field_elements = blob_poly.len().to_be_bytes();
         digest_bytes[offset..offset + 8].copy_from_slice(&number_of_field_elements);
         offset += 8;
 
@@ -1050,11 +1040,7 @@ impl KZG {
 
         // Convert number of commitments to bytes and copy to buffer
         // Uses configured endianness (Big or Little)
-        // TODO(anupsv): To be removed and default to Big endian. Ref: https://github.com/Layr-Labs/rust-kzg-bn254/issues/27
-        let n_bytes: [u8; 8] = match KZG_ENDIANNESS {
-            Endianness::Big => n.to_be_bytes(),
-            Endianness::Little => n.to_le_bytes(),
-        };
+        let n_bytes: [u8; 8] = n.to_be_bytes();
         data_to_be_hashed[32..40].copy_from_slice(&n_bytes);
 
         let target_slice = &mut data_to_be_hashed[24..24 + (n * 8)];
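The last two hunks both encode a count (the blob polynomial length and the number of commitments n) as 8 big-endian bytes before hashing. A small sketch of that fixed-width encoding pattern (the buffer layout and offsets here are illustrative, not the crate's exact transcript):

```rust
// Illustrative transcript-style buffer: a 24-byte domain separator,
// followed by an element count as 8 big-endian bytes.
fn main() {
    let n: u64 = 4096; // e.g. number of field elements in a blob
    let mut buf = [0u8; 32];

    buf[..24].copy_from_slice(b"EIGENDA_FSBLOBVERIFY_V1_"); // 24-byte domain tag
    buf[24..32].copy_from_slice(&n.to_be_bytes());          // count, big-endian

    assert_eq!(&buf[24..32], &[0u8, 0, 0, 0, 0, 0, 0x10, 0x00][..]); // 4096 = 0x1000
}
```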
