Skip to content

Commit

Permalink
Unify `parse_total_l2_to_l1_pubdata` (v2) with `parse_resolved_pubdata` into a single shared parser in `common.rs`
Browse files Browse the repository at this point in the history
  • Loading branch information
vbar committed Mar 20, 2024
1 parent 0d54a35 commit 705193c
Show file tree
Hide file tree
Showing 4 changed files with 48 additions and 77 deletions.
2 changes: 2 additions & 0 deletions state-reconstruct-fetcher/src/constants.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,4 +38,6 @@ pub mod zksync {
pub const LENGTH_BITS_OFFSET: u8 = 3;
// Size of `CommitBatchInfo.pubdataCommitments` item.
pub const PUBDATA_COMMITMENT_SIZE: usize = 144;
// Size of the unparsed end of `CommitBatchInfo.pubdataCommitments`.
pub const CALLDATA_SOURCE_TAIL_SIZE: usize = 32;
}
34 changes: 33 additions & 1 deletion state-reconstruct-fetcher/src/types/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@
use ethers::{abi, types::U256};

use super::{L2ToL1Pubdata, PackingType, ParseError};
use crate::constants::zksync::{LENGTH_BITS_OFFSET, OPERATION_BITMASK};
use crate::constants::zksync::{
L2_TO_L1_LOG_SERIALIZE_SIZE, LENGTH_BITS_OFFSET, OPERATION_BITMASK,
};

pub struct ExtractedToken {
pub new_l2_block_number: U256,
Expand Down Expand Up @@ -85,6 +87,36 @@ impl TryFrom<&abi::Token> for ExtractedToken {
}

// TODO: Move these to a dedicated parser struct.
pub fn parse_resolved_pubdata(bytes: &[u8]) -> Result<Vec<L2ToL1Pubdata>, ParseError> {
let mut l2_to_l1_pubdata = Vec::new();

let mut pointer = 0;
// Skip over logs and messages.
let num_of_l1_to_l2_logs = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
pointer += L2_TO_L1_LOG_SERIALIZE_SIZE * num_of_l1_to_l2_logs as usize;

let num_of_messages = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
for _ in 0..num_of_messages {
let current_message_len = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
pointer += current_message_len as usize;
}

// Parse published bytecodes.
let num_of_bytecodes = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
for _ in 0..num_of_bytecodes {
let current_bytecode_len =
u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer)) as usize;
let bytecode = bytes[pointer..pointer + current_bytecode_len].to_vec();
pointer += current_bytecode_len;
l2_to_l1_pubdata.push(L2ToL1Pubdata::PublishedBytecode(bytecode))
}

let mut state_diffs = parse_compressed_state_diffs(bytes, &mut pointer)?;
l2_to_l1_pubdata.append(&mut state_diffs);

Ok(l2_to_l1_pubdata)
}

pub fn parse_compressed_state_diffs(
bytes: &[u8],
pointer: &mut usize,
Expand Down
36 changes: 2 additions & 34 deletions state-reconstruct-fetcher/src/types/v2.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,9 @@ use ethers::{abi, types::U256};
use serde::{Deserialize, Serialize};

use super::{
common::{parse_compressed_state_diffs, read_next_n_bytes, ExtractedToken},
common::{parse_resolved_pubdata, ExtractedToken},
CommitBlockFormat, CommitBlockInfo, L2ToL1Pubdata, ParseError,
};
use crate::constants::zksync::L2_TO_L1_LOG_SERIALIZE_SIZE;

/// Data needed to commit new block
#[derive(Debug, Serialize, Deserialize)]
Expand Down Expand Up @@ -53,7 +52,7 @@ impl TryFrom<&abi::Token> for V2 {
} = token.try_into()?;
let new_enumeration_index = new_enumeration_index.as_u64();

let total_l2_to_l1_pubdata = parse_total_l2_to_l1_pubdata(total_l2_to_l1_pubdata)?;
let total_l2_to_l1_pubdata = parse_resolved_pubdata(&total_l2_to_l1_pubdata[..])?;
let blk = V2 {
block_number: new_l2_block_number.as_u64(),
timestamp: timestamp.as_u64(),
Expand All @@ -68,34 +67,3 @@ impl TryFrom<&abi::Token> for V2 {
Ok(blk)
}
}

// Pre-unification v2 parser, deleted by this commit in favor of the shared
// `common::parse_resolved_pubdata` (which takes `&[u8]` instead of `Vec<u8>`).
// Walks the pubdata sections in order: logs (skipped), messages (skipped),
// published bytecodes (collected), then compressed state diffs.
fn parse_total_l2_to_l1_pubdata(bytes: Vec<u8>) -> Result<Vec<L2ToL1Pubdata>, ParseError> {
let mut l2_to_l1_pubdata = Vec::new();
let mut pointer = 0;

// Skip over logs and messages.
let num_of_l1_to_l2_logs = u32::from_be_bytes(read_next_n_bytes(&bytes, &mut pointer));
pointer += L2_TO_L1_LOG_SERIALIZE_SIZE * num_of_l1_to_l2_logs as usize;

let num_of_messages = u32::from_be_bytes(read_next_n_bytes(&bytes, &mut pointer));
for _ in 0..num_of_messages {
let current_message_len = u32::from_be_bytes(read_next_n_bytes(&bytes, &mut pointer));
pointer += current_message_len as usize;
}

// Parse published bytecodes.
let num_of_bytecodes = u32::from_be_bytes(read_next_n_bytes(&bytes, &mut pointer));
for _ in 0..num_of_bytecodes {
let current_bytecode_len =
u32::from_be_bytes(read_next_n_bytes(&bytes, &mut pointer)) as usize;
// NOTE(review): unchecked slice — panics if the length prefix overruns `bytes`.
let bytecode = bytes[pointer..pointer + current_bytecode_len].to_vec();
pointer += current_bytecode_len;
l2_to_l1_pubdata.push(L2ToL1Pubdata::PublishedBytecode(bytecode))
}

// Parse compressed state diffs.
let mut state_diffs = parse_compressed_state_diffs(&bytes, &mut pointer)?;
l2_to_l1_pubdata.append(&mut state_diffs);

Ok(l2_to_l1_pubdata)
}
53 changes: 11 additions & 42 deletions state-reconstruct-fetcher/src/types/v3.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,12 @@ use tokio::time::{sleep, Duration};
use zkevm_circuits::eip_4844::ethereum_4844_data_into_zksync_pubdata;

use super::{
common::{parse_compressed_state_diffs, read_next_n_bytes, ExtractedToken},
common::{parse_resolved_pubdata, read_next_n_bytes, ExtractedToken},
L2ToL1Pubdata, ParseError,
};
use crate::{
blob_http_client::BlobHttpClient,
constants::zksync::{L2_TO_L1_LOG_SERIALIZE_SIZE, PUBDATA_COMMITMENT_SIZE},
constants::zksync::{CALLDATA_SOURCE_TAIL_SIZE, PUBDATA_COMMITMENT_SIZE},
};

/// `MAX_RETRIES` is the maximum number of retries on failed blob retrieval.
Expand Down Expand Up @@ -109,7 +109,14 @@ impl V3 {
) -> Result<Vec<L2ToL1Pubdata>, ParseError> {
let bytes = &self.pubdata_commitments[..];
match self.pubdata_source {
PubdataSource::Calldata => parse_resolved_pubdata(bytes, true),
PubdataSource::Calldata => {
let l = bytes.len();
if l < CALLDATA_SOURCE_TAIL_SIZE {
Err(ParseError::InvalidCalldata("too short".to_string()))
} else {
parse_resolved_pubdata(&bytes[..l - CALLDATA_SOURCE_TAIL_SIZE])
}
}
PubdataSource::Blob => parse_pubdata_from_blobs(bytes, client).await,
}
}
Expand All @@ -121,44 +128,6 @@ fn parse_pubdata_source(bytes: &[u8], pointer: &mut usize) -> Result<PubdataSour
pubdata_source.try_into()
}

// Pre-unification v3 parser, deleted by this commit. The `shorten` flag
// trimmed the final 32 bytes for calldata-sourced pubdata; the replacement
// moves that trim to the caller using `CALLDATA_SOURCE_TAIL_SIZE` and drops
// the flag from the shared `common::parse_resolved_pubdata`.
fn parse_resolved_pubdata(bytes: &[u8], shorten: bool) -> Result<Vec<L2ToL1Pubdata>, ParseError> {
let mut l2_to_l1_pubdata = Vec::new();

let mut pointer = 0;
// Skip over logs and messages.
let num_of_l1_to_l2_logs = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
pointer += L2_TO_L1_LOG_SERIALIZE_SIZE * num_of_l1_to_l2_logs as usize;

let num_of_messages = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
for _ in 0..num_of_messages {
let current_message_len = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
pointer += current_message_len as usize;
}

// Parse published bytecodes.
let num_of_bytecodes = u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer));
for _ in 0..num_of_bytecodes {
let current_bytecode_len =
u32::from_be_bytes(read_next_n_bytes(bytes, &mut pointer)) as usize;
// NOTE(review): unchecked slice — panics if the length prefix overruns `bytes`.
let bytecode = bytes[pointer..pointer + current_bytecode_len].to_vec();
pointer += current_bytecode_len;
l2_to_l1_pubdata.push(L2ToL1Pubdata::PublishedBytecode(bytecode))
}

// Parse compressed state diffs.
// NOTE: Is this correct? Ignoring the last 32 bytes?
// (Resolved by this commit: the 32-byte tail is a calldata-source artifact,
// now stripped by the caller as CALLDATA_SOURCE_TAIL_SIZE.)
// NOTE(review): `bytes.len() - 32` underflows (panics) when shorten is true
// and the input is shorter than 32 bytes; the replacement checks this.
let diff_bytes = if shorten {
let end_point = bytes.len() - 32;
&bytes[..end_point]
} else {
bytes
};
let mut state_diffs = parse_compressed_state_diffs(diff_bytes, &mut pointer)?;
l2_to_l1_pubdata.append(&mut state_diffs);

Ok(l2_to_l1_pubdata)
}

async fn parse_pubdata_from_blobs(
bytes: &[u8],
client: &BlobHttpClient,
Expand All @@ -180,7 +149,7 @@ async fn parse_pubdata_from_blobs(
}

let blobs_view = &blobs[..l];
parse_resolved_pubdata(blobs_view, false)
parse_resolved_pubdata(blobs_view)
}

async fn get_blob(kzg_commitment: &[u8], client: &BlobHttpClient) -> Result<Vec<u8>, ParseError> {
Expand Down

0 comments on commit 705193c

Please sign in to comment.