Reintroduce L1BatchCommitDataGenerator trait
ilitteri committed Feb 3, 2024
1 parent 0ce5616 commit ebca913
Showing 11 changed files with 168 additions and 120 deletions.
3 changes: 2 additions & 1 deletion core/bin/external_node/src/main.rs
@@ -36,6 +36,7 @@ use zksync_dal::{healthcheck::ConnectionPoolHealthCheck, ConnectionPool};
use zksync_health_check::CheckHealth;
use zksync_state::PostgresStorageCaches;
use zksync_storage::RocksDB;
+use zksync_types::l1_batch_commit_data_generator::RollupModeL1BatchCommitDataGenerator;
use zksync_utils::wait_for_tasks::wait_for_tasks;

mod config;
@@ -267,7 +268,7 @@ async fn init_tasks(
.context("failed to build a tree_pool")?;
let tree_handle = task::spawn(metadata_calculator.run(tree_pool, tree_stop_receiver));

-let l1_batch_commit_data_generator = L1BatchCommitDataGeneratorMode::Rollup;
+let l1_batch_commit_data_generator = Arc::new(RollupModeL1BatchCommitDataGenerator {});

let consistency_checker_handle = tokio::spawn(
consistency_checker.run(stop_receiver.clone(), l1_batch_commit_data_generator),
@@ -1,5 +1,9 @@
-use zksync_config::configs::chain::L1BatchCommitDataGeneratorMode;
-use zksync_types::{commitment::L1BatchWithMetadata, ethabi::Token};
+use std::sync::Arc;
+
+use zksync_types::{
+commitment::L1BatchWithMetadata, ethabi::Token,
+l1_batch_commit_data_generator::L1BatchCommitDataGenerator,
+};

use crate::{
i_executor::structures::{CommitBatchInfo, StoredBatchInfo},
@@ -11,7 +15,7 @@ use crate::{
pub struct CommitBatches {
pub last_committed_l1_batch: L1BatchWithMetadata,
pub l1_batches: Vec<L1BatchWithMetadata>,
-pub l1_batch_commit_data_generator: L1BatchCommitDataGeneratorMode,
+pub l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
}

impl Tokenize for CommitBatches {
@@ -21,7 +25,8 @@ impl Tokenize for CommitBatches {
.l1_batches
.iter()
.map(|batch| {
-CommitBatchInfo::new(batch, self.l1_batch_commit_data_generator).into_token()
+CommitBatchInfo::new(batch, self.l1_batch_commit_data_generator.clone())
+.into_token()
})
.collect();

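Aside (not part of the diff): the field change from a Copy config enum to Arc<dyn L1BatchCommitDataGenerator> is why the Tokenize impl above now calls .clone() once per batch. Cloning an Arc only copies a pointer and bumps a reference count; every handle dispatches to the same generator. A minimal stand-alone sketch of that pattern, using stand-in names rather than the production types:

use std::sync::Arc;

// Stand-in trait mirroring the shape of L1BatchCommitDataGenerator.
trait CommitDataGen: std::fmt::Debug + Send + Sync {
    fn mode(&self) -> &'static str;
}

#[derive(Debug)]
struct RollupGen;

impl CommitDataGen for RollupGen {
    fn mode(&self) -> &'static str {
        "rollup"
    }
}

fn main() {
    let generator: Arc<dyn CommitDataGen> = Arc::new(RollupGen);
    let batches = ["batch-1", "batch-2", "batch-3"];
    // One Arc clone per batch: a pointer copy plus a counter increment,
    // mirroring how each CommitBatchInfo now gets its own handle.
    let handles: Vec<Arc<dyn CommitDataGen>> =
        batches.iter().map(|_| generator.clone()).collect();
    assert!(handles.iter().all(|g| g.mode() == "rollup"));
}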
@@ -1,8 +1,9 @@
-use zksync_config::configs::chain::L1BatchCommitDataGeneratorMode;
+use std::sync::Arc;
+
use zksync_types::{
commitment::L1BatchWithMetadata,
ethabi::Token,
-utils,
+l1_batch_commit_data_generator::L1BatchCommitDataGenerator,
web3::{contract::Error as Web3ContractError, error::Error as Web3ApiError},
U256,
};
@@ -13,13 +14,13 @@ use crate::Tokenizable;
#[derive(Debug)]
pub struct CommitBatchInfo<'a> {
pub l1_batch_with_metadata: &'a L1BatchWithMetadata,
-pub l1_batch_commit_data_generator: L1BatchCommitDataGeneratorMode,
+pub l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
}

impl<'a> CommitBatchInfo<'a> {
pub fn new(
l1_batch_with_metadata: &'a L1BatchWithMetadata,
-l1_batch_commit_data_generator: L1BatchCommitDataGeneratorMode,
+l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
) -> Self {
Self {
l1_batch_with_metadata,
@@ -51,14 +52,8 @@ impl<'a> Tokenizable for CommitBatchInfo<'a> {
{
pre_boojum_into_token(self.l1_batch_with_metadata)
} else {
-match self.l1_batch_commit_data_generator {
-L1BatchCommitDataGeneratorMode::Rollup => {
-Token::Tuple(rollup_mode_l1_commit_data(self.l1_batch_with_metadata))
-}
-L1BatchCommitDataGeneratorMode::Validium => {
-Token::Tuple(validium_mode_l1_commit_data(self.l1_batch_with_metadata))
-}
-}
+self.l1_batch_commit_data_generator
+.l1_commit_data(self.l1_batch_with_metadata)
}
}
}
@@ -93,49 +88,3 @@ fn pre_boojum_into_token<'a>(l1_batch_commit_with_metadata: &'a L1BatchWithMetad
),
])
}

-fn validium_mode_l1_commit_data<'a>(l1_batch_with_metadata: &'a L1BatchWithMetadata) -> Vec<Token> {
-let header = &l1_batch_with_metadata.header;
-let metadata = &l1_batch_with_metadata.metadata;
-let commit_data = vec![
-// `batchNumber`
-Token::Uint(U256::from(header.number.0)),
-// `timestamp`
-Token::Uint(U256::from(header.timestamp)),
-// `indexRepeatedStorageChanges`
-Token::Uint(U256::from(metadata.rollup_last_leaf_index)),
-// `newStateRoot`
-Token::FixedBytes(metadata.merkle_root_hash.as_bytes().to_vec()),
-// `numberOfLayer1Txs`
-Token::Uint(U256::from(header.l1_tx_count)),
-// `priorityOperationsHash`
-Token::FixedBytes(header.priority_ops_onchain_data_hash().as_bytes().to_vec()),
-// `bootloaderHeapInitialContentsHash`
-Token::FixedBytes(
-metadata
-.bootloader_initial_content_commitment
-.unwrap()
-.as_bytes()
-.to_vec(),
-),
-// `eventsQueueStateHash`
-Token::FixedBytes(
-metadata
-.events_queue_commitment
-.unwrap()
-.as_bytes()
-.to_vec(),
-),
-// `systemLogs`
-Token::Bytes(metadata.l2_l1_messages_compressed.clone()),
-];
-commit_data
-}
-
-fn rollup_mode_l1_commit_data<'a>(l1_batch_with_metadata: &'a L1BatchWithMetadata) -> Vec<Token> {
-let mut commit_data = validium_mode_l1_commit_data(l1_batch_with_metadata);
-commit_data.push(Token::Bytes(utils::construct_pubdata(
-l1_batch_with_metadata,
-)));
-commit_data
-}
80 changes: 80 additions & 0 deletions core/lib/types/src/l1_batch_commit_data_generator.rs
@@ -0,0 +1,80 @@
use zksync_basic_types::{ethabi::Token, U256};

use crate::{commitment::L1BatchWithMetadata, utils};

pub trait L1BatchCommitDataGenerator
where
Self: std::fmt::Debug + Send + Sync,
{
fn l1_commit_data(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token;
fn l1_commit_data_size(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> usize {
crate::ethabi::encode(&[Token::Array(vec![
self.l1_commit_data(l1_batch_with_metadata)
])])
.len()
}
}

#[derive(Debug, Clone)]
pub struct RollupModeL1BatchCommitDataGenerator {}

#[derive(Debug, Clone)]
pub struct ValidiumModeL1BatchCommitDataGenerator {}

impl L1BatchCommitDataGenerator for RollupModeL1BatchCommitDataGenerator {
fn l1_commit_data(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token {
Token::Tuple(rollup_mode_l1_commit_data(l1_batch_with_metadata))
}
}

impl L1BatchCommitDataGenerator for ValidiumModeL1BatchCommitDataGenerator {
fn l1_commit_data(&self, l1_batch_with_metadata: &L1BatchWithMetadata) -> Token {
Token::Tuple(validium_mode_l1_commit_data(l1_batch_with_metadata))
}
}

fn validium_mode_l1_commit_data(l1_batch_with_metadata: &L1BatchWithMetadata) -> Vec<Token> {
let header = &l1_batch_with_metadata.header;
let metadata = &l1_batch_with_metadata.metadata;
let commit_data = vec![
// `batchNumber`
Token::Uint(U256::from(header.number.0)),
// `timestamp`
Token::Uint(U256::from(header.timestamp)),
// `indexRepeatedStorageChanges`
Token::Uint(U256::from(metadata.rollup_last_leaf_index)),
// `newStateRoot`
Token::FixedBytes(metadata.merkle_root_hash.as_bytes().to_vec()),
// `numberOfLayer1Txs`
Token::Uint(U256::from(header.l1_tx_count)),
// `priorityOperationsHash`
Token::FixedBytes(header.priority_ops_onchain_data_hash().as_bytes().to_vec()),
// `bootloaderHeapInitialContentsHash`
Token::FixedBytes(
metadata
.bootloader_initial_content_commitment
.unwrap()
.as_bytes()
.to_vec(),
),
// `eventsQueueStateHash`
Token::FixedBytes(
metadata
.events_queue_commitment
.unwrap()
.as_bytes()
.to_vec(),
),
// `systemLogs`
Token::Bytes(metadata.l2_l1_messages_compressed.clone()),
];
commit_data
}

fn rollup_mode_l1_commit_data(l1_batch_with_metadata: &L1BatchWithMetadata) -> Vec<Token> {
let mut commit_data = validium_mode_l1_commit_data(l1_batch_with_metadata);
commit_data.push(Token::Bytes(utils::construct_pubdata(
l1_batch_with_metadata,
)));
commit_data
}
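For orientation (not part of the commit): call sites that previously matched on the L1BatchCommitDataGeneratorMode config enum can now resolve the enum into a trait object once and pass Arc<dyn L1BatchCommitDataGenerator> around. The helper below is hypothetical; only the type, variant, and module names come from this commit:

use std::sync::Arc;

use zksync_config::configs::chain::L1BatchCommitDataGeneratorMode;
use zksync_types::l1_batch_commit_data_generator::{
    L1BatchCommitDataGenerator, RollupModeL1BatchCommitDataGenerator,
    ValidiumModeL1BatchCommitDataGenerator,
};

// Hypothetical helper: resolve the config enum into a trait object once, at startup,
// so components like the consistency checker only see `Arc<dyn L1BatchCommitDataGenerator>`.
fn commit_data_generator(
    mode: L1BatchCommitDataGeneratorMode,
) -> Arc<dyn L1BatchCommitDataGenerator> {
    match mode {
        L1BatchCommitDataGeneratorMode::Rollup => {
            Arc::new(RollupModeL1BatchCommitDataGenerator {})
        }
        L1BatchCommitDataGeneratorMode::Validium => {
            Arc::new(ValidiumModeL1BatchCommitDataGenerator {})
        }
    }
}

With either implementation, rollup mode appends the full pubdata (built by utils::construct_pubdata) as one extra Bytes token after systemLogs, while validium mode omits it; l1_commit_data_size needs no per-mode code because its default implementation simply ABI-encodes whatever l1_commit_data returns.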
2 changes: 2 additions & 0 deletions core/lib/types/src/lib.rs
@@ -55,6 +55,8 @@ pub mod transaction_request;
pub mod utils;
pub mod vm_version;

+pub mod l1_batch_commit_data_generator;
+
/// Denotes the first byte of the special zkSync's EIP-712-signed transaction.
pub const EIP_712_TX_TYPE: u8 = 0x71;

11 changes: 6 additions & 5 deletions core/lib/zksync_core/src/consistency_checker/mod.rs
@@ -1,13 +1,14 @@
-use std::{fmt, time::Duration};
+use std::{fmt, sync::Arc, time::Duration};

use anyhow::Context as _;
use tokio::sync::watch;
-use zksync_config::configs::chain::L1BatchCommitDataGeneratorMode;
use zksync_contracts::PRE_BOOJUM_COMMIT_FUNCTION;
use zksync_dal::{ConnectionPool, StorageProcessor};
use zksync_eth_client::{clients::QueryClient, Error as L1ClientError, EthInterface};
use zksync_l1_contract_interface::{i_executor::structures::CommitBatchInfo, Tokenizable};
-use zksync_types::{web3::ethabi, L1BatchNumber, H256};
+use zksync_types::{
+l1_batch_commit_data_generator::L1BatchCommitDataGenerator, web3::ethabi, L1BatchNumber, H256,
+};

use crate::{
metrics::{CheckerComponent, EN_METRICS},
@@ -68,7 +69,7 @@ impl LocalL1BatchCommitData {
async fn new(
storage: &mut StorageProcessor<'_>,
batch_number: L1BatchNumber,
-l1_batch_commit_data_generator: L1BatchCommitDataGeneratorMode,
+l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
) -> anyhow::Result<Option<Self>> {
let Some(storage_l1_batch) = storage
.blocks_dal()
@@ -254,7 +255,7 @@ impl ConsistencyChecker {
pub async fn run(
mut self,
mut stop_receiver: watch::Receiver<bool>,
-l1_batch_commit_data_generator: L1BatchCommitDataGeneratorMode,
+l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
) -> anyhow::Result<()> {
// It doesn't make sense to start the checker until we have at least one L1 batch with metadata.
let earliest_l1_batch_number =
27 changes: 14 additions & 13 deletions core/lib/zksync_core/src/consistency_checker/tests/mod.rs
@@ -5,13 +5,13 @@ use std::{collections::HashMap, slice};
use assert_matches::assert_matches;
use test_casing::{test_casing, Product};
use tokio::sync::mpsc;
-use zksync_config::configs::chain::L1BatchCommitDataGeneratorMode;
use zksync_dal::StorageProcessor;
use zksync_eth_client::clients::MockEthereum;
use zksync_l1_contract_interface::i_executor::structures::StoredBatchInfo;
use zksync_types::{
aggregated_operations::AggregatedActionType, commitment::L1BatchWithMetadata,
-web3::contract::Options, L2ChainId, ProtocolVersion, ProtocolVersionId, H256,
+l1_batch_commit_data_generator::RollupModeL1BatchCommitDataGenerator, web3::contract::Options,
+L2ChainId, ProtocolVersion, ProtocolVersionId, H256,
};

use super::*;
@@ -45,11 +45,11 @@ fn create_pre_boojum_l1_batch_with_metadata(number: u32) -> L1BatchWithMetadata

fn build_commit_tx_input_data(
batches: &[L1BatchWithMetadata],
-l1_batch_commit_data_generator: L1BatchCommitDataGeneratorMode,
+l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
) -> Vec<u8> {
-let commit_tokens = batches
-.iter()
-.map(|batch| CommitBatchInfo::new(batch, l1_batch_commit_data_generator).into_token());
+let commit_tokens = batches.iter().map(|batch| {
+CommitBatchInfo::new(batch, l1_batch_commit_data_generator.clone()).into_token()
+});
let commit_tokens = ethabi::Token::Array(commit_tokens.collect());

let mut encoded = vec![];
@@ -89,7 +89,7 @@ fn build_commit_tx_input_data_is_correct() {
create_l1_batch_with_metadata(1),
create_l1_batch_with_metadata(2),
];
-let l1_batch_commit_data_generator = L1BatchCommitDataGeneratorMode::Rollup;
+let l1_batch_commit_data_generator = Arc::new(RollupModeL1BatchCommitDataGenerator {});

let commit_tx_input_data =
build_commit_tx_input_data(&batches, l1_batch_commit_data_generator.clone());
@@ -103,7 +103,8 @@
.unwrap();
assert_eq!(
commit_data,
-CommitBatchInfo::new(batch, l1_batch_commit_data_generator).into_token()
+CommitBatchInfo::new(batch, l1_batch_commit_data_generator.clone().clone())
+.into_token()
);
}
}
@@ -309,7 +310,7 @@ async fn normal_checker_function(
let mut commit_tx_hash_by_l1_batch = HashMap::with_capacity(l1_batches.len());
let client = MockEthereum::default();

-let l1_batch_commit_data_generator = L1BatchCommitDataGeneratorMode::Rollup;
+let l1_batch_commit_data_generator = Arc::new(RollupModeL1BatchCommitDataGenerator {});
for (i, l1_batches) in l1_batches.chunks(batches_per_transaction).enumerate() {
let input_data =
build_commit_tx_input_data(l1_batches, l1_batch_commit_data_generator.clone());
@@ -390,7 +391,7 @@ async fn checker_processes_pre_boojum_batches(
let mut commit_tx_hash_by_l1_batch = HashMap::with_capacity(l1_batches.len());
let client = MockEthereum::default();

-let l1_batch_commit_data_generator = L1BatchCommitDataGeneratorMode::Rollup;
+let l1_batch_commit_data_generator = Arc::new(RollupModeL1BatchCommitDataGenerator {});
for (i, l1_batch) in l1_batches.iter().enumerate() {
let input_data = build_commit_tx_input_data(
slice::from_ref(l1_batch),
@@ -452,7 +453,7 @@ async fn checker_functions_after_snapshot_recovery(delay_batch_insertion: bool)

let l1_batch = create_l1_batch_with_metadata(99);

-let l1_batch_commit_data_generator = L1BatchCommitDataGeneratorMode::Rollup;
+let l1_batch_commit_data_generator = Arc::new(RollupModeL1BatchCommitDataGenerator {});

let commit_tx_input_data = build_commit_tx_input_data(
slice::from_ref(&l1_batch),
@@ -535,7 +536,7 @@ impl IncorrectDataKind {
self,
client: &MockEthereum,
l1_batch: &L1BatchWithMetadata,
-l1_batch_commit_data_generator: L1BatchCommitDataGeneratorMode,
+l1_batch_commit_data_generator: Arc<dyn L1BatchCommitDataGenerator>,
) -> H256 {
let (commit_tx_input_data, successful_status) = match self {
Self::MissingStatus => {
@@ -614,7 +615,7 @@ async fn checker_detects_incorrect_tx_data(kind: IncorrectDataKind, snapshot_rec
}

let l1_batch = create_l1_batch_with_metadata(if snapshot_recovery { 99 } else { 1 });
-let l1_batch_commit_data_generator = L1BatchCommitDataGeneratorMode::Rollup;
+let l1_batch_commit_data_generator = Arc::new(RollupModeL1BatchCommitDataGenerator {});
let client = MockEthereum::default();
let commit_tx_hash = kind
.apply(&client, &l1_batch, l1_batch_commit_data_generator.clone())