From 405233d90609bfd03aaea60abf90e1903436b578 Mon Sep 17 00:00:00 2001
From: Zach Kolodny
Date: Fri, 10 May 2024 08:24:09 -0400
Subject: [PATCH 01/16] chore(validium): wipe blob hash values

---
 .../structures/commit_batch_info.rs | 11 +++++----
 .../src/input_generation.rs         | 23 ++++++++++++-------
 2 files changed, 22 insertions(+), 12 deletions(-)

diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs
index e40d4e61c66..2422d10ed7c 100644
--- a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs
+++ b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs
@@ -3,7 +3,7 @@ use zksync_types::{
     ethabi::Token,
     pubdata_da::PubdataDA,
     web3::contract::Error as ContractError,
-    ProtocolVersionId, U256,
+    ProtocolVersionId, H256, PUBDATA_CHUNK_PUBLISHER_ADDRESS, U256,
 };
 
 use crate::{
@@ -307,6 +307,11 @@ impl<'a> CommitBatchInfoValidium<'a> {
                 ),
             ]
         } else {
+            let mut system_logs = self.l1_batch_with_metadata.header.system_logs.clone();
+            system_logs
+                .iter_mut()
+                .filter(|log| log.0.sender == PUBDATA_CHUNK_PUBLISHER_ADDRESS)
+                .for_each(|log| log.0.value = H256::default());
             vec![
                 // `batchNumber`
                 Token::Uint(U256::from(self.l1_batch_with_metadata.header.number.0)),
@@ -353,9 +358,7 @@ impl<'a> CommitBatchInfoValidium<'a> {
                         .to_vec(),
                 ),
                 // `systemLogs`
-                Token::Bytes(serialize_commitments(
-                    &self.l1_batch_with_metadata.header.system_logs,
-                )),
+                Token::Bytes(serialize_commitments(&system_logs)),
             ]
         }
     }
diff --git a/core/node/commitment_generator/src/input_generation.rs b/core/node/commitment_generator/src/input_generation.rs
index db32776dbac..c73a9f04b9f 100644
--- a/core/node/commitment_generator/src/input_generation.rs
+++ b/core/node/commitment_generator/src/input_generation.rs
@@ -1,6 +1,6 @@
 use std::fmt;
 
-use zksync_types::{commitment::CommitmentInput, H256};
+use zksync_types::{commitment::CommitmentInput, H256, PUBDATA_CHUNK_PUBLISHER_ADDRESS};
 
 #[derive(Debug)]
 pub struct ValidiumInputGenerator;
@@ -24,13 +24,20 @@ impl InputGenerator for ValidiumInputGenerator {
                 state_diffs,
                 aux_commitments,
                 blob_commitments,
-            } => CommitmentInput::PostBoojum {
-                common,
-                system_logs,
-                state_diffs,
-                aux_commitments,
-                blob_commitments: vec![H256::zero(); blob_commitments.len()],
-            },
+            } => {
+                let mut system_logs = system_logs.clone();
+                system_logs
+                    .iter_mut()
+                    .filter(|log| log.0.sender == PUBDATA_CHUNK_PUBLISHER_ADDRESS)
+                    .for_each(|log| log.0.value = H256::default());
+                CommitmentInput::PostBoojum {
+                    common,
+                    system_logs,
+                    state_diffs,
+                    aux_commitments,
+                    blob_commitments: vec![H256::zero(); blob_commitments.len()],
+                }
+            }
             _ => input,
         }
     }

From 84ad0b09869bec807665a2c1bdfe8708c3fcef80 Mon Sep 17 00:00:00 2001
From: EmilLuta
Date: Fri, 10 May 2024 15:06:41 +0200
Subject: [PATCH 02/16] Dirty hacks to submit only empty blobs

---
 core/lib/basic_types/src/basic_fri_types.rs       |  6 ++++++
 .../proof_data_handler/src/request_processor.rs   |  9 +++++----
 .../prover_dal/src/fri_witness_generator_dal.rs   | 15 ++++++++-------
 prover/prover_fri/src/main.rs                     |  7 ++++---
 prover/witness_vector_generator/src/main.rs       |  6 +++---
 5 files changed, 26 insertions(+), 17 deletions(-)

diff --git a/core/lib/basic_types/src/basic_fri_types.rs b/core/lib/basic_types/src/basic_fri_types.rs
index fc6818e773f..9938ee310d9 100644
--- a/core/lib/basic_types/src/basic_fri_types.rs
+++ b/core/lib/basic_types/src/basic_fri_types.rs
@@ -39,6 +39,12 @@ impl Eip4844Blobs {
 }
 
 impl Eip4844Blobs {
+    pub fn empty() -> Self {
+        Self {
+            blobs: Default::default(),
+        }
+    }
+
     pub fn encode(self) -> Vec<u8> {
         self.blobs().into_iter().flatten().flatten().collect()
     }
diff --git a/core/node/proof_data_handler/src/request_processor.rs b/core/node/proof_data_handler/src/request_processor.rs
index 8b3d0de745d..f9b4d8b701b 100644
--- a/core/node/proof_data_handler/src/request_processor.rs
+++ b/core/node/proof_data_handler/src/request_processor.rs
@@ -131,10 +131,11 @@ impl RequestProcessor {
             .unwrap()
             .unwrap();
 
-        let eip_4844_blobs = Eip4844Blobs::decode(&storage_batch.pubdata_input.expect(&format!(
-            "expected pubdata, but it is not available for batch {l1_batch_number:?}"
-        )))
-        .expect("failed to decode EIP-4844 blobs");
+        // let eip_4844_blobs = Eip4844Blobs::decode(&storage_batch.pubdata_input.expect(&format!(
+        //     "expected pubdata, but it is not available for batch {l1_batch_number:?}"
+        // )))
+        // .expect("failed to decode EIP-4844 blobs");
+        let eip_4844_blobs = Eip4844Blobs::empty();
 
         let proof_gen_data = ProofGenerationData {
             l1_batch_number,
diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs
index fcbba5c5b7c..c6c1adc9143 100644
--- a/prover/prover_dal/src/fri_witness_generator_dal.rs
+++ b/prover/prover_dal/src/fri_witness_generator_dal.rs
@@ -117,13 +117,14 @@ impl FriWitnessGeneratorDal<'_, '_> {
             .await
             .unwrap()
             .map(|row| {
                 (
                     L1BatchNumber(row.l1_batch_number as u32),
-                    Eip4844Blobs::decode(&row.eip_4844_blobs.unwrap_or_else(|| {
-                        panic!(
-                            "missing eip 4844 blobs from the database for batch {}",
-                            row.l1_batch_number
-                        )
-                    }))
-                    .expect("failed to decode EIP4844 blobs"),
+                    Eip4844Blobs::empty(),
+                    // Eip4844Blobs::decode(&row.eip_4844_blobs.unwrap_or_else(|| {
+                    //     panic!(
+                    //         "missing eip 4844 blobs from the database for batch {}",
+                    //         row.l1_batch_number
+                    //     )
+                    // }))
+                    // .expect("failed to decode EIP4844 blobs"),
                 )
             })
     }
diff --git a/prover/prover_fri/src/main.rs b/prover/prover_fri/src/main.rs
index c90b4ced147..4fcda4a50e5 100644
--- a/prover/prover_fri/src/main.rs
+++ b/prover/prover_fri/src/main.rs
@@ -238,9 +238,10 @@ async fn get_prover_tasks(
     let shared_witness_vector_queue = Arc::new(Mutex::new(witness_vector_queue));
     let consumer = shared_witness_vector_queue.clone();
 
-    let zone = get_zone(&prover_config.zone_read_url)
-        .await
-        .context("get_zone()")?;
+    // let zone = get_zone(&prover_config.zone_read_url)
+    //     .await
+    //     .context("get_zone()")?;
+    let zone = String::from("home");
     let local_ip = local_ip().context("Failed obtaining local IP address")?;
     let address = SocketAddress {
         host: local_ip,
diff --git a/prover/witness_vector_generator/src/main.rs b/prover/witness_vector_generator/src/main.rs
index 2260648d3f1..1a6553a8eed 100644
--- a/prover/witness_vector_generator/src/main.rs
+++ b/prover/witness_vector_generator/src/main.rs
@@ -84,9 +84,9 @@ async fn main() -> anyhow::Result<()> {
     let circuit_ids_for_round_to_be_proven =
         get_all_circuit_id_round_tuples_for(circuit_ids_for_round_to_be_proven);
     let fri_prover_config = FriProverConfig::from_env().context("FriProverConfig::from_env()")?;
-    let zone_url = &fri_prover_config.zone_read_url;
-    let zone = get_zone(zone_url).await.context("get_zone()")?;
-
+    // let zone_url = &fri_prover_config.zone_read_url;
+    // let zone = get_zone(zone_url).await.context("get_zone()")?;
+    let zone = String::from("home");
     let protocol_version = ProtocolVersionId::current_prover_version();
 
     let witness_vector_generator = WitnessVectorGenerator::new(

From 90c682d646ca55d7f6085ce10661a558c0473118 Mon Sep 17 00:00:00 2001
From: Zach Kolodny
Date: Fri, 10 May 2024 10:06:43 -0400
Subject: [PATCH 03/16] filter logs before end of SK

---
 core/lib/zksync_core/src/state_keeper/io/seal_logic.rs | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs b/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs
index d04f17b2c22..70567f16427 100644
--- a/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs
+++ b/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs
@@ -24,7 +24,7 @@ use zksync_types::{
     utils::display_timestamp,
     zk_evm_types::LogQuery,
     AccountTreeId, Address, ExecuteTransactionCommon, L1BlockNumber, ProtocolVersionId, StorageKey,
-    StorageLog, Transaction, VmEvent, H256,
+    StorageLog, Transaction, VmEvent, H256, PUBDATA_CHUNK_PUBLISHER_ADDRESS,
 };
 use zksync_utils::u256_to_h256;
 
@@ -101,6 +101,11 @@ impl UpdatesManager {
         let progress = L1_BATCH_METRICS.start(L1BatchSealStage::InsertL1BatchHeader);
         let l2_to_l1_messages =
             extract_long_l2_to_l1_messages(&finished_batch.final_execution_state.events);
+        let mut system_logs = finished_batch.final_execution_state.system_logs.clone();
+        system_logs
+            .iter_mut()
+            .filter(|log| log.0.sender == PUBDATA_CHUNK_PUBLISHER_ADDRESS)
+            .for_each(|log| log.0.value = H256::default());
         let l1_batch = L1BatchHeader {
             number: self.l1_batch.number,
             timestamp: self.batch_timestamp(),
@@ -119,7 +124,7 @@ impl UpdatesManager {
                 .clone(),
             base_system_contracts_hashes: self.base_system_contract_hashes(),
             protocol_version: Some(self.protocol_version()),
-            system_logs: finished_batch.final_execution_state.system_logs.clone(),
+            system_logs,
             pubdata_input: finished_batch.pubdata_input.clone(),
         };
 

From c33b11e38f01815d81f028839c9182cea06ac9be Mon Sep 17 00:00:00 2001
From: Zach Kolodny
Date: Fri, 10 May 2024 13:09:09 -0400
Subject: [PATCH 04/16] move log sanitization to contracts

---
 contracts                                 |  2 +-
 .../structures/commit_batch_info.rs       | 11 ++++-----
 .../src/state_keeper/io/seal_logic.rs     |  9 ++------
 .../src/input_generation.rs               | 23 +++++++------------
 4 files changed, 15 insertions(+), 30 deletions(-)

diff --git a/contracts b/contracts
index accecc4526c..e27d5d2ac16 160000
--- a/contracts
+++ b/contracts
@@ -1 +1 @@
-Subproject commit accecc4526c95fccbf190d7c2f97f174d1c5fce0
+Subproject commit e27d5d2ac16425842bcfa03cffa24c89ddb1d5f9
diff --git a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs
index 2422d10ed7c..e40d4e61c66 100644
--- a/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs
+++ b/core/lib/l1_contract_interface/src/i_executor/structures/commit_batch_info.rs
@@ -3,7 +3,7 @@ use zksync_types::{
     ethabi::Token,
     pubdata_da::PubdataDA,
     web3::contract::Error as ContractError,
-    ProtocolVersionId, H256, PUBDATA_CHUNK_PUBLISHER_ADDRESS, U256,
+    ProtocolVersionId, U256,
 };
 
 use crate::{
@@ -307,11 +307,6 @@ impl<'a> CommitBatchInfoValidium<'a> {
                 ),
             ]
         } else {
-            let mut system_logs = self.l1_batch_with_metadata.header.system_logs.clone();
-            system_logs
-                .iter_mut()
-                .filter(|log| log.0.sender == PUBDATA_CHUNK_PUBLISHER_ADDRESS)
-                .for_each(|log| log.0.value = H256::default());
             vec![
                 // `batchNumber`
                 Token::Uint(U256::from(self.l1_batch_with_metadata.header.number.0)),
@@ -358,7 +353,9 @@ impl<'a> CommitBatchInfoValidium<'a> {
                         .to_vec(),
                 ),
                 // `systemLogs`
-                Token::Bytes(serialize_commitments(&system_logs)),
+                Token::Bytes(serialize_commitments(
+                    &self.l1_batch_with_metadata.header.system_logs,
+                )),
             ]
         }
     }
diff --git a/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs b/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs
index 70567f16427..d04f17b2c22 100644
--- a/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs
+++ b/core/lib/zksync_core/src/state_keeper/io/seal_logic.rs
@@ -24,7 +24,7 @@ use zksync_types::{
     utils::display_timestamp,
     zk_evm_types::LogQuery,
     AccountTreeId, Address, ExecuteTransactionCommon, L1BlockNumber, ProtocolVersionId, StorageKey,
-    StorageLog, Transaction, VmEvent, H256, PUBDATA_CHUNK_PUBLISHER_ADDRESS,
+    StorageLog, Transaction, VmEvent, H256,
 };
 use zksync_utils::u256_to_h256;
 
@@ -101,11 +101,6 @@ impl UpdatesManager {
         let progress = L1_BATCH_METRICS.start(L1BatchSealStage::InsertL1BatchHeader);
         let l2_to_l1_messages =
             extract_long_l2_to_l1_messages(&finished_batch.final_execution_state.events);
-        let mut system_logs = finished_batch.final_execution_state.system_logs.clone();
-        system_logs
-            .iter_mut()
-            .filter(|log| log.0.sender == PUBDATA_CHUNK_PUBLISHER_ADDRESS)
-            .for_each(|log| log.0.value = H256::default());
         let l1_batch = L1BatchHeader {
             number: self.l1_batch.number,
             timestamp: self.batch_timestamp(),
@@ -124,7 +119,7 @@ impl UpdatesManager {
                 .clone(),
             base_system_contracts_hashes: self.base_system_contract_hashes(),
             protocol_version: Some(self.protocol_version()),
-            system_logs,
+            system_logs: finished_batch.final_execution_state.system_logs.clone(),
             pubdata_input: finished_batch.pubdata_input.clone(),
         };
 
diff --git a/core/node/commitment_generator/src/input_generation.rs b/core/node/commitment_generator/src/input_generation.rs
index c73a9f04b9f..db32776dbac 100644
--- a/core/node/commitment_generator/src/input_generation.rs
+++ b/core/node/commitment_generator/src/input_generation.rs
@@ -1,6 +1,6 @@
 use std::fmt;
 
-use zksync_types::{commitment::CommitmentInput, H256, PUBDATA_CHUNK_PUBLISHER_ADDRESS};
+use zksync_types::{commitment::CommitmentInput, H256};
 
 #[derive(Debug)]
 pub struct ValidiumInputGenerator;
@@ -24,20 +24,13 @@ impl InputGenerator for ValidiumInputGenerator {
                 state_diffs,
                 aux_commitments,
                 blob_commitments,
-            } => {
-                let mut system_logs = system_logs.clone();
-                system_logs
-                    .iter_mut()
-                    .filter(|log| log.0.sender == PUBDATA_CHUNK_PUBLISHER_ADDRESS)
-                    .for_each(|log| log.0.value = H256::default());
-                CommitmentInput::PostBoojum {
-                    common,
-                    system_logs,
-                    state_diffs,
-                    aux_commitments,
-                    blob_commitments: vec![H256::zero(); blob_commitments.len()],
-                }
-            }
+            } => CommitmentInput::PostBoojum {
+                common,
+                system_logs,
+                state_diffs,
+                aux_commitments,
+                blob_commitments: vec![H256::zero(); blob_commitments.len()],
+            },
             _ => input,
         }
     }

From 131064b626508e09994b504fd460b75f40d65404 Mon Sep 17 00:00:00 2001
From: Zach Kolodny
Date: Fri, 10 May 2024 13:29:02 -0400
Subject: [PATCH 05/16] remove linear hashes from commitment

---
 core/lib/types/src/commitment/mod.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/lib/types/src/commitment/mod.rs b/core/lib/types/src/commitment/mod.rs
index 31baa78ee1f..58098df0af8 100644
--- a/core/lib/types/src/commitment/mod.rs
+++ b/core/lib/types/src/commitment/mod.rs
@@ -357,7 +357,7 @@ impl L1BatchAuxiliaryOutput {
         let state_diffs_compressed = compress_state_diffs(state_diffs);
 
         let blob_linear_hashes =
-            parse_system_logs_for_blob_hashes(&common_input.protocol_version, &system_logs);
+            vec![H256::zero(); num_blobs_required(&common_input.protocol_version)];
 
         // Sanity
checks. System logs are empty for the genesis batch, so we can't do checks for it. if !system_logs.is_empty() { From 4a7b41a75237a660eef10a2fd9fe24524dc30e2d Mon Sep 17 00:00:00 2001 From: Zach Kolodny Date: Fri, 10 May 2024 18:41:45 -0400 Subject: [PATCH 06/16] cleanup --- core/bin/external_node/src/main.rs | 14 ++++- core/lib/types/src/commitment/mod.rs | 14 +++-- core/lib/zksync_core/src/lib.rs | 33 +++++++++--- .../src/commitment_post_processor.rs | 52 +++++++++++++++++++ core/node/commitment_generator/src/lib.rs | 8 +++ .../layers/commitment_generator.rs | 35 ++++++++++--- .../layers/proof_data_handler.rs | 11 +++- .../proof_data_handler/src/blob_processor.rs | 33 ++++++++++++ core/node/proof_data_handler/src/lib.rs | 5 +- .../src/request_processor.rs | 18 +++---- 10 files changed, 191 insertions(+), 32 deletions(-) create mode 100644 core/node/commitment_generator/src/commitment_post_processor.rs create mode 100644 core/node/proof_data_handler/src/blob_processor.rs diff --git a/core/bin/external_node/src/main.rs b/core/bin/external_node/src/main.rs index dc938094038..ae5a471f3ab 100644 --- a/core/bin/external_node/src/main.rs +++ b/core/bin/external_node/src/main.rs @@ -10,6 +10,9 @@ use tokio::{ }; use zksync_block_reverter::{BlockReverter, NodeRole}; use zksync_commitment_generator::{ + commitment_post_processor::{ + CommitmentPostProcessor, RollupCommitmentPostProcessor, ValidiumCommitmentPostProcessor, + }, input_generation::{InputGenerator, RollupInputGenerator, ValidiumInputGenerator}, CommitmentGenerator, }; @@ -347,17 +350,20 @@ async fn run_core( ) .await?; - let (l1_batch_commit_data_generator, input_generator): ( + let (l1_batch_commit_data_generator, input_generator, commitment_post_processor): ( Arc, Box, + Box, ) = match config.optional.l1_batch_commit_data_generator_mode { L1BatchCommitDataGeneratorMode::Rollup => ( Arc::new(RollupModeL1BatchCommitDataGenerator {}), Box::new(RollupInputGenerator), + Box::new(RollupCommitmentPostProcessor), ), L1BatchCommitDataGeneratorMode::Validium => ( Arc::new(ValidiumModeL1BatchCommitDataGenerator {}), Box::new(ValidiumInputGenerator), + Box::new(ValidiumCommitmentPostProcessor), ), }; @@ -389,7 +395,11 @@ async fn run_core( .build() .await .context("failed to build a commitment_generator_pool")?; - let commitment_generator = CommitmentGenerator::new(commitment_generator_pool, input_generator); + let commitment_generator = CommitmentGenerator::new( + commitment_generator_pool, + input_generator, + commitment_post_processor, + ); app_health.insert_component(commitment_generator.health_check())?; let commitment_generator_handle = tokio::spawn(commitment_generator.run(stop_receiver.clone())); diff --git a/core/lib/types/src/commitment/mod.rs b/core/lib/types/src/commitment/mod.rs index 58098df0af8..36966c5dc1b 100644 --- a/core/lib/types/src/commitment/mod.rs +++ b/core/lib/types/src/commitment/mod.rs @@ -259,7 +259,7 @@ impl SerializeCommitment for StateDiffRecord { #[derive(Debug, Clone, Eq, PartialEq)] #[cfg_attr(test, derive(Serialize, Deserialize))] -struct L1BatchAuxiliaryCommonOutput { +pub struct L1BatchAuxiliaryCommonOutput { l2_l1_logs_merkle_root: H256, protocol_version: ProtocolVersionId, } @@ -267,7 +267,7 @@ struct L1BatchAuxiliaryCommonOutput { /// Block Output produced by Virtual Machine #[derive(Debug, Clone, Eq, PartialEq)] #[cfg_attr(test, derive(Serialize, Deserialize))] -enum L1BatchAuxiliaryOutput { +pub enum L1BatchAuxiliaryOutput { PreBoojum { common: L1BatchAuxiliaryCommonOutput, l2_l1_logs_linear_hash: H256, 
@@ -357,7 +357,7 @@ impl L1BatchAuxiliaryOutput { let state_diffs_compressed = compress_state_diffs(state_diffs); let blob_linear_hashes = - vec![H256::zero(); num_blobs_required(&common_input.protocol_version)]; + parse_system_logs_for_blob_hashes(&common_input.protocol_version, &system_logs); // Sanity checks. System logs are empty for the genesis batch, so we can't do checks for it. if !system_logs.is_empty() { @@ -528,8 +528,8 @@ impl L1BatchPassThroughData { #[derive(Debug, Clone)] pub struct L1BatchCommitment { pass_through_data: L1BatchPassThroughData, - auxiliary_output: L1BatchAuxiliaryOutput, - meta_parameters: L1BatchMetaParameters, + pub auxiliary_output: L1BatchAuxiliaryOutput, + pub meta_parameters: L1BatchMetaParameters, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -574,6 +574,10 @@ impl L1BatchCommitment { self.meta_parameters.clone() } + pub fn aux_output(&self) -> L1BatchAuxiliaryOutput { + self.auxiliary_output.clone() + } + pub fn l2_l1_logs_merkle_root(&self) -> H256 { self.auxiliary_output.common().l2_l1_logs_merkle_root } diff --git a/core/lib/zksync_core/src/lib.rs b/core/lib/zksync_core/src/lib.rs index b8513c6963f..0b82c46cd74 100644 --- a/core/lib/zksync_core/src/lib.rs +++ b/core/lib/zksync_core/src/lib.rs @@ -21,6 +21,9 @@ use zksync_circuit_breaker::{ CircuitBreakerChecker, CircuitBreakers, }; use zksync_commitment_generator::{ + commitment_post_processor::{ + CommitmentPostProcessor, RollupCommitmentPostProcessor, ValidiumCommitmentPostProcessor, + }, input_generation::{InputGenerator, RollupInputGenerator, ValidiumInputGenerator}, CommitmentGenerator, }; @@ -76,6 +79,9 @@ use zksync_node_fee_model::{ }; use zksync_node_genesis::{ensure_genesis_state, GenesisParams}; use zksync_object_store::{ObjectStore, ObjectStoreFactory}; +use zksync_proof_data_handler::blob_processor::{ + BlobProcessor, RollupBlobProcessor, ValidiumBlobProcessor, +}; use zksync_shared_metrics::{InitStage, APP_METRICS}; use zksync_state::{PostgresStorageCaches, RocksdbStorageOptions}; use zksync_types::{ethabi::Contract, fee_model::FeeModelConfig, Address, L2ChainId}; @@ -605,13 +611,24 @@ pub async fn initialize_components( tracing::info!("initialized ETH-Watcher in {elapsed:?}"); } - let input_generator: Box = if genesis_config - .l1_batch_commit_data_generator_mode + let (input_generator, commitment_post_processor, blob_processor): ( + Box, + Box, + Arc, + ) = if genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitDataGeneratorMode::Validium { - Box::new(ValidiumInputGenerator) + ( + Box::new(ValidiumInputGenerator), + Box::new(ValidiumCommitmentPostProcessor), + Arc::new(ValidiumBlobProcessor), + ) } else { - Box::new(RollupInputGenerator) + ( + Box::new(RollupInputGenerator), + Box::new(RollupCommitmentPostProcessor), + Arc::new(RollupBlobProcessor), + ) }; if components.contains(&Component::EthTxAggregator) { @@ -771,6 +788,7 @@ pub async fn initialize_components( .context("proof_data_handler_config")?, store_factory.create_store().await, connection_pool.clone(), + blob_processor, stop_receiver.clone(), ))); } @@ -781,8 +799,11 @@ pub async fn initialize_components( .build() .await .context("failed to build commitment_generator_pool")?; - let commitment_generator = - CommitmentGenerator::new(commitment_generator_pool, input_generator); + let commitment_generator = CommitmentGenerator::new( + commitment_generator_pool, + input_generator, + commitment_post_processor, + ); app_health.insert_component(commitment_generator.health_check())?; task_futures.push(tokio::spawn( 
commitment_generator.run(stop_receiver.clone()), diff --git a/core/node/commitment_generator/src/commitment_post_processor.rs b/core/node/commitment_generator/src/commitment_post_processor.rs new file mode 100644 index 00000000000..1ce3871f5c6 --- /dev/null +++ b/core/node/commitment_generator/src/commitment_post_processor.rs @@ -0,0 +1,52 @@ +use std::fmt; + +use zksync_types::{ + commitment::{L1BatchAuxiliaryOutput, L1BatchCommitment}, + H256, +}; + +#[derive(Debug)] +pub struct ValidiumCommitmentPostProcessor; + +#[derive(Debug)] +pub struct RollupCommitmentPostProcessor; + +/// Definition of trait handling post processing the L1BatchCommitment depending on the DA solution +/// being utilized. +pub trait CommitmentPostProcessor: 'static + fmt::Debug + Send + Sync { + fn post_process_commitment(&self, commitment: L1BatchCommitment) -> L1BatchCommitment; +} + +impl CommitmentPostProcessor for ValidiumCommitmentPostProcessor { + fn post_process_commitment(&self, mut commitment: L1BatchCommitment) -> L1BatchCommitment { + let aux_output = match commitment.aux_output() { + L1BatchAuxiliaryOutput::PostBoojum { + common, + system_logs_linear_hash, + state_diffs_compressed, + state_diffs_hash, + aux_commitments, + blob_linear_hashes, + blob_commitments, + } => L1BatchAuxiliaryOutput::PostBoojum { + common, + system_logs_linear_hash, + state_diffs_compressed, + state_diffs_hash, + aux_commitments, + blob_linear_hashes: vec![H256::zero(); blob_linear_hashes.len()], + blob_commitments: vec![H256::zero(); blob_commitments.len()], + }, + _ => commitment.aux_output(), + }; + + commitment.auxiliary_output = aux_output; + commitment + } +} + +impl CommitmentPostProcessor for RollupCommitmentPostProcessor { + fn post_process_commitment(&self, commitment: L1BatchCommitment) -> L1BatchCommitment { + commitment + } +} diff --git a/core/node/commitment_generator/src/lib.rs b/core/node/commitment_generator/src/lib.rs index f2975383d76..115629c22de 100644 --- a/core/node/commitment_generator/src/lib.rs +++ b/core/node/commitment_generator/src/lib.rs @@ -17,11 +17,13 @@ use zksync_types::{ use zksync_utils::h256_to_u256; use crate::{ + commitment_post_processor::CommitmentPostProcessor, input_generation::InputGenerator, metrics::{CommitmentStage, METRICS}, utils::{bootloader_initial_content_commitment, events_queue_commitment}, }; +pub mod commitment_post_processor; pub mod input_generation; mod metrics; mod utils; @@ -33,17 +35,20 @@ pub struct CommitmentGenerator { connection_pool: ConnectionPool, health_updater: HealthUpdater, input_generator: Box, + commitment_post_processor: Box, } impl CommitmentGenerator { pub fn new( connection_pool: ConnectionPool, input_generator: Box, + commitment_post_processor: Box, ) -> Self { Self { connection_pool, health_updater: ReactiveHealthCheck::new("commitment_generator").1, input_generator, + commitment_post_processor, } } @@ -271,6 +276,9 @@ impl CommitmentGenerator { let latency = METRICS.generate_commitment_latency_stage[&CommitmentStage::Calculate].start(); let commitment = L1BatchCommitment::new(input); + let commitment = self + .commitment_post_processor + .post_process_commitment(commitment); let artifacts = commitment.artifacts(); let latency = latency.observe(); tracing::debug!( diff --git a/core/node/node_framework/src/implementations/layers/commitment_generator.rs b/core/node/node_framework/src/implementations/layers/commitment_generator.rs index 319e486dc6a..cc357a4e5bf 100644 --- a/core/node/node_framework/src/implementations/layers/commitment_generator.rs 
+++ b/core/node/node_framework/src/implementations/layers/commitment_generator.rs @@ -1,4 +1,7 @@ use zksync_commitment_generator::{ + commitment_post_processor::{ + CommitmentPostProcessor, RollupCommitmentPostProcessor, ValidiumCommitmentPostProcessor, + }, input_generation::{InputGenerator, RollupInputGenerator, ValidiumInputGenerator}, CommitmentGenerator, }; @@ -16,18 +19,30 @@ use crate::{ pub struct CommitmentGeneratorLayer { input_generator: Box, + commitment_post_processor: Box, } impl CommitmentGeneratorLayer { pub fn new(commit_data_generator_mode: L1BatchCommitDataGeneratorMode) -> Self { - let input_generator: Box = - if commit_data_generator_mode == L1BatchCommitDataGeneratorMode::Validium { - Box::new(ValidiumInputGenerator) - } else { - Box::new(RollupInputGenerator) - }; + let (input_generator, commitment_post_processor): ( + Box, + Box, + ) = if commit_data_generator_mode == L1BatchCommitDataGeneratorMode::Validium { + ( + Box::new(ValidiumInputGenerator), + Box::new(ValidiumCommitmentPostProcessor), + ) + } else { + ( + Box::new(RollupInputGenerator), + Box::new(RollupCommitmentPostProcessor), + ) + }; - Self { input_generator } + Self { + input_generator, + commitment_post_processor, + } } } @@ -41,7 +56,11 @@ impl WiringLayer for CommitmentGeneratorLayer { let pool_resource = context.get_resource::>().await?; let main_pool = pool_resource.get().await.unwrap(); - let commitment_generator = CommitmentGenerator::new(main_pool, self.input_generator); + let commitment_generator = CommitmentGenerator::new( + main_pool, + self.input_generator, + self.commitment_post_processor, + ); let AppHealthCheckResource(app_health) = context.get_resource_or_default().await; app_health diff --git a/core/node/node_framework/src/implementations/layers/proof_data_handler.rs b/core/node/node_framework/src/implementations/layers/proof_data_handler.rs index b9ea91179dd..4faa8d77b99 100644 --- a/core/node/node_framework/src/implementations/layers/proof_data_handler.rs +++ b/core/node/node_framework/src/implementations/layers/proof_data_handler.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use zksync_config::configs::ProofDataHandlerConfig; use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; +use zksync_proof_data_handler::blob_processor::BlobProcessor; use crate::{ implementations::resources::{ @@ -24,12 +25,17 @@ use crate::{ #[derive(Debug)] pub struct ProofDataHandlerLayer { proof_data_handler_config: ProofDataHandlerConfig, + blob_processor: Arc, } impl ProofDataHandlerLayer { - pub fn new(proof_data_handler_config: ProofDataHandlerConfig) -> Self { + pub fn new( + proof_data_handler_config: ProofDataHandlerConfig, + blob_processor: Arc, + ) -> Self { Self { proof_data_handler_config, + blob_processor, } } } @@ -50,6 +56,7 @@ impl WiringLayer for ProofDataHandlerLayer { proof_data_handler_config: self.proof_data_handler_config, blob_store: object_store.0, main_pool, + blob_processor: self.blob_processor, })); Ok(()) @@ -61,6 +68,7 @@ struct ProofDataHandlerTask { proof_data_handler_config: ProofDataHandlerConfig, blob_store: Arc, main_pool: ConnectionPool, + blob_processor: Arc, } #[async_trait::async_trait] @@ -74,6 +82,7 @@ impl Task for ProofDataHandlerTask { self.proof_data_handler_config, self.blob_store, self.main_pool, + self.blob_processor, stop_receiver.0, ) .await diff --git a/core/node/proof_data_handler/src/blob_processor.rs b/core/node/proof_data_handler/src/blob_processor.rs new file mode 100644 index 00000000000..e91d29337db --- /dev/null +++ 
b/core/node/proof_data_handler/src/blob_processor.rs @@ -0,0 +1,33 @@ +use std::fmt; + +use zksync_types::{basic_fri_types::Eip4844Blobs, L1BatchNumber}; + +#[derive(Debug)] +pub struct ValidiumBlobProcessor; + +#[derive(Debug)] +pub struct RollupBlobProcessor; + +pub trait BlobProcessor: 'static + fmt::Debug + Send + Sync { + fn process_blobs(&self, l1_batch_number: L1BatchNumber, blobs: Option>) + -> Eip4844Blobs; +} + +impl BlobProcessor for ValidiumBlobProcessor { + fn process_blobs(&self, _: L1BatchNumber, _: Option>) -> Eip4844Blobs { + Eip4844Blobs::empty() + } +} + +impl BlobProcessor for RollupBlobProcessor { + fn process_blobs( + &self, + l1_batch_number: L1BatchNumber, + blobs: Option>, + ) -> Eip4844Blobs { + Eip4844Blobs::decode(&blobs.expect(&format!( + "expected pubdata, but it is not available for batch {l1_batch_number:?}" + ))) + .expect("failed to decode EIP-4844 blobs") + } +} diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 558b8fed576..1cecf542030 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -1,5 +1,6 @@ use std::{net::SocketAddr, sync::Arc}; +use crate::blob_processor::BlobProcessor; use anyhow::Context as _; use axum::{extract::Path, routing::post, Json, Router}; use tokio::sync::watch; @@ -10,17 +11,19 @@ use zksync_prover_interface::api::{ProofGenerationDataRequest, SubmitProofReques use crate::request_processor::RequestProcessor; +pub mod blob_processor; mod request_processor; pub async fn run_server( config: ProofDataHandlerConfig, blob_store: Arc, pool: ConnectionPool, + blob_processor: Arc, mut stop_receiver: watch::Receiver, ) -> anyhow::Result<()> { let bind_address = SocketAddr::from(([0, 0, 0, 0], config.http_port)); tracing::debug!("Starting proof data handler server on {bind_address}"); - let get_proof_gen_processor = RequestProcessor::new(blob_store, pool, config); + let get_proof_gen_processor = RequestProcessor::new(blob_store, pool, config, blob_processor); let submit_proof_processor = get_proof_gen_processor.clone(); let app = Router::new() .route( diff --git a/core/node/proof_data_handler/src/request_processor.rs b/core/node/proof_data_handler/src/request_processor.rs index f9b4d8b701b..3789d42d6f9 100644 --- a/core/node/proof_data_handler/src/request_processor.rs +++ b/core/node/proof_data_handler/src/request_processor.rs @@ -13,16 +13,16 @@ use zksync_prover_interface::api::{ ProofGenerationData, ProofGenerationDataRequest, ProofGenerationDataResponse, SubmitProofRequest, SubmitProofResponse, }; -use zksync_types::{ - basic_fri_types::Eip4844Blobs, commitment::serialize_commitments, web3::keccak256, - L1BatchNumber, H256, -}; +use zksync_types::{commitment::serialize_commitments, web3::keccak256, L1BatchNumber, H256}; + +use crate::blob_processor::BlobProcessor; #[derive(Clone)] pub(crate) struct RequestProcessor { blob_store: Arc, pool: ConnectionPool, config: ProofDataHandlerConfig, + blob_processor: Arc, } pub(crate) enum RequestProcessorError { @@ -62,11 +62,13 @@ impl RequestProcessor { blob_store: Arc, pool: ConnectionPool, config: ProofDataHandlerConfig, + blob_processor: Arc, ) -> Self { Self { blob_store, pool, config, + blob_processor, } } @@ -131,11 +133,9 @@ impl RequestProcessor { .unwrap() .unwrap(); - // let eip_4844_blobs = Eip4844Blobs::decode(&storage_batch.pubdata_input.expect(&format!( - // "expected pubdata, but it is not available for batch {l1_batch_number:?}" - // ))) - // .expect("failed to decode EIP-4844 blobs"); 
- let eip_4844_blobs = Eip4844Blobs::empty(); + let eip_4844_blobs = self + .blob_processor + .process_blobs(l1_batch_number, storage_batch.pubdata_input); let proof_gen_data = ProofGenerationData { l1_batch_number, From 4b3a9d43d9864a80eb440b08037862d38fd86557 Mon Sep 17 00:00:00 2001 From: Zach Kolodny Date: Fri, 10 May 2024 18:42:20 -0400 Subject: [PATCH 07/16] zk fmt --- core/node/proof_data_handler/src/lib.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/core/node/proof_data_handler/src/lib.rs b/core/node/proof_data_handler/src/lib.rs index 1cecf542030..7865b767f66 100644 --- a/core/node/proof_data_handler/src/lib.rs +++ b/core/node/proof_data_handler/src/lib.rs @@ -1,6 +1,5 @@ use std::{net::SocketAddr, sync::Arc}; -use crate::blob_processor::BlobProcessor; use anyhow::Context as _; use axum::{extract::Path, routing::post, Json, Router}; use tokio::sync::watch; @@ -9,7 +8,7 @@ use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; use zksync_prover_interface::api::{ProofGenerationDataRequest, SubmitProofRequest}; -use crate::request_processor::RequestProcessor; +use crate::{blob_processor::BlobProcessor, request_processor::RequestProcessor}; pub mod blob_processor; mod request_processor; From 0cdf713c359f6987601888244242c3baf17bc4fe Mon Sep 17 00:00:00 2001 From: Zach Kolodny Date: Sat, 11 May 2024 11:45:35 -0400 Subject: [PATCH 08/16] update layer to take in config value --- contracts | 2 +- .../src/commitment_post_processor.rs | 2 +- core/node/node_framework/examples/main_node.rs | 2 ++ .../implementations/layers/proof_data_handler.rs | 14 +++++++++++--- 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/contracts b/contracts index e27d5d2ac16..452a54f6724 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit e27d5d2ac16425842bcfa03cffa24c89ddb1d5f9 +Subproject commit 452a54f6724347b7e517be1a3d948299ab827d8c diff --git a/core/node/commitment_generator/src/commitment_post_processor.rs b/core/node/commitment_generator/src/commitment_post_processor.rs index 1ce3871f5c6..c8a5158711b 100644 --- a/core/node/commitment_generator/src/commitment_post_processor.rs +++ b/core/node/commitment_generator/src/commitment_post_processor.rs @@ -11,7 +11,7 @@ pub struct ValidiumCommitmentPostProcessor; #[derive(Debug)] pub struct RollupCommitmentPostProcessor; -/// Definition of trait handling post processing the L1BatchCommitment depending on the DA solution +/// Definition of trait handling post processing the `L1BatchCommitment` depending on the DA solution /// being utilized. 
pub trait CommitmentPostProcessor: 'static + fmt::Debug + Send + Sync { fn post_process_commitment(&self, commitment: L1BatchCommitment) -> L1BatchCommitment; diff --git a/core/node/node_framework/examples/main_node.rs b/core/node/node_framework/examples/main_node.rs index b524664c89a..29947ae9ef6 100644 --- a/core/node/node_framework/examples/main_node.rs +++ b/core/node/node_framework/examples/main_node.rs @@ -174,8 +174,10 @@ impl MainNodeBuilder { } fn add_proof_data_handler_layer(mut self) -> anyhow::Result { + let genesis_config = GenesisConfig::from_env()?; self.node.add_layer(ProofDataHandlerLayer::new( ProofDataHandlerConfig::from_env()?, + genesis_config.l1_batch_commit_data_generator_mode, )); Ok(self) } diff --git a/core/node/node_framework/src/implementations/layers/proof_data_handler.rs b/core/node/node_framework/src/implementations/layers/proof_data_handler.rs index 4faa8d77b99..04bdbced0fd 100644 --- a/core/node/node_framework/src/implementations/layers/proof_data_handler.rs +++ b/core/node/node_framework/src/implementations/layers/proof_data_handler.rs @@ -1,9 +1,11 @@ use std::sync::Arc; -use zksync_config::configs::ProofDataHandlerConfig; +use zksync_config::configs::{chain::L1BatchCommitDataGeneratorMode, ProofDataHandlerConfig}; use zksync_dal::{ConnectionPool, Core}; use zksync_object_store::ObjectStore; -use zksync_proof_data_handler::blob_processor::BlobProcessor; +use zksync_proof_data_handler::blob_processor::{ + BlobProcessor, RollupBlobProcessor, ValidiumBlobProcessor, +}; use crate::{ implementations::resources::{ @@ -31,8 +33,14 @@ pub struct ProofDataHandlerLayer { impl ProofDataHandlerLayer { pub fn new( proof_data_handler_config: ProofDataHandlerConfig, - blob_processor: Arc, + data_generator_mode: L1BatchCommitDataGeneratorMode, ) -> Self { + let blob_processor: Arc = + if data_generator_mode == L1BatchCommitDataGeneratorMode::Validium { + Arc::new(ValidiumBlobProcessor) + } else { + Arc::new(RollupBlobProcessor) + }; Self { proof_data_handler_config, blob_processor, From b649bee865e6102b2f5a17a47695fbbf07b7a962 Mon Sep 17 00:00:00 2001 From: Zach Kolodny Date: Sat, 11 May 2024 12:13:35 -0400 Subject: [PATCH 09/16] lint fix --- core/node/proof_data_handler/src/blob_processor.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/node/proof_data_handler/src/blob_processor.rs b/core/node/proof_data_handler/src/blob_processor.rs index e91d29337db..5f1cfa9dfa4 100644 --- a/core/node/proof_data_handler/src/blob_processor.rs +++ b/core/node/proof_data_handler/src/blob_processor.rs @@ -25,9 +25,9 @@ impl BlobProcessor for RollupBlobProcessor { l1_batch_number: L1BatchNumber, blobs: Option>, ) -> Eip4844Blobs { - Eip4844Blobs::decode(&blobs.expect(&format!( + let blobs = &blobs.expect(&format!( "expected pubdata, but it is not available for batch {l1_batch_number:?}" - ))) - .expect("failed to decode EIP-4844 blobs") + )); + Eip4844Blobs::decode(blobs).expect("failed to decode EIP-4844 blobs") } } From b4674a9602f0bd93e1bbe53dc8494eed84100303 Mon Sep 17 00:00:00 2001 From: Zach Kolodny Date: Sat, 11 May 2024 13:41:12 -0400 Subject: [PATCH 10/16] lint fix --- core/node/proof_data_handler/src/blob_processor.rs | 6 +++--- prover/witness_vector_generator/src/main.rs | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/core/node/proof_data_handler/src/blob_processor.rs b/core/node/proof_data_handler/src/blob_processor.rs index 5f1cfa9dfa4..5ec86275bea 100644 --- a/core/node/proof_data_handler/src/blob_processor.rs +++ 
b/core/node/proof_data_handler/src/blob_processor.rs @@ -25,9 +25,9 @@ impl BlobProcessor for RollupBlobProcessor { l1_batch_number: L1BatchNumber, blobs: Option>, ) -> Eip4844Blobs { - let blobs = &blobs.expect(&format!( - "expected pubdata, but it is not available for batch {l1_batch_number:?}" - )); + let blobs = &blobs.unwrap_or_else(|| { + panic!("expected pubdata, but it is not available for batch {l1_batch_number:?}") + }); Eip4844Blobs::decode(blobs).expect("failed to decode EIP-4844 blobs") } } diff --git a/prover/witness_vector_generator/src/main.rs b/prover/witness_vector_generator/src/main.rs index 1a6553a8eed..44d0fb31a40 100644 --- a/prover/witness_vector_generator/src/main.rs +++ b/prover/witness_vector_generator/src/main.rs @@ -84,9 +84,8 @@ async fn main() -> anyhow::Result<()> { let circuit_ids_for_round_to_be_proven = get_all_circuit_id_round_tuples_for(circuit_ids_for_round_to_be_proven); let fri_prover_config = FriProverConfig::from_env().context("FriProverConfig::from_env()")?; - // let zone_url = &fri_prover_config.zone_read_url; - // let zone = get_zone(zone_url).await.context("get_zone()")?; - let zone = String::from("home"); + let zone_url = &fri_prover_config.zone_read_url; + let zone = get_zone(zone_url).await.context("get_zone()")?; let protocol_version = ProtocolVersionId::current_prover_version(); let witness_vector_generator = WitnessVectorGenerator::new( From c54c32ba595f7787e9cef12a73dcd915deb6b75a Mon Sep 17 00:00:00 2001 From: Zach Kolodny Date: Sat, 11 May 2024 14:55:58 -0400 Subject: [PATCH 11/16] lint --- .../src/fri_witness_generator_dal.rs | 24 ++++++++++--------- prover/prover_fri/src/main.rs | 7 +++--- prover/witness_vector_generator/src/main.rs | 1 + 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/prover/prover_dal/src/fri_witness_generator_dal.rs b/prover/prover_dal/src/fri_witness_generator_dal.rs index c6c1adc9143..54627881e52 100644 --- a/prover/prover_dal/src/fri_witness_generator_dal.rs +++ b/prover/prover_dal/src/fri_witness_generator_dal.rs @@ -115,17 +115,19 @@ impl FriWitnessGeneratorDal<'_, '_> { .await .unwrap() .map(|row| { - ( - L1BatchNumber(row.l1_batch_number as u32), - Eip4844Blobs::empty(), - // Eip4844Blobs::decode(&row.eip_4844_blobs.unwrap_or_else(|| { - // panic!( - // "missing eip 4844 blobs from the database for batch {}", - // row.l1_batch_number - // ) - // })) - // .expect("failed to decode EIP4844 blobs"), - ) + // Blobs can be `None` if we are using an `off-chain DA` + let blobs = if row.eip_4844_blobs.is_none() { + Eip4844Blobs::empty() + } else { + Eip4844Blobs::decode(&row.eip_4844_blobs.unwrap_or_else(|| { + panic!( + "missing eip 4844 blobs from the database for batch {}", + row.l1_batch_number + ) + })) + .expect("failed to decode EIP4844 blobs") + }; + (L1BatchNumber(row.l1_batch_number as u32), blobs) }) } diff --git a/prover/prover_fri/src/main.rs b/prover/prover_fri/src/main.rs index 4fcda4a50e5..c90b4ced147 100644 --- a/prover/prover_fri/src/main.rs +++ b/prover/prover_fri/src/main.rs @@ -238,10 +238,9 @@ async fn get_prover_tasks( let shared_witness_vector_queue = Arc::new(Mutex::new(witness_vector_queue)); let consumer = shared_witness_vector_queue.clone(); - // let zone = get_zone(&prover_config.zone_read_url) - // .await - // .context("get_zone()")?; - let zone = String::from("home"); + let zone = get_zone(&prover_config.zone_read_url) + .await + .context("get_zone()")?; let local_ip = local_ip().context("Failed obtaining local IP address")?; let address = SocketAddress { host: 
local_ip, diff --git a/prover/witness_vector_generator/src/main.rs b/prover/witness_vector_generator/src/main.rs index 44d0fb31a40..2260648d3f1 100644 --- a/prover/witness_vector_generator/src/main.rs +++ b/prover/witness_vector_generator/src/main.rs @@ -86,6 +86,7 @@ async fn main() -> anyhow::Result<()> { let fri_prover_config = FriProverConfig::from_env().context("FriProverConfig::from_env()")?; let zone_url = &fri_prover_config.zone_read_url; let zone = get_zone(zone_url).await.context("get_zone()")?; + let protocol_version = ProtocolVersionId::current_prover_version(); let witness_vector_generator = WitnessVectorGenerator::new( From f98b8eb134aa2750b9c94e8325e041e23e0bb24c Mon Sep 17 00:00:00 2001 From: EmilLuta Date: Mon, 13 May 2024 12:09:38 +0200 Subject: [PATCH 12/16] Add notes on future changes for EIP4844Blobs --- core/lib/basic_types/src/basic_fri_types.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/core/lib/basic_types/src/basic_fri_types.rs b/core/lib/basic_types/src/basic_fri_types.rs index 9938ee310d9..4b8deb5de3e 100644 --- a/core/lib/basic_types/src/basic_fri_types.rs +++ b/core/lib/basic_types/src/basic_fri_types.rs @@ -24,9 +24,15 @@ type Eip4844BlobsInner = [Option; MAX_4844_BLOBS_PER_BLOCK]; /// Current invariants: /// - there are between [1, 16] blobs /// - all blobs are of the same size [`EIP_4844_BLOB_SIZE`] +/// - there may be no blobs in case of Validium /// Creating a structure violating these constraints will panic. /// /// Note: blobs are padded to fit the correct size. +// TODO: PLA-932 +/// Note2: this becomes a rather leaky abstraction. +/// It will be reworked once BWIP is introduced. +/// Provers shouldn't need to decide between loading data from database or making it empty. +/// Data should just be available #[derive(Debug, Serialize, Deserialize, Clone)] pub struct Eip4844Blobs { blobs: Eip4844BlobsInner, From 0184d90455fbae298879f83cc4318f4a28aa3132 Mon Sep 17 00:00:00 2001 From: EmilLuta Date: Mon, 13 May 2024 13:33:56 +0200 Subject: [PATCH 13/16] Fix prover subsystems --- core/lib/basic_types/src/basic_fri_types.rs | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/core/lib/basic_types/src/basic_fri_types.rs b/core/lib/basic_types/src/basic_fri_types.rs index 4b8deb5de3e..bae76c09009 100644 --- a/core/lib/basic_types/src/basic_fri_types.rs +++ b/core/lib/basic_types/src/basic_fri_types.rs @@ -29,7 +29,7 @@ type Eip4844BlobsInner = [Option; MAX_4844_BLOBS_PER_BLOCK]; /// /// Note: blobs are padded to fit the correct size. // TODO: PLA-932 -/// Note2: this becomes a rather leaky abstraction. +/// Note2: this becomes a rather leaky abstraction post Validium. /// It will be reworked once BWIP is introduced. /// Provers shouldn't need to decide between loading data from database or making it empty. /// Data should just be available @@ -56,18 +56,16 @@ impl Eip4844Blobs { } pub fn decode(blobs: &[u8]) -> anyhow::Result { + // Validium case + if blobs.is_empty() { + return Ok(Self::empty()); + } let mut chunks: Vec = blobs .chunks(EIP_4844_BLOB_SIZE) .map(|chunk| chunk.into()) .collect(); - - if let Some(last_chunk) = chunks.last_mut() { - last_chunk.resize(EIP_4844_BLOB_SIZE, 0u8); - } else { - return Err(anyhow::anyhow!( - "cannot create Eip4844Blobs, received empty pubdata" - )); - } + // Unwrapping here is safe because of check on first line of the function. 
+        chunks.last_mut().unwrap().resize(EIP_4844_BLOB_SIZE, 0u8);
 
         if chunks.len() > MAX_4844_BLOBS_PER_BLOCK {
             return Err(anyhow::anyhow!(

From 8d8e99d5b2afe7013ac73fdc99349098c424b83f Mon Sep 17 00:00:00 2001
From: Zach Kolodny
Date: Mon, 13 May 2024 07:34:45 -0400
Subject: [PATCH 14/16] re-enable validium mode

---
 .github/workflows/ci-core-reusable.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml
index 2cc2651087a..b581803569a 100644
--- a/.github/workflows/ci-core-reusable.yml
+++ b/.github/workflows/ci-core-reusable.yml
@@ -132,7 +132,7 @@ jobs:
       matrix:
         consensus: [false, true]
         base_token: ["Eth", "Custom"]
-        deployment_mode: ["Rollup"] #, "Validium"] - Temporary disable validium until PR/1910.
+        deployment_mode: ["Rollup", "Validium"]
 
     env:
       SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator${{ matrix.consensus && ',consensus' || '' }}"
@@ -278,7 +278,7 @@ jobs:
       matrix:
         consensus: [false, true]
        base_token: ["Eth", "Custom"]
-        deployment_mode: ["Rollup"] # "Validium"] - Temporary disable validium until PR/1910.
+        deployment_mode: ["Rollup", "Validium"]
     runs-on: [matterlabs-ci-runner]
 
     env:

From ab5e4c4484dd9f41b65693e32731589058e592d6 Mon Sep 17 00:00:00 2001
From: Zach Kolodny
Date: Mon, 13 May 2024 07:41:46 -0400
Subject: [PATCH 15/16] updated spelling

---
 core/lib/basic_types/src/basic_fri_types.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/lib/basic_types/src/basic_fri_types.rs b/core/lib/basic_types/src/basic_fri_types.rs
index bae76c09009..592468eb846 100644
--- a/core/lib/basic_types/src/basic_fri_types.rs
+++ b/core/lib/basic_types/src/basic_fri_types.rs
@@ -29,8 +29,8 @@ type Eip4844BlobsInner = [Option<Blob>; MAX_4844_BLOBS_PER_BLOCK];
 ///
 /// Note: blobs are padded to fit the correct size.
 // TODO: PLA-932
-/// Note2: this becomes a rather leaky abstraction post Validium.
-/// It will be reworked once BWIP is introduced.
+/// Note 2: this becomes a rather leaky abstraction.
+/// It will be reworked once `BWIP` is introduced.
 /// Provers shouldn't need to decide between loading data from database or making it empty.
 /// Data should just be available
 #[derive(Debug, Serialize, Deserialize, Clone)]

From 55ab4a97697e3d0ee2b2e6f8b1edb7e2b6c71803 Mon Sep 17 00:00:00 2001
From: Zach Kolodny
Date: Mon, 13 May 2024 07:59:19 -0400
Subject: [PATCH 16/16] updated test and added partialeq to blobs struct

---
 core/lib/basic_types/src/basic_fri_types.rs | 13 +++----------
 1 file changed, 3 insertions(+), 10 deletions(-)

diff --git a/core/lib/basic_types/src/basic_fri_types.rs b/core/lib/basic_types/src/basic_fri_types.rs
index 592468eb846..ce9e8f330da 100644
--- a/core/lib/basic_types/src/basic_fri_types.rs
+++ b/core/lib/basic_types/src/basic_fri_types.rs
@@ -33,7 +33,7 @@ type Eip4844BlobsInner = [Option<Blob>; MAX_4844_BLOBS_PER_BLOCK];
 /// It will be reworked once `BWIP` is introduced.
 /// Provers shouldn't need to decide between loading data from database or making it empty.
 /// Data should just be available
-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
 pub struct Eip4844Blobs {
     blobs: Eip4844BlobsInner,
 }
@@ -191,15 +191,8 @@ mod tests {
     #[test]
     fn test_eip_4844_blobs_empty_pubdata() {
         let payload = vec![];
-        match Eip4844Blobs::decode(&payload) {
-            Ok(_) => panic!("expected error, got Ok"),
-            Err(e) => {
-                assert_eq!(
-                    e.to_string(),
-                    "cannot create Eip4844Blobs, received empty pubdata"
-                );
-            }
-        }
+        let blobs = Eip4844Blobs::decode(&payload).unwrap();
+        assert_eq!(blobs, Eip4844Blobs::empty());
     }
 
     #[test]