fix(core/prover): Changes to support Validium #1910

Merged (21 commits) on May 14, 2024
4 changes: 2 additions & 2 deletions .github/workflows/ci-core-reusable.yml
@@ -132,7 +132,7 @@ jobs:
matrix:
consensus: [false, true]
base_token: ["Eth", "Custom"]
deployment_mode: ["Rollup"] #, "Validium"] - Temporary disable validium until PR/1910.
deployment_mode: ["Rollup", "Validium"]
env:
SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator${{ matrix.consensus && ',consensus' || '' }}"

@@ -278,7 +278,7 @@ jobs:
matrix:
consensus: [false, true]
base_token: ["Eth", "Custom"]
deployment_mode: ["Rollup"] # "Validium"] - Temporary disable validium until PR/1910.
deployment_mode: ["Rollup", "Validium"]
runs-on: [matterlabs-ci-runner]

env:
14 changes: 12 additions & 2 deletions core/bin/external_node/src/main.rs
@@ -9,6 +9,9 @@ use tokio::{
};
use zksync_block_reverter::{BlockReverter, NodeRole};
use zksync_commitment_generator::{
commitment_post_processor::{
CommitmentPostProcessor, RollupCommitmentPostProcessor, ValidiumCommitmentPostProcessor,
},
input_generation::{InputGenerator, RollupInputGenerator, ValidiumInputGenerator},
CommitmentGenerator,
};
@@ -347,17 +350,20 @@ async fn run_core(
)
.await?;

let (l1_batch_commit_data_generator, input_generator): (
let (l1_batch_commit_data_generator, input_generator, commitment_post_processor): (
Arc<dyn L1BatchCommitDataGenerator>,
Box<dyn InputGenerator>,
Box<dyn CommitmentPostProcessor>,
) = match config.optional.l1_batch_commit_data_generator_mode {
L1BatchCommitDataGeneratorMode::Rollup => (
Arc::new(RollupModeL1BatchCommitDataGenerator {}),
Box::new(RollupInputGenerator),
Box::new(RollupCommitmentPostProcessor),
),
L1BatchCommitDataGeneratorMode::Validium => (
Arc::new(ValidiumModeL1BatchCommitDataGenerator {}),
Box::new(ValidiumInputGenerator),
Box::new(ValidiumCommitmentPostProcessor),
),
};

@@ -389,7 +395,11 @@ async fn run_core(
.build()
.await
.context("failed to build a commitment_generator_pool")?;
let commitment_generator = CommitmentGenerator::new(commitment_generator_pool, input_generator);
let commitment_generator = CommitmentGenerator::new(
commitment_generator_pool,
input_generator,
commitment_post_processor,
);
app_health.insert_component(commitment_generator.health_check())?;
let commitment_generator_handle = tokio::spawn(commitment_generator.run(stop_receiver.clone()));

39 changes: 21 additions & 18 deletions core/lib/basic_types/src/basic_fri_types.rs
@@ -24,10 +24,16 @@ type Eip4844BlobsInner = [Option<Blob>; MAX_4844_BLOBS_PER_BLOCK];
/// Current invariants:
/// - there are between [1, 16] blobs
/// - all blobs are of the same size [`EIP_4844_BLOB_SIZE`]
/// - there may be no blobs in case of Validium
/// Creating a structure violating these constraints will panic.
///
/// Note: blobs are padded to fit the correct size.
#[derive(Debug, Serialize, Deserialize, Clone)]
// TODO: PLA-932
/// Note 2: this becomes a rather leaky abstraction.
/// It will be reworked once `BWIP` is introduced.
/// Provers shouldn't need to decide between loading data from database or making it empty.
/// Data should just be available
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct Eip4844Blobs {
blobs: Eip4844BlobsInner,
}
@@ -39,23 +45,27 @@ impl Eip4844Blobs {
}

impl Eip4844Blobs {
pub fn empty() -> Self {
Self {
blobs: Default::default(),
}
}

pub fn encode(self) -> Vec<u8> {
self.blobs().into_iter().flatten().flatten().collect()
}

pub fn decode(blobs: &[u8]) -> anyhow::Result<Self> {
// Validium case
if blobs.is_empty() {
return Ok(Self::empty());
}
let mut chunks: Vec<Blob> = blobs
.chunks(EIP_4844_BLOB_SIZE)
.map(|chunk| chunk.into())
.collect();

if let Some(last_chunk) = chunks.last_mut() {
last_chunk.resize(EIP_4844_BLOB_SIZE, 0u8);
} else {
return Err(anyhow::anyhow!(
"cannot create Eip4844Blobs, received empty pubdata"
));
}
// Unwrapping here is safe because of check on first line of the function.
chunks.last_mut().unwrap().resize(EIP_4844_BLOB_SIZE, 0u8);

if chunks.len() > MAX_4844_BLOBS_PER_BLOCK {
return Err(anyhow::anyhow!(
@@ -181,15 +191,8 @@ mod tests {
#[test]
fn test_eip_4844_blobs_empty_pubdata() {
let payload = vec![];
match Eip4844Blobs::decode(&payload) {
Ok(_) => panic!("expected error, got Ok"),
Err(e) => {
assert_eq!(
e.to_string(),
"cannot create Eip4844Blobs, received empty pubdata"
);
}
}
let blobs = Eip4844Blobs::decode(&payload).unwrap();
assert_eq!(blobs, Eip4844Blobs::empty());
}

#[test]
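For illustration, a minimal usage sketch of the new decoding behavior (not part of the diff). It assumes the type is reachable as `zksync_basic_types::basic_fri_types::Eip4844Blobs` and that `anyhow` is available: empty pubdata (the Validium case) now decodes to `Eip4844Blobs::empty()` instead of returning an error, while non-empty pubdata is still chunked and zero-padded.

use zksync_basic_types::basic_fri_types::Eip4844Blobs;

fn decode_examples() -> anyhow::Result<()> {
    // Validium: no pubdata is published, so an empty slice yields empty blobs.
    let validium_blobs = Eip4844Blobs::decode(&[])?;
    assert_eq!(validium_blobs, Eip4844Blobs::empty());

    // Rollup: pubdata is split into EIP-4844-sized chunks; the last chunk is zero-padded.
    let pubdata = vec![0xAB_u8; 100];
    let rollup_blobs = Eip4844Blobs::decode(&pubdata)?;
    assert_ne!(rollup_blobs, Eip4844Blobs::empty());

    // `encode` flattens the blobs back into a padded byte stream.
    let _bytes: Vec<u8> = rollup_blobs.encode();
    Ok(())
}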
12 changes: 8 additions & 4 deletions core/lib/types/src/commitment/mod.rs
@@ -259,15 +259,15 @@ impl SerializeCommitment for StateDiffRecord {

#[derive(Debug, Clone, Eq, PartialEq)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
struct L1BatchAuxiliaryCommonOutput {
pub struct L1BatchAuxiliaryCommonOutput {
l2_l1_logs_merkle_root: H256,
protocol_version: ProtocolVersionId,
}

/// Block Output produced by Virtual Machine
#[derive(Debug, Clone, Eq, PartialEq)]
#[cfg_attr(test, derive(Serialize, Deserialize))]
enum L1BatchAuxiliaryOutput {
pub enum L1BatchAuxiliaryOutput {
PreBoojum {
common: L1BatchAuxiliaryCommonOutput,
l2_l1_logs_linear_hash: H256,
@@ -528,8 +528,8 @@ impl L1BatchPassThroughData {
#[derive(Debug, Clone)]
pub struct L1BatchCommitment {
pass_through_data: L1BatchPassThroughData,
auxiliary_output: L1BatchAuxiliaryOutput,
meta_parameters: L1BatchMetaParameters,
pub auxiliary_output: L1BatchAuxiliaryOutput,
pub meta_parameters: L1BatchMetaParameters,
}

#[derive(Debug, Clone, PartialEq, Eq)]
@@ -574,6 +574,10 @@ impl L1BatchCommitment {
self.meta_parameters.clone()
}

pub fn aux_output(&self) -> L1BatchAuxiliaryOutput {
self.auxiliary_output.clone()
}

pub fn l2_l1_logs_merkle_root(&self) -> H256 {
self.auxiliary_output.common().l2_l1_logs_merkle_root
}
33 changes: 27 additions & 6 deletions core/lib/zksync_core/src/lib.rs
@@ -21,6 +21,9 @@ use zksync_circuit_breaker::{
CircuitBreakerChecker, CircuitBreakers,
};
use zksync_commitment_generator::{
commitment_post_processor::{
CommitmentPostProcessor, RollupCommitmentPostProcessor, ValidiumCommitmentPostProcessor,
},
input_generation::{InputGenerator, RollupInputGenerator, ValidiumInputGenerator},
CommitmentGenerator,
};
@@ -76,6 +79,9 @@ use zksync_node_fee_model::{
};
use zksync_node_genesis::{ensure_genesis_state, GenesisParams};
use zksync_object_store::{ObjectStore, ObjectStoreFactory};
use zksync_proof_data_handler::blob_processor::{
BlobProcessor, RollupBlobProcessor, ValidiumBlobProcessor,
};
use zksync_shared_metrics::{InitStage, APP_METRICS};
use zksync_state::{PostgresStorageCaches, RocksdbStorageOptions};
use zksync_types::{ethabi::Contract, fee_model::FeeModelConfig, Address, L2ChainId};
@@ -605,13 +611,24 @@ pub async fn initialize_components(
tracing::info!("initialized ETH-Watcher in {elapsed:?}");
}

let input_generator: Box<dyn InputGenerator> = if genesis_config
.l1_batch_commit_data_generator_mode
let (input_generator, commitment_post_processor, blob_processor): (
Box<dyn InputGenerator>,
Box<dyn CommitmentPostProcessor>,
Arc<dyn BlobProcessor>,
) = if genesis_config.l1_batch_commit_data_generator_mode
== L1BatchCommitDataGeneratorMode::Validium
{
Box::new(ValidiumInputGenerator)
(
Box::new(ValidiumInputGenerator),
Box::new(ValidiumCommitmentPostProcessor),
Arc::new(ValidiumBlobProcessor),
)
} else {
Box::new(RollupInputGenerator)
(
Box::new(RollupInputGenerator),
Box::new(RollupCommitmentPostProcessor),
Arc::new(RollupBlobProcessor),
)
};

if components.contains(&Component::EthTxAggregator) {
@@ -771,6 +788,7 @@ pub async fn initialize_components(
.context("proof_data_handler_config")?,
store_factory.create_store().await,
connection_pool.clone(),
blob_processor,
stop_receiver.clone(),
)));
}
@@ -781,8 +799,11 @@
.build()
.await
.context("failed to build commitment_generator_pool")?;
let commitment_generator =
CommitmentGenerator::new(commitment_generator_pool, input_generator);
let commitment_generator = CommitmentGenerator::new(
commitment_generator_pool,
input_generator,
commitment_post_processor,
);
app_health.insert_component(commitment_generator.health_check())?;
task_futures.push(tokio::spawn(
commitment_generator.run(stop_receiver.clone()),
52 changes: 52 additions & 0 deletions core/node/commitment_generator/src/commitment_post_processor.rs
@@ -0,0 +1,52 @@
use std::fmt;

use zksync_types::{
commitment::{L1BatchAuxiliaryOutput, L1BatchCommitment},
H256,
};

#[derive(Debug)]
pub struct ValidiumCommitmentPostProcessor;

#[derive(Debug)]
pub struct RollupCommitmentPostProcessor;

/// Definition of trait handling post processing the `L1BatchCommitment` depending on the DA solution
/// being utilized.
pub trait CommitmentPostProcessor: 'static + fmt::Debug + Send + Sync {
fn post_process_commitment(&self, commitment: L1BatchCommitment) -> L1BatchCommitment;
}

impl CommitmentPostProcessor for ValidiumCommitmentPostProcessor {
fn post_process_commitment(&self, mut commitment: L1BatchCommitment) -> L1BatchCommitment {
let aux_output = match commitment.aux_output() {
L1BatchAuxiliaryOutput::PostBoojum {
common,
system_logs_linear_hash,
state_diffs_compressed,
state_diffs_hash,
aux_commitments,
blob_linear_hashes,
blob_commitments,
} => L1BatchAuxiliaryOutput::PostBoojum {
common,
system_logs_linear_hash,
state_diffs_compressed,
state_diffs_hash,
aux_commitments,
blob_linear_hashes: vec![H256::zero(); blob_linear_hashes.len()],
blob_commitments: vec![H256::zero(); blob_commitments.len()],
},
_ => commitment.aux_output(),
};

commitment.auxiliary_output = aux_output;
commitment
}
}

impl CommitmentPostProcessor for RollupCommitmentPostProcessor {
fn post_process_commitment(&self, commitment: L1BatchCommitment) -> L1BatchCommitment {
commitment
}
}
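A minimal sketch (not part of the diff) of how these post-processors are meant to be used, mirroring the wiring in `run_core` and `initialize_components` above. The `post_processor_for` and `finalize_commitment` helpers are hypothetical; the real code matches on `L1BatchCommitDataGeneratorMode` and hands the boxed trait object to `CommitmentGenerator::new`.

use zksync_commitment_generator::commitment_post_processor::{
    CommitmentPostProcessor, RollupCommitmentPostProcessor, ValidiumCommitmentPostProcessor,
};
use zksync_types::commitment::L1BatchCommitment;

// Hypothetical helper: choose the post-processor for the configured DA mode.
fn post_processor_for(is_validium: bool) -> Box<dyn CommitmentPostProcessor> {
    if is_validium {
        // Zeroes out blob linear hashes and blob commitments of post-Boojum batches,
        // since a Validium chain publishes no EIP-4844 pubdata blobs.
        Box::new(ValidiumCommitmentPostProcessor)
    } else {
        // Leaves the rollup commitment unchanged.
        Box::new(RollupCommitmentPostProcessor)
    }
}

// Hypothetical call site: post-process a freshly computed commitment.
fn finalize_commitment(commitment: L1BatchCommitment, is_validium: bool) -> L1BatchCommitment {
    post_processor_for(is_validium).post_process_commitment(commitment)
}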
8 changes: 8 additions & 0 deletions core/node/commitment_generator/src/lib.rs
@@ -17,11 +17,13 @@ use zksync_types::{
use zksync_utils::h256_to_u256;

use crate::{
commitment_post_processor::CommitmentPostProcessor,
input_generation::InputGenerator,
metrics::{CommitmentStage, METRICS},
utils::{bootloader_initial_content_commitment, events_queue_commitment},
};

pub mod commitment_post_processor;
pub mod input_generation;
mod metrics;
mod utils;
@@ -33,17 +35,20 @@ pub struct CommitmentGenerator {
connection_pool: ConnectionPool<Core>,
health_updater: HealthUpdater,
input_generator: Box<dyn InputGenerator>,
commitment_post_processor: Box<dyn CommitmentPostProcessor>,
}

impl CommitmentGenerator {
pub fn new(
connection_pool: ConnectionPool<Core>,
input_generator: Box<dyn InputGenerator>,
commitment_post_processor: Box<dyn CommitmentPostProcessor>,
) -> Self {
Self {
connection_pool,
health_updater: ReactiveHealthCheck::new("commitment_generator").1,
input_generator,
commitment_post_processor,
}
}

@@ -271,6 +276,9 @@ impl CommitmentGenerator {
let latency =
METRICS.generate_commitment_latency_stage[&CommitmentStage::Calculate].start();
let commitment = L1BatchCommitment::new(input);
let commitment = self
.commitment_post_processor
.post_process_commitment(commitment);
let artifacts = commitment.artifacts();
let latency = latency.observe();
tracing::debug!(
2 changes: 2 additions & 0 deletions core/node/node_framework/examples/main_node.rs
@@ -174,8 +174,10 @@ impl MainNodeBuilder {
}

fn add_proof_data_handler_layer(mut self) -> anyhow::Result<Self> {
let genesis_config = GenesisConfig::from_env()?;
self.node.add_layer(ProofDataHandlerLayer::new(
ProofDataHandlerConfig::from_env()?,
genesis_config.l1_batch_commit_data_generator_mode,
));
Ok(self)
}