Commit

chore: resolve merge conflicts

pythonberg1997 committed Sep 3, 2024
1 parent 785411b commit 69139a8
Showing 20 changed files with 93 additions and 101 deletions.
5 changes: 3 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,7 +1,7 @@
FROM lukemathwalker/cargo-chef:latest-rust-1.80 AS chef
WORKDIR /app

LABEL org.opencontainers.image.source=https://github.com/paradigmxyz/reth
LABEL org.opencontainers.image.source=https://github.com/bnb-chain/reth
LABEL org.opencontainers.image.licenses="MIT OR Apache-2.0"

# Install system dependencies
2 changes: 1 addition & 1 deletion Dockerfile.cross
@@ -3,7 +3,7 @@
# locatable in `./dist/bin/$TARGETARCH`
FROM --platform=$TARGETPLATFORM ubuntu:22.04

LABEL org.opencontainers.image.source=https://github.com/paradigmxyz/reth
LABEL org.opencontainers.image.source=https://github.com/bnb-chain/reth
LABEL org.opencontainers.image.licenses="MIT OR Apache-2.0"

# Filled by docker buildx
2 changes: 1 addition & 1 deletion DockerfileBsc.cross
@@ -3,7 +3,7 @@
# locatable in `./dist/bin/$TARGETARCH`
FROM --platform=$TARGETPLATFORM ubuntu:22.04

LABEL org.opencontainers.image.source=https://github.com/paradigmxyz/reth
LABEL org.opencontainers.image.source=https://github.com/bnb-chain/reth
LABEL org.opencontainers.image.licenses="MIT OR Apache-2.0"

# Filled by docker buildx
2 changes: 1 addition & 1 deletion DockerfileOp.cross
@@ -3,7 +3,7 @@
# locatable in `./dist/bin/$TARGETARCH`
FROM --platform=$TARGETPLATFORM ubuntu:22.04

LABEL org.opencontainers.image.source=https://github.com/paradigmxyz/reth
LABEL org.opencontainers.image.source=https://github.com/bnb-chain/reth
LABEL org.opencontainers.image.licenses="MIT OR Apache-2.0"

# Filled by docker buildx
2 changes: 1 addition & 1 deletion bin/reth/Cargo.toml
@@ -157,11 +157,11 @@ opbnb = [
bsc = [
"reth-rpc/bsc",
"reth-primitives/bsc",
"dep:reth-node-bsc",
"reth-node-core/bsc",
"reth-stages/bsc",
"reth-node-builder/bsc",
"reth-beacon-consensus/bsc",
"dep:reth-node-bsc",
]

# no-op feature flag for switching between the `optimism` and default functionality in CI matrices
2 changes: 1 addition & 1 deletion bsc.Dockerfile
@@ -1,7 +1,7 @@
FROM lukemathwalker/cargo-chef:latest-rust-1.80 AS chef
WORKDIR /app

LABEL org.opencontainers.image.source=https://github.com/paradigmxyz/reth
LABEL org.opencontainers.image.source=https://github.com/bnb-chain/reth
LABEL org.opencontainers.image.licenses="MIT OR Apache-2.0"

# Builds a cargo-chef plan
1 change: 0 additions & 1 deletion crates/bsc/evm/src/lib.rs
@@ -2,7 +2,6 @@

// TODO: doc
#![allow(missing_docs)]
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
// The `bsc` feature must be enabled to use this crate.
#![cfg(feature = "bsc")]
15 changes: 4 additions & 11 deletions crates/chainspec/src/spec.rs
@@ -2244,6 +2244,7 @@ Post-merge hard forks (timestamp based):
}

#[test]
#[cfg(not(feature = "bsc"))]
fn test_default_cancun_header_forkhash() {
// set the gas limit from the hive test genesis according to the hash
let genesis = Genesis { gas_limit: 0x2fefd8u128, ..Default::default() };
@@ -2270,20 +2271,12 @@ Post-merge hard forks (timestamp based):

// check the genesis hash
let genesis_hash = header.hash_slow();
let expected_hash = if cfg!(feature = "bsc") {
// bsc has zero base fee
b256!("8498b49617a74f5750dfe77e025989bd06955a177d255b8fb90ed3ebbe9aaf0f")
} else {
b256!("16bb7c59613a5bad3f7c04a852fd056545ade2483968d9a25a1abb05af0c4d37")
};
let expected_hash =
b256!("16bb7c59613a5bad3f7c04a852fd056545ade2483968d9a25a1abb05af0c4d37");
assert_eq!(genesis_hash, expected_hash);

// check that the forkhash is correct
let expected_forkhash = if cfg!(feature = "bsc") {
ForkHash(hex!("1e7c5080"))
} else {
ForkHash(hex!("8062457a"))
};
let expected_forkhash = ForkHash(hex!("8062457a"));
assert_eq!(ForkHash::from(genesis_hash), expected_forkhash);
}

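Note: the spec.rs change replaces a runtime cfg!(feature = "bsc") branch inside the test with compile-time gating of the whole test via #[cfg(not(feature = "bsc"))]. A minimal, self-contained sketch of the difference between the two gating styles; compute() and the expected values are placeholders, not code from the repository:

fn compute() -> u64 {
    1
}

// `cfg!(...)` expands to a compile-time boolean: both branches must still
// type-check and the test always runs, picking its expectation at build time.
#[test]
fn runtime_branching() {
    let expected = if cfg!(feature = "bsc") { 0u64 } else { 1u64 };
    assert_eq!(compute(), expected);
}

// `#[cfg(...)]` removes the item entirely when the predicate is false, so this
// test is only compiled and run when the `bsc` feature is disabled, which is
// what the commit now does for `test_default_cancun_header_forkhash`.
#[test]
#[cfg(not(feature = "bsc"))]
fn compile_time_gated() {
    assert_eq!(compute(), 1);
}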
5 changes: 1 addition & 4 deletions crates/evm/src/execute.rs
@@ -7,12 +7,9 @@ pub use reth_storage_errors::provider::ProviderError;

use core::fmt::Display;

use std::collections::HashMap;

use reth_primitives::{BlockNumber, BlockWithSenders, Header, Receipt};
use reth_prune_types::PruneModes;
use revm_primitives::db::Database;
use revm_primitives::EvmState;
use revm_primitives::{db::Database, EvmState};
use tokio::sync::mpsc::UnboundedSender;

/// A general purpose executor trait that executes an input (e.g. block) and produces an output
11 changes: 7 additions & 4 deletions crates/exex/exex/src/backfill/test_utils.rs
@@ -59,10 +59,13 @@ where

// Execute the block to produce a block execution output
let mut block_execution_output = EthExecutorProvider::ethereum(chain_spec)
.executor(StateProviderDatabase::new(LatestStateProviderRef::new(
provider.tx_ref(),
provider.static_file_provider().clone(),
)))
.executor(
StateProviderDatabase::new(LatestStateProviderRef::new(
provider.tx_ref(),
provider.static_file_provider().clone(),
)),
None,
)
.execute(BlockExecutionInput {
block,
total_difficulty: U256::ZERO,
2 changes: 1 addition & 1 deletion crates/node/core/src/args/pruning.rs
@@ -33,7 +33,7 @@ impl PruningArgs {
.deposit_contract
.as_ref()
.map(|contract| PruneMode::Before(contract.block))
.or(Some(PruneMode::Full)),
.or(Some(PruneMode::Distance(MINIMUM_PRUNING_DISTANCE))),
account_history: Some(PruneMode::Distance(MINIMUM_PRUNING_DISTANCE)),
storage_history: Some(PruneMode::Distance(MINIMUM_PRUNING_DISTANCE)),
receipts_log_filter: ReceiptsLogPruneConfig(
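Note: for chains without a deposit contract, the receipts default no longer falls back to pruning everything (PruneMode::Full) but keeps a recent window of receipts. A minimal, self-contained sketch of the effect; the enum, the constant value, and the block number below are simplified stand-ins, not reth's real definitions:

#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum PruneMode {
    Full,          // prune everything
    Distance(u64), // keep only the most recent N blocks
    Before(u64),   // prune everything before the given block
}

// Hypothetical value; reth defines its own MINIMUM_PRUNING_DISTANCE constant.
const MINIMUM_PRUNING_DISTANCE: u64 = 90_000;

fn receipts_default(deposit_contract_block: Option<u64>) -> PruneMode {
    match deposit_contract_block {
        Some(block) => PruneMode::Before(block),
        // Previously this arm returned PruneMode::Full; after the change a
        // recent window of receipts is retained instead.
        None => PruneMode::Distance(MINIMUM_PRUNING_DISTANCE),
    }
}

fn main() {
    assert_eq!(receipts_default(None), PruneMode::Distance(MINIMUM_PRUNING_DISTANCE));
    assert_eq!(receipts_default(Some(1_000_000)), PruneMode::Before(1_000_000));
}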
11 changes: 9 additions & 2 deletions crates/node/core/src/args/utils.rs
@@ -19,8 +19,15 @@ use reth_optimism_chainspec::{OPBNB_MAINNET, OPBNB_QA, OPBNB_TESTNET};
pub const SUPPORTED_CHAINS: &[&str] = &["bsc", "bsc-testnet"];
#[cfg(feature = "optimism")]
/// Chains supported by op-reth. First value should be used as the default.
pub const SUPPORTED_CHAINS: &[&str] =
&["optimism", "optimism-sepolia", "base", "base-sepolia", "opbnb-mainnet", "opbnb-testnet", "dev"];
pub const SUPPORTED_CHAINS: &[&str] = &[
"optimism",
"optimism-sepolia",
"base",
"base-sepolia",
"opbnb-mainnet",
"opbnb-testnet",
"dev",
];
#[cfg(all(not(feature = "optimism"), not(feature = "bsc")))]
/// Chains supported by reth. First value should be used as the default.
pub const SUPPORTED_CHAINS: &[&str] = &["mainnet", "sepolia", "holesky", "dev"];
3 changes: 2 additions & 1 deletion crates/optimism/evm/src/execute.rs
@@ -1,5 +1,7 @@
//! Optimism block executor.

use std::{collections::HashMap, str::FromStr, sync::Arc};

use crate::{l1::ensure_create2_deployer, OptimismBlockExecutionError, OptimismEvmConfig};
use reth_chainspec::{ChainSpec, EthereumHardforks, OptimismHardfork};
use reth_evm::{
@@ -25,7 +27,6 @@ use revm_primitives::{
db::{Database, DatabaseCommit},
BlockEnv, CfgEnvWithHandlerCfg, EVMError, EnvWithHandlerCfg, EvmState, ResultAndState,
};
use std::{collections::HashMap, str::FromStr, sync::Arc};
use tokio::sync::mpsc::UnboundedSender;
use tracing::{debug, trace};

42 changes: 28 additions & 14 deletions crates/storage/provider/src/providers/database/provider.rs
@@ -2095,13 +2095,20 @@ impl<TX: DbTx> BlockReader for DatabaseProvider<TX> {
transaction_kind,
|block_number| self.header_by_number(block_number),
|header, body, senders, ommers, withdrawals, requests| {
Block { header, body, ommers, withdrawals, sidecars: Some(Default::default()), requests }
// Note: we're using unchecked here because we know the block contains valid txs
// wrt to its height and can ignore the s value check so pre
// EIP-2 txs are allowed
.try_with_senders_unchecked(senders)
.map(Some)
.map_err(|_| ProviderError::SenderRecoveryError)
Block {
header,
body,
ommers,
withdrawals,
sidecars: Some(Default::default()),
requests,
}
// Note: we're using unchecked here because we know the block contains valid txs
// wrt to its height and can ignore the s value check so pre
// EIP-2 txs are allowed
.try_with_senders_unchecked(senders)
.map(Some)
.map_err(|_| ProviderError::SenderRecoveryError)
},
)
}
@@ -2116,13 +2123,20 @@ impl<TX: DbTx> BlockReader for DatabaseProvider<TX> {
transaction_kind,
|block_number| self.sealed_header(block_number),
|header, body, senders, ommers, withdrawals, requests| {
SealedBlock { header, body, ommers, withdrawals, sidecars: Some(Default::default()), requests }
// Note: we're using unchecked here because we know the block contains valid txs
// wrt to its height and can ignore the s value check so pre
// EIP-2 txs are allowed
.try_with_senders_unchecked(senders)
.map(Some)
.map_err(|_| ProviderError::SenderRecoveryError)
SealedBlock {
header,
body,
ommers,
withdrawals,
sidecars: Some(Default::default()),
requests,
}
// Note: we're using unchecked here because we know the block contains valid txs
// wrt to its height and can ignore the s value check so pre
// EIP-2 txs are allowed
.try_with_senders_unchecked(senders)
.map(Some)
.map_err(|_| ProviderError::SenderRecoveryError)
},
)
}
6 changes: 3 additions & 3 deletions crates/transaction-pool/src/pool/txpool.rs
@@ -1235,7 +1235,7 @@ impl<T: PoolTransaction> AllTransactions<T> {
pub(crate) fn descendant_txs_exclusive<'a, 'b: 'a>(
&'a self,
id: &'b TransactionId,
) -> impl Iterator<Item = (&'a TransactionId, &'a PoolInternalTransaction<T>)> + '_ {
) -> impl Iterator<Item = (&'a TransactionId, &'a PoolInternalTransaction<T>)> + 'a {
self.txs.range((Excluded(id), Unbounded)).take_while(|(other, _)| id.sender == other.sender)
}

@@ -1246,7 +1246,7 @@ impl<T: PoolTransaction> AllTransactions<T> {
pub(crate) fn descendant_txs_inclusive<'a, 'b: 'a>(
&'a self,
id: &'b TransactionId,
) -> impl Iterator<Item = (&'a TransactionId, &'a PoolInternalTransaction<T>)> + '_ {
) -> impl Iterator<Item = (&'a TransactionId, &'a PoolInternalTransaction<T>)> + 'a {
self.txs.range(id..).take_while(|(other, _)| id.sender == other.sender)
}

@@ -1257,7 +1257,7 @@ impl<T: PoolTransaction> AllTransactions<T> {
pub(crate) fn descendant_txs_mut<'a, 'b: 'a>(
&'a mut self,
id: &'b TransactionId,
) -> impl Iterator<Item = (&'a TransactionId, &'a mut PoolInternalTransaction<T>)> + '_ {
) -> impl Iterator<Item = (&'a TransactionId, &'a mut PoolInternalTransaction<T>)> + 'a {
self.txs.range_mut(id..).take_while(|(other, _)| id.sender == other.sender)
}

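Note: the three txpool.rs signature tweaks only name the elided lifetime. With a &'a self receiver, the '_ in the return-position impl Iterator<...> + '_ bound already resolves to 'a under the usual elision rules, so + 'a is equivalent but explicit about what the returned iterator borrows. A self-contained sketch of the pattern with simplified types, not the real pool internals:

use std::collections::BTreeMap;

struct Pool {
    txs: BTreeMap<u64, &'static str>,
}

impl Pool {
    // Before: `... + '_`; after: `... + 'a`. Both mean the returned iterator
    // borrows from `self` for `'a`. Naming the lifetime reads more clearly here
    // because a second lifetime `'b: 'a` is also in scope.
    fn descendants<'a, 'b: 'a>(
        &'a self,
        from: &'b u64,
    ) -> impl Iterator<Item = (&'a u64, &'a &'static str)> + 'a {
        self.txs.range(from..)
    }
}

fn main() {
    let pool = Pool { txs: BTreeMap::from([(1, "a"), (2, "b"), (3, "c")]) };
    assert_eq!(pool.descendants(&2).count(), 2);
}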
1 change: 1 addition & 0 deletions crates/trie/prefetch/Cargo.toml
@@ -16,6 +16,7 @@ workspace = true
reth-primitives.workspace = true
reth-db.workspace = true
reth-trie.workspace = true
reth-trie-db.workspace = true
reth-provider.workspace = true
reth-trie-parallel.workspace = true
reth-tasks.workspace = true
62 changes: 18 additions & 44 deletions crates/trie/prefetch/src/prefetch.rs
@@ -12,6 +12,7 @@ use reth_trie::{
walker::TrieWalker,
HashedPostState, HashedStorage, StorageRoot,
};
use reth_trie_db::{DatabaseHashedCursorFactory, DatabaseTrieCursorFactory};
use reth_trie_parallel::{parallel_root::ParallelStateRootError, StorageRootTargets};
use std::{collections::HashMap, sync::Arc};
use thiserror::Error;
@@ -158,10 +159,14 @@ impl TriePrefetch {
.into_par_iter()
.map(|(hashed_address, prefix_set)| {
let provider_ro = consistent_view.provider_ro()?;

let trie_cursor_factory = DatabaseTrieCursorFactory::new(provider_ro.tx_ref());
let hashed_cursor_factory = HashedPostStateCursorFactory::new(
DatabaseHashedCursorFactory::new(provider_ro.tx_ref()),
&hashed_state_sorted,
);
let storage_root_result = StorageRoot::new_hashed(
provider_ro.tx_ref(),
HashedPostStateCursorFactory::new(provider_ro.tx_ref(), &hashed_state_sorted),
trie_cursor_factory,
hashed_cursor_factory,
hashed_address,
#[cfg(feature = "metrics")]
self.metrics.clone(),
@@ -175,9 +180,12 @@

trace!(target: "trie::trie_prefetch", "prefetching account tries");
let provider_ro = consistent_view.provider_ro()?;
let hashed_cursor_factory =
HashedPostStateCursorFactory::new(provider_ro.tx_ref(), &hashed_state_sorted);
let trie_cursor_factory = provider_ro.tx_ref();
let tx = provider_ro.tx_ref();
let trie_cursor_factory = DatabaseTrieCursorFactory::new(tx);
let hashed_cursor_factory = HashedPostStateCursorFactory::new(
DatabaseHashedCursorFactory::new(tx),
&hashed_state_sorted,
);

let walker = TrieWalker::new(
trie_cursor_factory.account_trie_cursor().map_err(ProviderError::Database)?,
@@ -200,7 +208,7 @@
// Since we do not store all intermediate nodes in the database, there might
// be a possibility of re-adding a non-modified leaf to the hash builder.
None => StorageRoot::new_hashed(
trie_cursor_factory,
trie_cursor_factory.clone(),
hashed_cursor_factory.clone(),
hashed_address,
#[cfg(feature = "metrics")]
@@ -250,44 +258,10 @@ impl From<TriePrefetchError> for ProviderError {
    fn from(error: TriePrefetchError) -> Self {
        match error {
            TriePrefetchError::Provider(error) => error,
            TriePrefetchError::StorageRoot(StorageRootError::DB(error)) => Self::Database(error),
            TriePrefetchError::StorageRoot(StorageRootError::Database(error)) => {
                Self::Database(error)
            }
            TriePrefetchError::ParallelStateRoot(error) => error.into(),
        }
    }
}

#[cfg(test)]
mod tests {
    use tokio::time;

    #[tokio::test]
    async fn test_channel() {
        let (prefetch_tx, mut prefetch_rx) = tokio::sync::mpsc::unbounded_channel();
        let (interrupt_tx, mut interrupt_rx) = tokio::sync::oneshot::channel();

        tokio::spawn(async move {
            loop {
                tokio::select! {
                    _ = prefetch_rx.recv() => {
                        println!("got message");
                        time::sleep(time::Duration::from_secs(3)).await;
                    }
                    _ = &mut interrupt_rx => {
                        println!("left items in channel: {}", prefetch_rx.len());
                        break;
                    }
                }
            }
        });

        for _ in 0..10 {
            prefetch_tx.send(()).unwrap();
        }

        time::sleep(time::Duration::from_secs(3)).await;

        interrupt_tx.send(()).unwrap();

        time::sleep(time::Duration::from_secs(10)).await;
    }
}