diff --git a/Cargo.lock b/Cargo.lock index c35e148ff2ba..c9e4363613b1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2924,6 +2924,7 @@ dependencies = [ "eyre", "forge-doc", "forge-fmt", + "forge-script", "forge-verify", "foundry-block-explorers", "foundry-cli", @@ -3010,6 +3011,46 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "forge-script" +version = "0.2.0" +dependencies = [ + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-primitives", + "alloy-rpc-types", + "async-recursion", + "clap", + "const-hex", + "dialoguer", + "dunce", + "ethers-core", + "ethers-providers", + "ethers-signers", + "eyre", + "forge-verify", + "foundry-cheatcodes", + "foundry-cli", + "foundry-common", + "foundry-compilers", + "foundry-config", + "foundry-debugger", + "foundry-evm", + "foundry-linking", + "foundry-wallets", + "futures", + "indicatif", + "itertools 0.11.0", + "parking_lot", + "revm-inspectors", + "semver 1.0.22", + "serde", + "serde_json", + "tempfile", + "tracing", + "yansi 0.5.1", +] + [[package]] name = "forge-verify" version = "0.2.0" diff --git a/Cargo.toml b/Cargo.toml index e0c94e7466dc..68240fde2657 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -119,6 +119,7 @@ forge = { path = "crates/forge" } forge-doc = { path = "crates/doc" } forge-fmt = { path = "crates/fmt" } forge-verify = { path = "crates/verify" } +forge-script = { path = "crates/script" } foundry-cheatcodes = { path = "crates/cheatcodes" } foundry-cheatcodes-spec = { path = "crates/cheatcodes/spec" } foundry-cli = { path = "crates/cli" } diff --git a/crates/common/src/contracts.rs b/crates/common/src/contracts.rs index a1b251b768dd..2687b93e3784 100644 --- a/crates/common/src/contracts.rs +++ b/crates/common/src/contracts.rs @@ -135,23 +135,6 @@ unsafe fn count_different_bytes(a: &[u8], b: &[u8]) -> usize { sum } -/// Flattens the contracts into (`id` -> (`JsonAbi`, `Vec`)) pairs -pub fn flatten_contracts( - contracts: &BTreeMap, - deployed_code: bool, -) -> ContractsByArtifact { - 
ContractsByArtifact( - contracts - .iter() - .filter_map(|(id, c)| { - let bytecode = - if deployed_code { c.deployed_bytecode.bytes() } else { c.bytecode.bytes() }; - bytecode.cloned().map(|code| (id.clone(), (c.abi.clone(), code.into()))) - }) - .collect(), - ) -} - /// Artifact/Contract identifier can take the following form: /// `:`, the `artifact file name` is the name of the json file of /// the contract's artifact and the contract name is the name of the solidity contract, like diff --git a/crates/forge/Cargo.toml b/crates/forge/Cargo.toml index 6d4e40f6b97f..0a120f975408 100644 --- a/crates/forge/Cargo.toml +++ b/crates/forge/Cargo.toml @@ -15,7 +15,11 @@ name = "forge" path = "bin/main.rs" [build-dependencies] -vergen = { workspace = true, default-features = false, features = ["build", "git", "gitcl"] } +vergen = { workspace = true, default-features = false, features = [ + "build", + "git", + "gitcl", +] } [dependencies] # lib @@ -47,6 +51,7 @@ yansi = "0.5" forge-doc.workspace = true forge-fmt.workspace = true forge-verify.workspace = true +forge-script.workspace = true foundry-cli.workspace = true foundry-debugger.workspace = true @@ -94,14 +99,25 @@ globset = "0.4" paste = "1.0" path-slash = "0.2" pretty_assertions.workspace = true -svm = { package = "svm-rs", version = "0.3", default-features = false, features = ["rustls"] } +svm = { package = "svm-rs", version = "0.3", default-features = false, features = [ + "rustls", +] } tempfile = "3" tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] } [features] default = ["rustls"] -rustls = ["foundry-cli/rustls", "foundry-wallets/rustls", "reqwest/rustls-tls", "reqwest/rustls-tls-native-roots"] -openssl = ["foundry-cli/openssl", "reqwest/default-tls", "foundry-wallets/openssl"] +rustls = [ + "foundry-cli/rustls", + "foundry-wallets/rustls", + "reqwest/rustls-tls", + "reqwest/rustls-tls-native-roots", +] +openssl = [ + "foundry-cli/openssl", + "reqwest/default-tls", + 
"foundry-wallets/openssl", +] asm-keccak = ["alloy-primitives/asm-keccak"] [[bench]] diff --git a/crates/forge/bin/cmd/debug.rs b/crates/forge/bin/cmd/debug.rs index 75f27da53919..8fe1d2e32a25 100644 --- a/crates/forge/bin/cmd/debug.rs +++ b/crates/forge/bin/cmd/debug.rs @@ -1,5 +1,5 @@ -use super::{build::BuildArgs, script::ScriptArgs}; use clap::{Parser, ValueHint}; +use forge_script::ScriptArgs; use forge_verify::retry::RETRY_VERIFY_ON_CREATE; use foundry_cli::opts::CoreBuildArgs; use foundry_common::evm::EvmArgs; @@ -48,7 +48,7 @@ impl DebugArgs { target_contract: self.target_contract, sig: self.sig, gas_estimate_multiplier: 130, - opts: BuildArgs { args: self.opts, ..Default::default() }, + opts: self.opts, evm_opts: self.evm_opts, debug: true, retry: RETRY_VERIFY_ON_CREATE, diff --git a/crates/forge/bin/cmd/init.rs b/crates/forge/bin/cmd/init.rs index 9c8c3fc90c11..96144dc63d7e 100644 --- a/crates/forge/bin/cmd/init.rs +++ b/crates/forge/bin/cmd/init.rs @@ -164,11 +164,6 @@ impl InitArgs { } } -/// Returns the commit hash of the project if it exists -pub fn get_commit_hash(root: &Path) -> Option { - Git::new(root).commit_hash(true, "HEAD").ok() -} - /// Initialises `root` as a git repository, if it isn't one already. /// /// Creates `.gitignore` and `.github/workflows/test.yml`, if they don't exist already. 
diff --git a/crates/forge/bin/cmd/mod.rs b/crates/forge/bin/cmd/mod.rs index 1e1a91cbf9c4..b01366aa7ce8 100644 --- a/crates/forge/bin/cmd/mod.rs +++ b/crates/forge/bin/cmd/mod.rs @@ -56,7 +56,6 @@ pub mod inspect; pub mod install; pub mod remappings; pub mod remove; -pub mod script; pub mod selectors; pub mod snapshot; pub mod test; diff --git a/crates/forge/bin/cmd/script/broadcast.rs b/crates/forge/bin/cmd/script/broadcast.rs deleted file mode 100644 index a7ab056332ad..000000000000 --- a/crates/forge/bin/cmd/script/broadcast.rs +++ /dev/null @@ -1,701 +0,0 @@ -use super::{ - multi::MultiChainSequence, providers::ProvidersManager, receipts::clear_pendings, - sequence::ScriptSequence, transaction::TransactionWithMetadata, verify::VerifyBundle, - NestedValue, ScriptArgs, ScriptConfig, ScriptResult, -}; -use alloy_primitives::{utils::format_units, Address, TxHash, U256}; -use ethers_core::types::transaction::eip2718::TypedTransaction; -use ethers_providers::{JsonRpcClient, Middleware, Provider}; -use ethers_signers::Signer; -use eyre::{bail, Context, ContextCompat, Result}; -use forge::{inspectors::cheatcodes::BroadcastableTransactions, traces::CallTraceDecoder}; -use foundry_cli::{ - init_progress, update_progress, - utils::{has_batch_support, has_different_gas_calc}, -}; -use foundry_common::{ - provider::{ - alloy::RpcUrl, - ethers::{estimate_eip1559_fees, try_get_http_provider, RetryProvider}, - }, - shell, - types::{ToAlloy, ToEthers}, - ContractsByArtifact, -}; -use foundry_compilers::{artifacts::Libraries, ArtifactId}; -use foundry_config::Config; -use foundry_wallets::WalletSigner; -use futures::StreamExt; -use std::{ - cmp::min, - collections::{HashMap, HashSet, VecDeque}, - sync::Arc, -}; - -impl ScriptArgs { - /// Sends the transactions which haven't been broadcasted yet. 
- pub async fn send_transactions( - &self, - deployment_sequence: &mut ScriptSequence, - fork_url: &str, - signers: &HashMap, - ) -> Result<()> { - let provider = Arc::new(try_get_http_provider(fork_url)?); - let already_broadcasted = deployment_sequence.receipts.len(); - - if already_broadcasted < deployment_sequence.transactions.len() { - let required_addresses: HashSet
= deployment_sequence - .typed_transactions() - .skip(already_broadcasted) - .map(|tx| (*tx.from().expect("No sender for onchain transaction!")).to_alloy()) - .collect(); - - let (send_kind, chain) = if self.unlocked { - let chain = provider.get_chainid().await?; - let mut senders = HashSet::from([self - .evm_opts - .sender - .wrap_err("--sender must be set with --unlocked")?]); - // also take all additional senders that where set manually via broadcast - senders.extend( - deployment_sequence - .typed_transactions() - .filter_map(|tx| tx.from().copied().map(|addr| addr.to_alloy())), - ); - (SendTransactionsKind::Unlocked(senders), chain.as_u64()) - } else { - let mut missing_addresses = Vec::new(); - - println!("\n###\nFinding wallets for all the necessary addresses..."); - for addr in &required_addresses { - if !signers.contains_key(addr) { - missing_addresses.push(addr); - } - } - - if !missing_addresses.is_empty() { - let mut error_msg = String::new(); - - // This is an actual used address - if required_addresses.contains(&Config::DEFAULT_SENDER) { - error_msg += "\nYou seem to be using Foundry's default sender. Be sure to set your own --sender.\n"; - } - - eyre::bail!( - "{}No associated wallet for addresses: {:?}. Unlocked wallets: {:?}", - error_msg, - missing_addresses, - signers.keys().collect::>() - ); - } - - let chain = provider.get_chainid().await?.as_u64(); - - (SendTransactionsKind::Raw(signers), chain) - }; - - // We only wait for a transaction receipt before sending the next transaction, if there - // is more than one signer. There would be no way of assuring their order - // otherwise. Or if the chain does not support batched transactions (eg. Arbitrum). 
- let sequential_broadcast = - send_kind.signers_count() != 1 || self.slow || !has_batch_support(chain); - - // Make a one-time gas price estimation - let (gas_price, eip1559_fees) = { - match deployment_sequence.transactions.front().unwrap().typed_tx() { - TypedTransaction::Eip1559(_) => { - let fees = estimate_eip1559_fees(&provider, Some(chain)) - .await - .wrap_err("Failed to estimate EIP1559 fees. This chain might not support EIP1559, try adding --legacy to your command.")?; - - (None, Some(fees)) - } - _ => (provider.get_gas_price().await.ok(), None), - } - }; - - // Iterate through transactions, matching the `from` field with the associated - // wallet. Then send the transaction. Panics if we find a unknown `from` - let sequence = deployment_sequence - .transactions - .iter() - .skip(already_broadcasted) - .map(|tx_with_metadata| { - let tx = tx_with_metadata.typed_tx(); - let from = (*tx.from().expect("No sender for onchain transaction!")).to_alloy(); - - let kind = send_kind.for_sender(&from)?; - let is_fixed_gas_limit = tx_with_metadata.is_fixed_gas_limit; - - let mut tx = tx.clone(); - - tx.set_chain_id(chain); - - if let Some(gas_price) = self.with_gas_price { - tx.set_gas_price(gas_price.to_ethers()); - } else { - // fill gas price - match tx { - TypedTransaction::Eip1559(ref mut inner) => { - let eip1559_fees = - eip1559_fees.expect("Could not get eip1559 fee estimation."); - if let Some(priority_gas_price) = self.priority_gas_price { - inner.max_priority_fee_per_gas = - Some(priority_gas_price.to_ethers()); - } else { - inner.max_priority_fee_per_gas = Some(eip1559_fees.1); - } - inner.max_fee_per_gas = Some(eip1559_fees.0); - } - _ => { - tx.set_gas_price(gas_price.expect("Could not get gas_price.")); - } - } - } - - Ok((tx, kind, is_fixed_gas_limit)) - }) - .collect::>>()?; - - let pb = init_progress!(deployment_sequence.transactions, "txes"); - - // We send transactions and wait for receipts in batches of 100, since some networks - // cannot 
handle more than that. - let batch_size = 100; - let mut index = 0; - - for (batch_number, batch) in sequence.chunks(batch_size).map(|f| f.to_vec()).enumerate() - { - let mut pending_transactions = vec![]; - - shell::println(format!( - "##\nSending transactions [{} - {}].", - batch_number * batch_size, - batch_number * batch_size + min(batch_size, batch.len()) - 1 - ))?; - for (tx, kind, is_fixed_gas_limit) in batch.into_iter() { - let tx_hash = self.send_transaction( - provider.clone(), - tx, - kind, - sequential_broadcast, - fork_url, - is_fixed_gas_limit, - ); - - if sequential_broadcast { - let tx_hash = tx_hash.await?; - deployment_sequence.add_pending(index, tx_hash); - - update_progress!(pb, (index + already_broadcasted)); - index += 1; - - clear_pendings(provider.clone(), deployment_sequence, Some(vec![tx_hash])) - .await?; - } else { - pending_transactions.push(tx_hash); - } - } - - if !pending_transactions.is_empty() { - let mut buffer = futures::stream::iter(pending_transactions).buffered(7); - - while let Some(tx_hash) = buffer.next().await { - let tx_hash = tx_hash?; - deployment_sequence.add_pending(index, tx_hash); - - update_progress!(pb, (index + already_broadcasted)); - index += 1; - } - - // Checkpoint save - deployment_sequence.save()?; - - if !sequential_broadcast { - shell::println("##\nWaiting for receipts.")?; - clear_pendings(provider.clone(), deployment_sequence, None).await?; - } - } - - // Checkpoint save - deployment_sequence.save()?; - } - } - - shell::println("\n\n==========================")?; - shell::println("\nONCHAIN EXECUTION COMPLETE & SUCCESSFUL.")?; - - let (total_gas, total_gas_price, total_paid) = deployment_sequence.receipts.iter().fold( - (U256::ZERO, U256::ZERO, U256::ZERO), - |acc, receipt| { - let gas_used = receipt.gas_used.unwrap_or_default().to_alloy(); - let gas_price = receipt.effective_gas_price.unwrap_or_default().to_alloy(); - (acc.0 + gas_used, acc.1 + gas_price, acc.2 + gas_used * gas_price) - }, - ); - let 
paid = format_units(total_paid, 18).unwrap_or_else(|_| "N/A".to_string()); - let avg_gas_price = - format_units(total_gas_price / U256::from(deployment_sequence.receipts.len()), 9) - .unwrap_or_else(|_| "N/A".to_string()); - shell::println(format!( - "Total Paid: {} ETH ({} gas * avg {} gwei)", - paid.trim_end_matches('0'), - total_gas, - avg_gas_price.trim_end_matches('0').trim_end_matches('.') - ))?; - - Ok(()) - } - - async fn send_transaction( - &self, - provider: Arc, - mut tx: TypedTransaction, - kind: SendTransactionKind<'_>, - sequential_broadcast: bool, - fork_url: &str, - is_fixed_gas_limit: bool, - ) -> Result { - let from = tx.from().expect("no sender"); - - if sequential_broadcast { - let nonce = forge::next_nonce((*from).to_alloy(), fork_url, None) - .await - .map_err(|_| eyre::eyre!("Not able to query the EOA nonce."))?; - - let tx_nonce = tx.nonce().expect("no nonce"); - if let Ok(tx_nonce) = u64::try_from(tx_nonce.to_alloy()) { - if nonce != tx_nonce { - bail!("EOA nonce changed unexpectedly while sending transactions. Expected {tx_nonce} got {nonce} from provider.") - } - } - } - - match kind { - SendTransactionKind::Unlocked(addr) => { - debug!("sending transaction from unlocked account {:?}: {:?}", addr, tx); - - // Chains which use `eth_estimateGas` are being sent sequentially and require their - // gas to be re-estimated right before broadcasting. - if !is_fixed_gas_limit && - (has_different_gas_calc(provider.get_chainid().await?.as_u64()) || - self.skip_simulation) - { - self.estimate_gas(&mut tx, &provider).await?; - } - - // Submit the transaction - let pending = provider.send_transaction(tx, None).await?; - - Ok(pending.tx_hash().to_alloy()) - } - SendTransactionKind::Raw(signer) => self.broadcast(provider, signer, tx).await, - } - } - - /// Executes the created transactions, and if no error has occurred, broadcasts - /// them. 
- pub async fn handle_broadcastable_transactions( - &self, - mut result: ScriptResult, - libraries: Libraries, - decoder: &CallTraceDecoder, - mut script_config: ScriptConfig, - verify: VerifyBundle, - signers: &HashMap, - ) -> Result<()> { - if let Some(txs) = result.transactions.take() { - script_config.collect_rpcs(&txs); - script_config.check_multi_chain_constraints(&libraries)?; - script_config.check_shanghai_support().await?; - - if !script_config.missing_rpc { - trace!(target: "script", "creating deployments"); - - let mut deployments = self - .create_script_sequences( - txs, - &result, - &mut script_config, - decoder, - &verify.known_contracts, - ) - .await?; - - if script_config.has_multiple_rpcs() { - trace!(target: "script", "broadcasting multi chain deployment"); - - let multi = MultiChainSequence::new( - deployments.clone(), - &self.sig, - script_config.target_contract(), - &script_config.config, - self.broadcast, - )?; - - if self.broadcast { - self.multi_chain_deployment( - multi, - libraries, - &script_config.config, - verify, - signers, - ) - .await?; - } - } else if self.broadcast { - self.single_deployment( - deployments.first_mut().expect("missing deployment"), - script_config, - libraries, - verify, - signers, - ) - .await?; - } - - if !self.broadcast { - shell::println("\nSIMULATION COMPLETE. To broadcast these transactions, add --broadcast and wallet configuration(s) to the previous command. See forge script --help for more.")?; - } - } else { - shell::println("\nIf you wish to simulate on-chain transactions pass a RPC URL.")?; - } - } - Ok(()) - } - - /// Broadcasts a single chain script. 
- async fn single_deployment( - &self, - deployment_sequence: &mut ScriptSequence, - script_config: ScriptConfig, - libraries: Libraries, - verify: VerifyBundle, - signers: &HashMap, - ) -> Result<()> { - trace!(target: "script", "broadcasting single chain deployment"); - - if self.verify { - deployment_sequence.verify_preflight_check(&script_config.config, &verify)?; - } - - let rpc = script_config.total_rpcs.into_iter().next().expect("exists; qed"); - - deployment_sequence.add_libraries(libraries); - - self.send_transactions(deployment_sequence, &rpc, signers).await?; - - if self.verify { - return deployment_sequence.verify_contracts(&script_config.config, verify).await; - } - Ok(()) - } - - /// Given the collected transactions it creates a list of [`ScriptSequence`]. List length will - /// be higher than 1, if we're dealing with a multi chain deployment. - /// - /// If `--skip-simulation` is not passed, it will make an onchain simulation of the transactions - /// before adding them to [`ScriptSequence`]. - async fn create_script_sequences( - &self, - txs: BroadcastableTransactions, - script_result: &ScriptResult, - script_config: &mut ScriptConfig, - decoder: &CallTraceDecoder, - known_contracts: &ContractsByArtifact, - ) -> Result> { - if !txs.is_empty() { - let gas_filled_txs = self - .fills_transactions_with_gas(txs, script_config, decoder, known_contracts) - .await?; - - let returns = self.get_returns(&*script_config, &script_result.returned)?; - - return self - .bundle_transactions( - gas_filled_txs, - &script_config.target_contract().clone(), - &mut script_config.config, - returns, - ) - .await; - } else if self.broadcast { - eyre::bail!("No onchain transactions generated in script"); - } - - Ok(vec![]) - } - - /// Takes the collected transactions and executes them locally before converting them to - /// [`TransactionWithMetadata`] with the appropriate gas execution estimation. If - /// `--skip-simulation` is passed, then it will skip the execution. 
- async fn fills_transactions_with_gas( - &self, - txs: BroadcastableTransactions, - script_config: &ScriptConfig, - decoder: &CallTraceDecoder, - known_contracts: &ContractsByArtifact, - ) -> Result> { - let gas_filled_txs = if self.skip_simulation { - shell::println("\nSKIPPING ON CHAIN SIMULATION.")?; - txs.into_iter() - .map(|btx| { - let mut tx = TransactionWithMetadata::from_tx_request(btx.transaction); - tx.rpc = btx.rpc; - tx - }) - .collect() - } else { - self.onchain_simulation( - txs, - script_config, - decoder, - known_contracts, - ) - .await - .wrap_err("\nTransaction failed when running the on-chain simulation. Check the trace above for more information.")? - }; - Ok(gas_filled_txs) - } - - /// Returns all transactions of the [`TransactionWithMetadata`] type in a list of - /// [`ScriptSequence`]. List length will be higher than 1, if we're dealing with a multi - /// chain deployment. - /// - /// Each transaction will be added with the correct transaction type and gas estimation. - async fn bundle_transactions( - &self, - transactions: VecDeque, - target: &ArtifactId, - config: &mut Config, - returns: HashMap, - ) -> Result> { - // User might be using both "in-code" forks and `--fork-url`. - let last_rpc = &transactions.back().expect("exists; qed").rpc; - let is_multi_deployment = transactions.iter().any(|tx| &tx.rpc != last_rpc); - - let mut total_gas_per_rpc: HashMap = HashMap::new(); - - // Batches sequence of transactions from different rpcs. - let mut new_sequence = VecDeque::new(); - let mut manager = ProvidersManager::default(); - let mut deployments = vec![]; - - // Config is used to initialize the sequence chain, so we need to change when handling a new - // sequence. This makes sure we don't lose the original value. - let original_config_chain = config.chain; - - // Peeking is used to check if the next rpc url is different. If so, it creates a - // [`ScriptSequence`] from all the collected transactions up to this point. 
- let mut txes_iter = transactions.into_iter().peekable(); - - while let Some(mut tx) = txes_iter.next() { - let tx_rpc = match tx.rpc.clone() { - Some(rpc) => rpc, - None => { - let rpc = self.evm_opts.ensure_fork_url()?.clone(); - // Fills the RPC inside the transaction, if missing one. - tx.rpc = Some(rpc.clone()); - rpc - } - }; - - let provider_info = manager.get_or_init_provider(&tx_rpc, self.legacy).await?; - - // Handles chain specific requirements. - tx.change_type(provider_info.is_legacy); - tx.transaction.set_chain_id(provider_info.chain); - - if !self.skip_simulation { - let typed_tx = tx.typed_tx_mut(); - - if has_different_gas_calc(provider_info.chain) { - trace!("estimating with different gas calculation"); - let gas = *typed_tx.gas().expect("gas is set by simulation."); - - // We are trying to show the user an estimation of the total gas usage. - // - // However, some transactions might depend on previous ones. For - // example, tx1 might deploy a contract that tx2 uses. That - // will result in the following `estimate_gas` call to fail, - // since tx1 hasn't been broadcasted yet. - // - // Not exiting here will not be a problem when actually broadcasting, because - // for chains where `has_different_gas_calc` returns true, - // we await each transaction before broadcasting the next - // one. - if let Err(err) = self.estimate_gas(typed_tx, &provider_info.provider).await { - trace!("gas estimation failed: {err}"); - - // Restore gas value, since `estimate_gas` will remove it. - typed_tx.set_gas(gas); - } - } - - let total_gas = total_gas_per_rpc.entry(tx_rpc.clone()).or_insert(U256::ZERO); - *total_gas += (*typed_tx.gas().expect("gas is set")).to_alloy(); - } - - new_sequence.push_back(tx); - // We only create a [`ScriptSequence`] object when we collect all the rpc related - // transactions. 
- if let Some(next_tx) = txes_iter.peek() { - if next_tx.rpc == Some(tx_rpc) { - continue; - } - } - - config.chain = Some(provider_info.chain.into()); - let sequence = ScriptSequence::new( - new_sequence, - returns.clone(), - &self.sig, - target, - config, - self.broadcast, - is_multi_deployment, - )?; - - deployments.push(sequence); - - new_sequence = VecDeque::new(); - } - - // Restore previous config chain. - config.chain = original_config_chain; - - if !self.skip_simulation { - // Present gas information on a per RPC basis. - for (rpc, total_gas) in total_gas_per_rpc { - let provider_info = manager.get(&rpc).expect("provider is set."); - - // We don't store it in the transactions, since we want the most updated value. - // Right before broadcasting. - let per_gas = if let Some(gas_price) = self.with_gas_price { - gas_price - } else { - provider_info.gas_price()? - }; - - shell::println("\n==========================")?; - shell::println(format!("\nChain {}", provider_info.chain))?; - - shell::println(format!( - "\nEstimated gas price: {} gwei", - format_units(per_gas, 9) - .unwrap_or_else(|_| "[Could not calculate]".to_string()) - .trim_end_matches('0') - .trim_end_matches('.') - ))?; - shell::println(format!("\nEstimated total gas used for script: {total_gas}"))?; - shell::println(format!( - "\nEstimated amount required: {} ETH", - format_units(total_gas.saturating_mul(per_gas), 18) - .unwrap_or_else(|_| "[Could not calculate]".to_string()) - .trim_end_matches('0') - ))?; - shell::println("\n==========================")?; - } - } - Ok(deployments) - } - - /// Uses the signer to submit a transaction to the network. If it fails, it tries to retrieve - /// the transaction hash that can be used on a later run with `--resume`. 
- async fn broadcast( - &self, - provider: Arc, - signer: &WalletSigner, - mut legacy_or_1559: TypedTransaction, - ) -> Result { - debug!("sending transaction: {:?}", legacy_or_1559); - - // Chains which use `eth_estimateGas` are being sent sequentially and require their gas - // to be re-estimated right before broadcasting. - if has_different_gas_calc(signer.chain_id()) || self.skip_simulation { - // if already set, some RPC endpoints might simply return the gas value that is - // already set in the request and omit the estimate altogether, so - // we remove it here - let _ = legacy_or_1559.gas_mut().take(); - - self.estimate_gas(&mut legacy_or_1559, &provider).await?; - } - - // Signing manually so we skip `fill_transaction` and its `eth_createAccessList` - // request. - let signature = signer - .sign_transaction(&legacy_or_1559) - .await - .wrap_err("Failed to sign transaction")?; - - // Submit the raw transaction - let pending = provider.send_raw_transaction(legacy_or_1559.rlp_signed(&signature)).await?; - - Ok(pending.tx_hash().to_alloy()) - } - - async fn estimate_gas(&self, tx: &mut TypedTransaction, provider: &Provider) -> Result<()> - where - T: JsonRpcClient, - { - // if already set, some RPC endpoints might simply return the gas value that is already - // set in the request and omit the estimate altogether, so we remove it here - let _ = tx.gas_mut().take(); - - tx.set_gas( - provider - .estimate_gas(tx, None) - .await - .wrap_err_with(|| format!("Failed to estimate gas for tx: {:?}", tx.sighash()))? * - self.gas_estimate_multiplier / - 100, - ); - Ok(()) - } -} - -/// How to send a single transaction -#[derive(Clone)] -enum SendTransactionKind<'a> { - Unlocked(Address), - Raw(&'a WalletSigner), -} - -/// Represents how to send _all_ transactions -enum SendTransactionsKind<'a> { - /// Send via `eth_sendTransaction` and rely on the `from` address being unlocked. - Unlocked(HashSet
), - /// Send a signed transaction via `eth_sendRawTransaction` - Raw(&'a HashMap), -} - -impl SendTransactionsKind<'_> { - /// Returns the [`SendTransactionKind`] for the given address - /// - /// Returns an error if no matching signer is found or the address is not unlocked - fn for_sender(&self, addr: &Address) -> Result> { - match self { - SendTransactionsKind::Unlocked(unlocked) => { - if !unlocked.contains(addr) { - bail!("Sender address {:?} is not unlocked", addr) - } - Ok(SendTransactionKind::Unlocked(*addr)) - } - SendTransactionsKind::Raw(wallets) => { - if let Some(wallet) = wallets.get(addr) { - Ok(SendTransactionKind::Raw(wallet)) - } else { - bail!("No matching signer for {:?} found", addr) - } - } - } - } - - /// How many signers are set - fn signers_count(&self) -> usize { - match self { - SendTransactionsKind::Unlocked(addr) => addr.len(), - SendTransactionsKind::Raw(signers) => signers.len(), - } - } -} diff --git a/crates/forge/bin/cmd/script/build.rs b/crates/forge/bin/cmd/script/build.rs deleted file mode 100644 index a2bdc1490d4b..000000000000 --- a/crates/forge/bin/cmd/script/build.rs +++ /dev/null @@ -1,208 +0,0 @@ -use super::{ScriptArgs, ScriptConfig}; -use alloy_primitives::{Address, Bytes}; -use eyre::{Context, ContextCompat, Result}; -use foundry_cli::utils::get_cached_entry_by_name; -use foundry_common::compile::{self, ContractSources, ProjectCompiler}; -use foundry_compilers::{ - artifacts::{ContractBytecode, ContractBytecodeSome, Libraries}, - cache::SolFilesCache, - contracts::ArtifactContracts, - info::ContractInfo, - ArtifactId, Project, ProjectCompileOutput, -}; -use foundry_linking::{LinkOutput, Linker}; -use std::str::FromStr; - -impl ScriptArgs { - /// Compiles the file or project and the verify metadata. - pub fn compile(&mut self, script_config: &mut ScriptConfig) -> Result { - trace!(target: "script", "compiling script"); - - self.build(script_config) - } - - /// Compiles the file with auto-detection and compiler params. 
- pub fn build(&mut self, script_config: &mut ScriptConfig) -> Result { - let (project, output) = self.get_project_and_output(script_config)?; - let root = project.root(); - let output = output.with_stripped_file_prefixes(root); - let sources = ContractSources::from_project_output(&output, root)?; - let contracts = output.into_artifacts().collect(); - - let target = self.find_target(&project, &contracts)?.clone(); - script_config.target_contract = Some(target.clone()); - - let libraries = script_config.config.libraries_with_remappings()?; - let linker = Linker::new(project.root(), contracts); - - let (highlevel_known_contracts, libraries, predeploy_libraries) = self.link_script_target( - &linker, - libraries, - script_config.evm_opts.sender, - script_config.sender_nonce, - target.clone(), - )?; - - let contract = highlevel_known_contracts.get(&target).unwrap(); - - Ok(BuildOutput { - project, - linker, - contract: contract.clone(), - highlevel_known_contracts, - libraries, - predeploy_libraries, - sources, - }) - } - - /// Tries to find artifact for the target script contract. - pub fn find_target<'a>( - &self, - project: &Project, - contracts: &'a ArtifactContracts, - ) -> Result<&'a ArtifactId> { - let mut target_fname = dunce::canonicalize(&self.path) - .wrap_err("Couldn't convert contract path to absolute path.")? - .strip_prefix(project.root()) - .wrap_err("Couldn't strip project root from contract path.")? - .to_str() - .wrap_err("Bad path to string.")? 
- .to_string(); - - let no_target_name = if let Some(target_name) = &self.target_contract { - target_fname = target_fname + ":" + target_name; - false - } else { - true - }; - - let mut target: Option<&ArtifactId> = None; - - for (id, contract) in contracts.iter() { - if no_target_name { - // Match artifact source, and ignore interfaces - if id.source == std::path::Path::new(&target_fname) && - contract.bytecode.as_ref().map_or(false, |b| b.object.bytes_len() > 0) - { - if let Some(target) = target { - // We might have multiple artifacts for the same contract but with different - // solc versions. Their names will have form of {name}.0.X.Y, so we are - // stripping versions off before comparing them. - let target_name = target.name.split('.').next().unwrap(); - let id_name = id.name.split('.').next().unwrap(); - if target_name != id_name { - eyre::bail!("Multiple contracts in the target path. Please specify the contract name with `--tc ContractName`") - } - } - target = Some(id); - } - } else { - let (path, name) = - target_fname.rsplit_once(':').expect("The target specifier is malformed."); - let path = std::path::Path::new(path); - if path == id.source && name == id.name { - target = Some(id); - } - } - } - - target.ok_or_else(|| eyre::eyre!("Could not find target contract: {}", target_fname)) - } - - /// Links script artifact with given libraries or library addresses computed from script sender - /// and nonce. 
- /// - /// Populates [BuildOutput] with linked target contract, libraries, bytes of libs that need to - /// be predeployed and `highlevel_known_contracts` - set of known fully linked contracts - pub fn link_script_target( - &self, - linker: &Linker, - libraries: Libraries, - sender: Address, - nonce: u64, - target: ArtifactId, - ) -> Result<(ArtifactContracts, Libraries, Vec)> { - let LinkOutput { libs_to_deploy, libraries } = - linker.link_with_nonce_or_address(libraries, sender, nonce, &target)?; - - // Collect all linked contracts with non-empty bytecode - let highlevel_known_contracts = linker - .get_linked_artifacts(&libraries)? - .iter() - .filter_map(|(id, contract)| { - ContractBytecodeSome::try_from(ContractBytecode::from(contract.clone())) - .ok() - .map(|tc| (id.clone(), tc)) - }) - .filter(|(_, tc)| tc.bytecode.object.is_non_empty_bytecode()) - .collect(); - - Ok((highlevel_known_contracts, libraries, libs_to_deploy)) - } - - pub fn get_project_and_output( - &mut self, - script_config: &ScriptConfig, - ) -> Result<(Project, ProjectCompileOutput)> { - let project = script_config.config.project()?; - - let filters = self.opts.skip.clone().unwrap_or_default(); - // We received a valid file path. - // If this file does not exist, `dunce::canonicalize` will - // result in an error and it will be handled below. - if let Ok(target_contract) = dunce::canonicalize(&self.path) { - let output = compile::compile_target_with_filter( - &target_contract, - &project, - self.opts.args.silent, - self.verify, - filters, - )?; - return Ok((project, output)) - } - - if !project.paths.has_input_files() { - eyre::bail!("The project doesn't have any input files. Make sure the `script` directory is configured properly in foundry.toml. 
Otherwise, provide the path to the file.") - } - - let contract = ContractInfo::from_str(&self.path)?; - self.target_contract = Some(contract.name.clone()); - - // We received `contract_path:contract_name` - if let Some(path) = contract.path { - let path = - dunce::canonicalize(path).wrap_err("Could not canonicalize the target path")?; - let output = compile::compile_target_with_filter( - &path, - &project, - self.opts.args.silent, - self.verify, - filters, - )?; - self.path = path.to_string_lossy().to_string(); - return Ok((project, output)) - } - - // We received `contract_name`, and need to find its file path. - let output = ProjectCompiler::new().compile(&project)?; - let cache = - SolFilesCache::read_joined(&project.paths).wrap_err("Could not open compiler cache")?; - - let (path, _) = get_cached_entry_by_name(&cache, &contract.name) - .wrap_err("Could not find target contract in cache")?; - self.path = path.to_string_lossy().to_string(); - - Ok((project, output)) - } -} - -pub struct BuildOutput { - pub project: Project, - pub contract: ContractBytecodeSome, - pub linker: Linker, - pub highlevel_known_contracts: ArtifactContracts, - pub libraries: Libraries, - pub predeploy_libraries: Vec, - pub sources: ContractSources, -} diff --git a/crates/forge/bin/cmd/script/cmd.rs b/crates/forge/bin/cmd/script/cmd.rs deleted file mode 100644 index f864f29f8bc0..000000000000 --- a/crates/forge/bin/cmd/script/cmd.rs +++ /dev/null @@ -1,382 +0,0 @@ -use super::{ - multi::MultiChainSequence, sequence::ScriptSequence, verify::VerifyBundle, ScriptArgs, - ScriptConfig, ScriptResult, -}; -use crate::cmd::script::{build::BuildOutput, receipts}; -use alloy_primitives::{Address, Bytes}; -use ethers_providers::Middleware; -use ethers_signers::Signer; -use eyre::{OptionExt, Result}; -use forge::traces::CallTraceDecoder; -use foundry_cli::utils::LoadConfig; -use foundry_common::{ - contracts::flatten_contracts, provider::ethers::try_get_http_provider, types::ToAlloy, -}; -use 
foundry_compilers::{ - artifacts::{ContractBytecodeSome, Libraries}, - contracts::ArtifactContracts, -}; -use foundry_debugger::Debugger; -use foundry_evm::inspectors::cheatcodes::{BroadcastableTransaction, ScriptWallets}; -use foundry_linking::Linker; -use foundry_wallets::WalletSigner; -use std::{collections::HashMap, sync::Arc}; - -/// Helper alias type for the collection of data changed due to the new sender. -type NewSenderChanges = (CallTraceDecoder, Libraries, ArtifactContracts); - -impl ScriptArgs { - /// Executes the script - pub async fn run_script(mut self) -> Result<()> { - trace!(target: "script", "executing script command"); - - let (config, evm_opts) = self.load_config_and_evm_opts_emit_warnings()?; - let mut script_config = ScriptConfig { - // dapptools compatibility - sender_nonce: 1, - config, - evm_opts, - debug: self.debug, - ..Default::default() - }; - - if let Some(sender) = self.maybe_load_private_key()? { - script_config.evm_opts.sender = sender; - } - - if let Some(ref fork_url) = script_config.evm_opts.fork_url { - // when forking, override the sender's nonce to the onchain value - script_config.sender_nonce = - forge::next_nonce(script_config.evm_opts.sender, fork_url, None).await? - } else { - // if not forking, then ignore any pre-deployed library addresses - script_config.config.libraries = Default::default(); - } - - let build_output = self.compile(&mut script_config)?; - - let mut verify = VerifyBundle::new( - &build_output.project, - &script_config.config, - flatten_contracts(&build_output.highlevel_known_contracts, false), - self.retry, - self.verifier.clone(), - ); - - let BuildOutput { - contract, - mut highlevel_known_contracts, - predeploy_libraries, - linker, - sources, - mut libraries, - .. - } = build_output; - - // Execute once with default sender. 
- let sender = script_config.evm_opts.sender; - - let multi_wallet = self.wallets.get_multi_wallet().await?; - let script_wallets = ScriptWallets::new(multi_wallet, self.evm_opts.sender); - - // We need to execute the script even if just resuming, in case we need to collect private - // keys from the execution. - let mut result = self - .execute( - &mut script_config, - contract, - sender, - &predeploy_libraries, - script_wallets.clone(), - ) - .await?; - - if self.resume || (self.verify && !self.broadcast) { - let signers = script_wallets.into_multi_wallet().into_signers()?; - return self.resume_deployment(script_config, linker, libraries, verify, &signers).await; - } - - let known_contracts = flatten_contracts(&highlevel_known_contracts, true); - let mut decoder = self.decode_traces(&script_config, &mut result, &known_contracts)?; - - if self.debug { - let mut debugger = Debugger::builder() - .debug_arenas(result.debug.as_deref().unwrap_or_default()) - .decoder(&decoder) - .sources(sources) - .breakpoints(result.breakpoints.clone()) - .build(); - debugger.try_run()?; - } - - if let Some((new_traces, updated_libraries, updated_contracts)) = self - .maybe_prepare_libraries( - &mut script_config, - linker, - predeploy_libraries, - &mut result, - script_wallets.clone(), - ) - .await? 
- { - decoder = new_traces; - highlevel_known_contracts = updated_contracts; - libraries = updated_libraries; - } - - if self.json { - self.show_json(&script_config, &result)?; - } else { - self.show_traces(&script_config, &decoder, &mut result).await?; - } - - verify.known_contracts = flatten_contracts(&highlevel_known_contracts, false); - self.check_contract_sizes(&result, &highlevel_known_contracts)?; - - let signers = script_wallets.into_multi_wallet().into_signers()?; - - self.handle_broadcastable_transactions( - result, - libraries, - &decoder, - script_config, - verify, - &signers, - ) - .await - } - - // In case there are libraries to be deployed, it makes sure that these are added to the list of - // broadcastable transactions with the appropriate sender. - async fn maybe_prepare_libraries( - &mut self, - script_config: &mut ScriptConfig, - linker: Linker, - predeploy_libraries: Vec, - result: &mut ScriptResult, - script_wallets: ScriptWallets, - ) -> Result> { - if let Some(new_sender) = self.maybe_new_sender( - &script_config.evm_opts, - result.transactions.as_ref(), - &predeploy_libraries, - )? { - // We have a new sender, so we need to relink all the predeployed libraries. - let (libraries, highlevel_known_contracts) = self - .rerun_with_new_deployer(script_config, new_sender, result, linker, script_wallets) - .await?; - - // redo traces for the new addresses - let new_traces = self.decode_traces( - &*script_config, - result, - &flatten_contracts(&highlevel_known_contracts, true), - )?; - - return Ok(Some((new_traces, libraries, highlevel_known_contracts))); - } - - // Add predeploy libraries to the list of broadcastable transactions. 
- let mut lib_deploy = self.create_deploy_transactions( - script_config.evm_opts.sender, - script_config.sender_nonce, - &predeploy_libraries, - &script_config.evm_opts.fork_url, - ); - - if let Some(txs) = &mut result.transactions { - for tx in txs.iter() { - lib_deploy.push_back(BroadcastableTransaction { - rpc: tx.rpc.clone(), - transaction: tx.transaction.clone(), - }); - } - *txs = lib_deploy; - } - - Ok(None) - } - - /// Resumes the deployment and/or verification of the script. - async fn resume_deployment( - &mut self, - script_config: ScriptConfig, - linker: Linker, - libraries: Libraries, - verify: VerifyBundle, - signers: &HashMap, - ) -> Result<()> { - if self.multi { - return self - .multi_chain_deployment( - MultiChainSequence::load( - &script_config.config, - &self.sig, - script_config.target_contract(), - )?, - libraries, - &script_config.config, - verify, - signers, - ) - .await; - } - self.resume_single_deployment( - script_config, - linker, - verify, - signers, - ) - .await - .map_err(|err| { - eyre::eyre!("{err}\n\nIf you were trying to resume or verify a multi chain deployment, add `--multi` to your command invocation.") - }) - } - - /// Resumes the deployment and/or verification of a single RPC script. 
- async fn resume_single_deployment( - &mut self, - script_config: ScriptConfig, - linker: Linker, - mut verify: VerifyBundle, - signers: &HashMap, - ) -> Result<()> { - trace!(target: "script", "resuming single deployment"); - - let fork_url = script_config - .evm_opts - .fork_url - .as_deref() - .ok_or_else(|| eyre::eyre!("Missing `--fork-url` field."))?; - let provider = Arc::new(try_get_http_provider(fork_url)?); - - let chain = provider.get_chainid().await?.as_u64(); - verify.set_chain(&script_config.config, chain.into()); - - let broadcasted = self.broadcast || self.resume; - let mut deployment_sequence = match ScriptSequence::load( - &script_config.config, - &self.sig, - script_config.target_contract(), - chain, - broadcasted, - ) { - Ok(seq) => seq, - // If the script was simulated, but there was no attempt to broadcast yet, - // try to read the script sequence from the `dry-run/` folder - Err(_) if broadcasted => ScriptSequence::load( - &script_config.config, - &self.sig, - script_config.target_contract(), - chain, - false, - )?, - Err(err) => eyre::bail!(err), - }; - - if self.verify { - deployment_sequence.verify_preflight_check(&script_config.config, &verify)?; - } - - receipts::wait_for_pending(provider, &mut deployment_sequence).await?; - - if self.resume { - self.send_transactions(&mut deployment_sequence, fork_url, signers).await?; - } - - if self.verify { - let target = script_config.target_contract(); - let libraries = Libraries::parse(&deployment_sequence.libraries)? - .with_stripped_file_prefixes(linker.root.as_path()); - // We might have predeployed libraries from the broadcasting, so we need to - // relink the contracts with them, since their mapping is - // not included in the solc cache files. 
- let (highlevel_known_contracts, _, predeploy_libraries) = self.link_script_target( - &linker, - libraries, - script_config.config.sender, // irrelevant, since we're not creating any - 0, // irrelevant, since we're not creating any - target.clone(), - )?; - - if !predeploy_libraries.is_empty() { - eyre::bail!("Incomplete set of libraries in deployment artifact."); - } - - verify.known_contracts = flatten_contracts(&highlevel_known_contracts, false); - - deployment_sequence.verify_contracts(&script_config.config, verify).await?; - } - - Ok(()) - } - - /// Reruns the execution with a new sender and relinks the libraries accordingly - async fn rerun_with_new_deployer( - &mut self, - script_config: &mut ScriptConfig, - new_sender: Address, - first_run_result: &mut ScriptResult, - linker: Linker, - script_wallets: ScriptWallets, - ) -> Result<(Libraries, ArtifactContracts)> { - // if we had a new sender that requires relinking, we need to - // get the nonce mainnet for accurate addresses for predeploy libs - let nonce = forge::next_nonce( - new_sender, - script_config.evm_opts.fork_url.as_ref().ok_or_else(|| { - eyre::eyre!("You must provide an RPC URL (see --fork-url) when broadcasting.") - })?, - None, - ) - .await?; - script_config.sender_nonce = nonce; - let target = script_config.target_contract(); - - let libraries = script_config.config.libraries_with_remappings()?; - - let (highlevel_known_contracts, libraries, predeploy_libraries) = - self.link_script_target(&linker, libraries, new_sender, nonce, target.clone())?; - - let contract = highlevel_known_contracts - .get(target) - .ok_or_eyre("target not found in linked artifacts")? 
- .clone(); - - let mut txs = self.create_deploy_transactions( - new_sender, - nonce, - &predeploy_libraries, - &script_config.evm_opts.fork_url, - ); - - let result = self - .execute(script_config, contract, new_sender, &predeploy_libraries, script_wallets) - .await?; - - if let Some(new_txs) = &result.transactions { - for new_tx in new_txs.iter() { - txs.push_back(BroadcastableTransaction { - rpc: new_tx.rpc.clone(), - transaction: new_tx.transaction.clone(), - }); - } - } - - *first_run_result = result; - first_run_result.transactions = Some(txs); - - Ok((libraries, highlevel_known_contracts)) - } - - /// In case the user has loaded *only* one private-key, we can assume that he's using it as the - /// `--sender` - fn maybe_load_private_key(&mut self) -> Result> { - let maybe_sender = self - .wallets - .private_keys()? - .filter(|pks| pks.len() == 1) - .map(|pks| pks.first().unwrap().address().to_alloy()); - Ok(maybe_sender) - } -} diff --git a/crates/forge/bin/cmd/script/executor.rs b/crates/forge/bin/cmd/script/executor.rs deleted file mode 100644 index a78f5b9f451a..000000000000 --- a/crates/forge/bin/cmd/script/executor.rs +++ /dev/null @@ -1,325 +0,0 @@ -use super::{ - artifacts::ArtifactInfo, - runner::{ScriptRunner, SimulationStage}, - transaction::{AdditionalContract, TransactionWithMetadata}, - ScriptArgs, ScriptConfig, ScriptResult, -}; -use alloy_primitives::{Address, Bytes, U256}; -use eyre::{Context, Result}; -use forge::{ - backend::Backend, - executors::ExecutorBuilder, - inspectors::{cheatcodes::BroadcastableTransactions, CheatsConfig}, - traces::{render_trace_arena, CallTraceDecoder}, -}; -use foundry_cli::utils::{ensure_clean_constructor, needs_setup}; -use foundry_common::{get_contract_name, provider::ethers::RpcUrl, shell, ContractsByArtifact}; -use foundry_compilers::artifacts::ContractBytecodeSome; -use foundry_evm::inspectors::cheatcodes::ScriptWallets; -use futures::future::join_all; -use parking_lot::RwLock; -use std::{ - 
collections::{BTreeMap, HashMap, VecDeque}, - sync::Arc, -}; - -impl ScriptArgs { - /// Locally deploys and executes the contract method that will collect all broadcastable - /// transactions. - pub async fn execute( - &self, - script_config: &mut ScriptConfig, - contract: ContractBytecodeSome, - sender: Address, - predeploy_libraries: &[Bytes], - script_wallets: ScriptWallets, - ) -> Result { - trace!(target: "script", "start executing script"); - - let ContractBytecodeSome { abi, bytecode, .. } = contract; - - let bytecode = bytecode.into_bytes().ok_or_else(|| { - eyre::eyre!("expected fully linked bytecode, found unlinked bytecode") - })?; - - ensure_clean_constructor(&abi)?; - - let mut runner = self - .prepare_runner(script_config, sender, SimulationStage::Local, Some(script_wallets)) - .await?; - let (address, mut result) = runner.setup( - predeploy_libraries, - bytecode, - needs_setup(&abi), - script_config.sender_nonce, - self.broadcast, - script_config.evm_opts.fork_url.is_none(), - )?; - - let (func, calldata) = self.get_method_and_calldata(&abi)?; - script_config.called_function = Some(func); - - // Only call the method if `setUp()` succeeded. - if result.success { - let script_result = runner.script(address, calldata)?; - - result.success &= script_result.success; - result.gas_used = script_result.gas_used; - result.logs.extend(script_result.logs); - result.traces.extend(script_result.traces); - result.debug = script_result.debug; - result.labeled_addresses.extend(script_result.labeled_addresses); - result.returned = script_result.returned; - result.breakpoints = script_result.breakpoints; - - match (&mut result.transactions, script_result.transactions) { - (Some(txs), Some(new_txs)) => { - txs.extend(new_txs); - } - (None, Some(new_txs)) => { - result.transactions = Some(new_txs); - } - _ => {} - } - } - - Ok(result) - } - - /// Simulates onchain state by executing a list of transactions locally and persisting their - /// state. 
Returns the transactions and any CREATE2 contract address created. - pub async fn onchain_simulation( - &self, - transactions: BroadcastableTransactions, - script_config: &ScriptConfig, - decoder: &CallTraceDecoder, - contracts: &ContractsByArtifact, - ) -> Result> { - trace!(target: "script", "executing onchain simulation"); - - let runners = Arc::new( - self.build_runners(script_config) - .await? - .into_iter() - .map(|(rpc, runner)| (rpc, Arc::new(RwLock::new(runner)))) - .collect::>(), - ); - - if script_config.evm_opts.verbosity > 3 { - println!("=========================="); - println!("Simulated On-chain Traces:\n"); - } - - let address_to_abi: BTreeMap = decoder - .contracts - .iter() - .filter_map(|(addr, contract_id)| { - let contract_name = get_contract_name(contract_id); - if let Ok(Some((_, (abi, code)))) = - contracts.find_by_name_or_identifier(contract_name) - { - let info = ArtifactInfo { - contract_name: contract_name.to_string(), - contract_id: contract_id.to_string(), - abi, - code, - }; - return Some((*addr, info)); - } - None - }) - .collect(); - - let mut final_txs = VecDeque::new(); - - // Executes all transactions from the different forks concurrently. 
- let futs = transactions - .into_iter() - .map(|transaction| async { - let rpc = transaction.rpc.as_ref().expect("missing broadcastable tx rpc url"); - let mut runner = runners.get(rpc).expect("invalid rpc url").write(); - - let mut tx = transaction.transaction; - let result = runner - .simulate( - tx.from - .expect("transaction doesn't have a `from` address at execution time"), - tx.to, - tx.input.clone().into_input(), - tx.value, - ) - .wrap_err("Internal EVM error during simulation")?; - - if !result.success || result.traces.is_empty() { - return Ok((None, result.traces)); - } - - let created_contracts = result - .traces - .iter() - .flat_map(|(_, traces)| { - traces.nodes().iter().filter_map(|node| { - if node.trace.kind.is_any_create() { - return Some(AdditionalContract { - opcode: node.trace.kind, - address: node.trace.address, - init_code: node.trace.data.clone(), - }); - } - None - }) - }) - .collect(); - - // Simulate mining the transaction if the user passes `--slow`. - if self.slow { - runner.executor.env.block.number += U256::from(1); - } - - let is_fixed_gas_limit = tx.gas.is_some(); - match tx.gas { - // If tx.gas is already set that means it was specified in script - Some(gas) => { - println!("Gas limit was set in script to {gas}"); - } - // We inflate the gas used by the user specified percentage - None => { - let gas = U256::from(result.gas_used * self.gas_estimate_multiplier / 100); - tx.gas = Some(gas); - } - } - - let tx = TransactionWithMetadata::new( - tx, - transaction.rpc, - &result, - &address_to_abi, - decoder, - created_contracts, - is_fixed_gas_limit, - )?; - - eyre::Ok((Some(tx), result.traces)) - }) - .collect::>(); - - let mut abort = false; - for res in join_all(futs).await { - let (tx, traces) = res?; - - // Transaction will be `None`, if execution didn't pass. - if tx.is_none() || script_config.evm_opts.verbosity > 3 { - // Identify all contracts created during the call. 
- if traces.is_empty() { - eyre::bail!( - "forge script requires tracing enabled to collect created contracts" - ); - } - - for (_, trace) in &traces { - println!("{}", render_trace_arena(trace, decoder).await?); - } - } - - if let Some(tx) = tx { - final_txs.push_back(tx); - } else { - abort = true; - } - } - - if abort { - eyre::bail!("Simulated execution failed.") - } - - Ok(final_txs) - } - - /// Build the multiple runners from different forks. - async fn build_runners( - &self, - script_config: &ScriptConfig, - ) -> Result> { - let sender = script_config.evm_opts.sender; - - if !shell::verbosity().is_silent() { - let n = script_config.total_rpcs.len(); - let s = if n != 1 { "s" } else { "" }; - println!("\n## Setting up {n} EVM{s}."); - } - - let futs = script_config - .total_rpcs - .iter() - .map(|rpc| async { - let mut script_config = script_config.clone(); - script_config.evm_opts.fork_url = Some(rpc.clone()); - let runner = self - .prepare_runner(&mut script_config, sender, SimulationStage::OnChain, None) - .await?; - Ok((rpc.clone(), runner)) - }) - .collect::>(); - - join_all(futs).await.into_iter().collect() - } - - /// Creates the Runner that drives script execution - async fn prepare_runner( - &self, - script_config: &mut ScriptConfig, - sender: Address, - stage: SimulationStage, - script_wallets: Option, - ) -> Result { - trace!("preparing script runner"); - let env = script_config.evm_opts.evm_env().await?; - - // The db backend that serves all the data. - let db = match &script_config.evm_opts.fork_url { - Some(url) => match script_config.backends.get(url) { - Some(db) => db.clone(), - None => { - let fork = script_config.evm_opts.get_fork(&script_config.config, env.clone()); - let backend = Backend::spawn(fork); - script_config.backends.insert(url.clone(), backend.clone()); - backend - } - }, - None => { - // It's only really `None`, when we don't pass any `--fork-url`. 
And if so, there is - // no need to cache it, since there won't be any onchain simulation that we'd need - // to cache the backend for. - Backend::spawn(script_config.evm_opts.get_fork(&script_config.config, env.clone())) - } - }; - - // We need to enable tracing to decode contract names: local or external. - let mut builder = ExecutorBuilder::new() - .inspectors(|stack| stack.trace(true)) - .spec(script_config.config.evm_spec_id()) - .gas_limit(script_config.evm_opts.gas_limit()); - - if let SimulationStage::Local = stage { - builder = builder.inspectors(|stack| { - stack - .debug(self.debug) - .cheatcodes( - CheatsConfig::new( - &script_config.config, - script_config.evm_opts.clone(), - script_wallets, - ) - .into(), - ) - .enable_isolation(script_config.evm_opts.isolate) - }); - } - - Ok(ScriptRunner::new( - builder.build(env, db), - script_config.evm_opts.initial_balance, - sender, - )) - } -} diff --git a/crates/forge/bin/cmd/script/multi.rs b/crates/forge/bin/cmd/script/multi.rs deleted file mode 100644 index 874dd24ba636..000000000000 --- a/crates/forge/bin/cmd/script/multi.rs +++ /dev/null @@ -1,240 +0,0 @@ -use super::{ - receipts, - sequence::{sig_to_file_name, ScriptSequence, SensitiveScriptSequence, DRY_RUN_DIR}, - verify::VerifyBundle, - ScriptArgs, -}; -use alloy_primitives::Address; -use eyre::{ContextCompat, Report, Result, WrapErr}; -use foundry_cli::utils::now; -use foundry_common::{fs, provider::ethers::get_http_provider}; -use foundry_compilers::{artifacts::Libraries, ArtifactId}; -use foundry_config::Config; -use foundry_wallets::WalletSigner; -use futures::future::join_all; -use serde::{Deserialize, Serialize}; -use std::{ - collections::HashMap, - io::{BufWriter, Write}, - path::{Path, PathBuf}, - sync::Arc, -}; - -/// Holds the sequences of multiple chain deployments. 
-#[derive(Clone, Default, Serialize, Deserialize)] -pub struct MultiChainSequence { - pub deployments: Vec, - #[serde(skip)] - pub path: PathBuf, - #[serde(skip)] - pub sensitive_path: PathBuf, - pub timestamp: u64, -} - -/// Sensitive values from script sequences. -#[derive(Clone, Default, Serialize, Deserialize)] -pub struct SensitiveMultiChainSequence { - pub deployments: Vec, -} - -fn to_sensitive(sequence: &mut MultiChainSequence) -> SensitiveMultiChainSequence { - SensitiveMultiChainSequence { - deployments: sequence.deployments.iter_mut().map(|sequence| sequence.into()).collect(), - } -} - -impl Drop for MultiChainSequence { - fn drop(&mut self) { - self.deployments.iter_mut().for_each(|sequence| sequence.sort_receipts()); - self.save().expect("could not save multi deployment sequence"); - } -} - -impl MultiChainSequence { - pub fn new( - deployments: Vec, - sig: &str, - target: &ArtifactId, - config: &Config, - broadcasted: bool, - ) -> Result { - let (path, sensitive_path) = MultiChainSequence::get_paths( - &config.broadcast, - &config.cache_path, - sig, - target, - broadcasted, - )?; - - Ok(MultiChainSequence { deployments, path, sensitive_path, timestamp: now().as_secs() }) - } - - /// Gets paths in the formats - /// ./broadcast/multi/contract_filename[-timestamp]/sig.json and - /// ./cache/multi/contract_filename[-timestamp]/sig.json - pub fn get_paths( - broadcast: &Path, - cache: &Path, - sig: &str, - target: &ArtifactId, - broadcasted: bool, - ) -> Result<(PathBuf, PathBuf)> { - let mut broadcast = broadcast.to_path_buf(); - let mut cache = cache.to_path_buf(); - let mut common = PathBuf::new(); - - common.push("multi"); - - if !broadcasted { - common.push(DRY_RUN_DIR); - } - - let target_fname = target - .source - .file_name() - .wrap_err_with(|| format!("No filename for {:?}", target.source))? 
- .to_string_lossy(); - - common.push(format!("{target_fname}-latest")); - - broadcast.push(common.clone()); - cache.push(common); - - fs::create_dir_all(&broadcast)?; - fs::create_dir_all(&cache)?; - - let filename = format!("{}.json", sig_to_file_name(sig)); - - broadcast.push(filename.clone()); - cache.push(filename); - - Ok((broadcast, cache)) - } - - /// Loads the sequences for the multi chain deployment. - pub fn load(config: &Config, sig: &str, target: &ArtifactId) -> Result { - let (path, sensitive_path) = MultiChainSequence::get_paths( - &config.broadcast, - &config.cache_path, - sig, - target, - true, - )?; - let mut sequence: MultiChainSequence = foundry_compilers::utils::read_json_file(&path) - .wrap_err("Multi-chain deployment not found.")?; - let sensitive_sequence: SensitiveMultiChainSequence = - foundry_compilers::utils::read_json_file(&sensitive_path) - .wrap_err("Multi-chain deployment sensitive details not found.")?; - - sequence.deployments.iter_mut().enumerate().for_each(|(i, sequence)| { - sequence.fill_sensitive(&sensitive_sequence.deployments[i]); - }); - - sequence.path = path; - sequence.sensitive_path = sensitive_path; - - Ok(sequence) - } - - /// Saves the transactions as file if it's a standalone deployment. 
- pub fn save(&mut self) -> Result<()> { - self.timestamp = now().as_secs(); - - let sensitive_sequence: SensitiveMultiChainSequence = to_sensitive(self); - - // broadcast writes - //../Contract-latest/run.json - let mut writer = BufWriter::new(fs::create_file(&self.path)?); - serde_json::to_writer_pretty(&mut writer, &self)?; - writer.flush()?; - - //../Contract-[timestamp]/run.json - let path = self.path.to_string_lossy(); - let file = PathBuf::from(&path.replace("-latest", &format!("-{}", self.timestamp))); - fs::create_dir_all(file.parent().unwrap())?; - fs::copy(&self.path, &file)?; - - // cache writes - //../Contract-latest/run.json - let mut writer = BufWriter::new(fs::create_file(&self.sensitive_path)?); - serde_json::to_writer_pretty(&mut writer, &sensitive_sequence)?; - writer.flush()?; - - //../Contract-[timestamp]/run.json - let path = self.sensitive_path.to_string_lossy(); - let file = PathBuf::from(&path.replace("-latest", &format!("-{}", self.timestamp))); - fs::create_dir_all(file.parent().unwrap())?; - fs::copy(&self.sensitive_path, &file)?; - - println!("\nTransactions saved to: {}\n", self.path.display()); - println!("Sensitive details saved to: {}\n", self.sensitive_path.display()); - - Ok(()) - } -} - -impl ScriptArgs { - /// Given a [`MultiChainSequence`] with multiple sequences of different chains, it executes them - /// all in parallel. Supports `--resume` and `--verify`. 
- pub async fn multi_chain_deployment( - &self, - mut deployments: MultiChainSequence, - libraries: Libraries, - config: &Config, - verify: VerifyBundle, - signers: &HashMap, - ) -> Result<()> { - if !libraries.is_empty() { - eyre::bail!("Libraries are currently not supported on multi deployment setups."); - } - - if self.verify { - for sequence in &deployments.deployments { - sequence.verify_preflight_check(config, &verify)?; - } - } - - if self.resume { - trace!(target: "script", "resuming multi chain deployment"); - - let futs = deployments - .deployments - .iter_mut() - .map(|sequence| async move { - let rpc_url = sequence.rpc_url().unwrap(); - let provider = Arc::new(get_http_provider(rpc_url)); - receipts::wait_for_pending(provider, sequence).await - }) - .collect::>(); - - let errors = - join_all(futs).await.into_iter().filter(|res| res.is_err()).collect::>(); - - if !errors.is_empty() { - return Err(eyre::eyre!("{errors:?}")); - } - } - - trace!(target: "script", "broadcasting multi chain deployments"); - - let mut results: Vec> = Vec::new(); - - for sequence in deployments.deployments.iter_mut() { - let rpc_url = sequence.rpc_url().unwrap().to_string(); - let result = match self.send_transactions(sequence, &rpc_url, signers).await { - Ok(_) if self.verify => sequence.verify_contracts(config, verify.clone()).await, - Ok(_) => Ok(()), - Err(err) => Err(err), - }; - results.push(result); - } - - let errors = results.into_iter().filter(|res| res.is_err()).collect::>(); - - if !errors.is_empty() { - return Err(eyre::eyre!("{errors:?}")); - } - - Ok(()) - } -} diff --git a/crates/forge/bin/main.rs b/crates/forge/bin/main.rs index 5fdc7c408a83..0743d69e61aa 100644 --- a/crates/forge/bin/main.rs +++ b/crates/forge/bin/main.rs @@ -31,7 +31,7 @@ fn main() -> Result<()> { ForgeSubcommand::Script(cmd) => { // install the shell before executing the command foundry_common::shell::set_shell(foundry_common::shell::Shell::from_args( - cmd.opts.args.silent, + 
cmd.opts.silent, cmd.json, ))?; utils::block_on(cmd.run_script()) diff --git a/crates/forge/bin/opts.rs b/crates/forge/bin/opts.rs index 5e5cfda7c8b6..03ed4d551f4b 100644 --- a/crates/forge/bin/opts.rs +++ b/crates/forge/bin/opts.rs @@ -2,9 +2,10 @@ use crate::cmd::{ bind::BindArgs, build::BuildArgs, cache::CacheArgs, config, coverage, create::CreateArgs, debug::DebugArgs, doc::DocArgs, flatten, fmt::FmtArgs, geiger, generate, init::InitArgs, inspect, install::InstallArgs, remappings::RemappingArgs, remove::RemoveArgs, - script::ScriptArgs, selectors::SelectorsSubcommands, snapshot, test, tree, update, + selectors::SelectorsSubcommands, snapshot, test, tree, update, }; use clap::{Parser, Subcommand, ValueHint}; +use forge_script::ScriptArgs; use forge_verify::{VerifyArgs, VerifyCheckArgs}; use std::path::PathBuf; diff --git a/crates/forge/src/lib.rs b/crates/forge/src/lib.rs index 39854abac8a1..f6b9a54a8e68 100644 --- a/crates/forge/src/lib.rs +++ b/crates/forge/src/lib.rs @@ -207,23 +207,3 @@ impl TestOptionsBuilder { TestOptions::new(output, root, profiles, base_fuzz, base_invariant) } } - -mod utils2 { - use alloy_primitives::Address; - use ethers_core::types::BlockId; - use ethers_providers::{Middleware, Provider}; - use eyre::Context; - use foundry_common::types::{ToAlloy, ToEthers}; - - pub async fn next_nonce( - caller: Address, - provider_url: &str, - block: Option, - ) -> eyre::Result { - let provider = Provider::try_from(provider_url) - .wrap_err_with(|| format!("bad fork_url provider: {provider_url}"))?; - let res = provider.get_transaction_count(caller.to_ethers(), block).await?.to_alloy(); - res.try_into().map_err(Into::into) - } -} -pub use utils2::*; diff --git a/crates/forge/tests/cli/multi_script.rs b/crates/forge/tests/cli/multi_script.rs index d6f7628da169..121fa986269a 100644 --- a/crates/forge/tests/cli/multi_script.rs +++ b/crates/forge/tests/cli/multi_script.rs @@ -61,6 +61,5 @@ forgetest_async!(can_resume_multi_chain_script, |prj, cmd| { 
.broadcast(ScriptOutcome::MissingWallet) .load_private_keys(&[0, 1]) .await - .arg("--multi") .resume(ScriptOutcome::OkBroadcast); }); diff --git a/crates/forge/tests/cli/script.rs b/crates/forge/tests/cli/script.rs index b45ce52cf445..e212683c1a2b 100644 --- a/crates/forge/tests/cli/script.rs +++ b/crates/forge/tests/cli/script.rs @@ -1080,6 +1080,28 @@ interface Interface {} assert!(cmd.stdout_lossy().contains("Script ran successfully.")); }); +forgetest_async!(assert_can_detect_unlinked_target_with_libraries, |prj, cmd| { + let script = prj + .add_script( + "ScriptWithExtLib.s.sol", + r#" +library Lib { + function f() public {} +} + +contract Script { + function run() external { + Lib.f(); + } +} + "#, + ) + .unwrap(); + + cmd.arg("script").arg(script); + assert!(cmd.stdout_lossy().contains("Script ran successfully.")); +}); + forgetest_async!(assert_can_resume_with_additional_contracts, |prj, cmd| { let (_api, handle) = spawn(NodeConfig::test()).await; let mut tester = ScriptTester::new_broadcast(cmd, &handle.http_endpoint(), prj.root()); diff --git a/crates/script/Cargo.toml b/crates/script/Cargo.toml new file mode 100644 index 000000000000..5ae57482200e --- /dev/null +++ b/crates/script/Cargo.toml @@ -0,0 +1,50 @@ +[package] +name = "forge-script" +description = "Solidity scripting" + +version.workspace = true +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +forge-verify.workspace = true +foundry-cli.workspace = true +foundry-config.workspace = true +foundry-common.workspace = true +foundry-evm.workspace = true +foundry-debugger.workspace = true +foundry-cheatcodes.workspace = true +foundry-wallets.workspace = true +foundry-linking.workspace = true + +hex.workspace = true +serde.workspace = true +eyre.workspace = true +serde_json.workspace = true +dunce = "1" +foundry-compilers = { workspace = true, features = ["full"] } 
+tracing.workspace = true +clap = { version = "4", features = ["derive", "env", "unicode", "wrap_help"] } +semver = "1" +futures = "0.3" +async-recursion = "1.0.5" +alloy-primitives.workspace = true +alloy-dyn-abi.workspace = true +itertools.workspace = true +parking_lot = "0.12" +yansi = "0.5" +ethers-core.workspace = true +ethers-providers.workspace = true +ethers-signers.workspace = true +revm-inspectors.workspace = true +alloy-rpc-types.workspace = true +alloy-json-abi.workspace = true +dialoguer = { version = "0.11", default-features = false } +indicatif = "0.17" + +[dev-dependencies] +tempfile = "3" \ No newline at end of file diff --git a/crates/forge/bin/cmd/script/artifacts.rs b/crates/script/src/artifacts.rs similarity index 100% rename from crates/forge/bin/cmd/script/artifacts.rs rename to crates/script/src/artifacts.rs diff --git a/crates/script/src/broadcast.rs b/crates/script/src/broadcast.rs new file mode 100644 index 000000000000..fb21276c2838 --- /dev/null +++ b/crates/script/src/broadcast.rs @@ -0,0 +1,431 @@ +use crate::{ + build::LinkedBuildData, + execute::{ExecutionArtifacts, ExecutionData}, + sequence::ScriptSequenceKind, + verify::BroadcastedState, + ScriptArgs, ScriptConfig, +}; + +use super::receipts; +use alloy_primitives::{utils::format_units, Address, TxHash, U256}; +use ethers_core::types::{transaction::eip2718::TypedTransaction, BlockId}; +use ethers_providers::{JsonRpcClient, Middleware, Provider}; +use ethers_signers::Signer; +use eyre::{bail, Context, Result}; +use forge_verify::provider::VerificationProviderType; +use foundry_cheatcodes::ScriptWallets; +use foundry_cli::{ + init_progress, update_progress, + utils::{has_batch_support, has_different_gas_calc}, +}; +use foundry_common::{ + provider::ethers::{ + estimate_eip1559_fees, get_http_provider, try_get_http_provider, RetryProvider, + }, + shell, + types::{ToAlloy, ToEthers}, +}; +use foundry_config::Config; +use foundry_wallets::WalletSigner; +use futures::{future::join_all, 
StreamExt};
+use itertools::Itertools;
+use std::{
+    collections::{HashMap, HashSet},
+    sync::Arc,
+};
+
+pub async fn estimate_gas<T>(
+    tx: &mut TypedTransaction,
+    provider: &Provider<T>,
+    estimate_multiplier: u64,
+) -> Result<()>
+where
+    T: JsonRpcClient,
+{
+    // if already set, some RPC endpoints might simply return the gas value that is already
+    // set in the request and omit the estimate altogether, so we remove it here
+    let _ = tx.gas_mut().take();
+
+    tx.set_gas(
+        provider
+            .estimate_gas(tx, None)
+            .await
+            .wrap_err_with(|| format!("Failed to estimate gas for tx: {:?}", tx.sighash()))? *
+            estimate_multiplier /
+            100,
+    );
+    Ok(())
+}
+
+pub async fn next_nonce(
+    caller: Address,
+    provider_url: &str,
+    block: Option<BlockId>,
+) -> eyre::Result<u64> {
+    let provider = Provider::try_from(provider_url)
+        .wrap_err_with(|| format!("bad fork_url provider: {provider_url}"))?;
+    let res = provider.get_transaction_count(caller.to_ethers(), block).await?.to_alloy();
+    res.try_into().map_err(Into::into)
+}
+
+pub async fn send_transaction(
+    provider: Arc<RetryProvider>,
+    mut tx: TypedTransaction,
+    kind: SendTransactionKind<'_>,
+    sequential_broadcast: bool,
+    is_fixed_gas_limit: bool,
+    estimate_via_rpc: bool,
+    estimate_multiplier: u64,
+) -> Result<TxHash> {
+    let from = tx.from().expect("no sender");
+
+    if sequential_broadcast {
+        let nonce = provider.get_transaction_count(*from, None).await?;
+
+        let tx_nonce = tx.nonce().expect("no nonce");
+        if nonce != *tx_nonce {
+            bail!("EOA nonce changed unexpectedly while sending transactions. Expected {tx_nonce} got {nonce} from provider.")
+        }
+    }
+
+    // Chains which use `eth_estimateGas` are being sent sequentially and require their
+    // gas to be re-estimated right before broadcasting.
+ if !is_fixed_gas_limit && estimate_via_rpc { + estimate_gas(&mut tx, &provider, estimate_multiplier).await?; + } + + let pending = match kind { + SendTransactionKind::Unlocked(addr) => { + debug!("sending transaction from unlocked account {:?}: {:?}", addr, tx); + + // Submit the transaction + provider.send_transaction(tx, None).await? + } + SendTransactionKind::Raw(signer) => { + debug!("sending transaction: {:?}", tx); + + // Signing manually so we skip `fill_transaction` and its `eth_createAccessList` + // request. + let signature = + signer.sign_transaction(&tx).await.wrap_err("Failed to sign transaction")?; + + // Submit the raw transaction + provider.send_raw_transaction(tx.rlp_signed(&signature)).await? + } + }; + + Ok(pending.tx_hash().to_alloy()) +} + +/// How to send a single transaction +#[derive(Clone)] +pub enum SendTransactionKind<'a> { + Unlocked(Address), + Raw(&'a WalletSigner), +} + +/// Represents how to send _all_ transactions +pub enum SendTransactionsKind { + /// Send via `eth_sendTransaction` and rely on the `from` address being unlocked. + Unlocked(HashSet
<Address>),
+    /// Send a signed transaction via `eth_sendRawTransaction`
+    Raw(HashMap<Address, WalletSigner>),
+}
+
+impl SendTransactionsKind {
+    /// Returns the [`SendTransactionKind`] for the given address
+    ///
+    /// Returns an error if no matching signer is found or the address is not unlocked
+    pub fn for_sender(&self, addr: &Address) -> Result<SendTransactionKind<'_>> {
+        match self {
+            SendTransactionsKind::Unlocked(unlocked) => {
+                if !unlocked.contains(addr) {
+                    bail!("Sender address {:?} is not unlocked", addr)
+                }
+                Ok(SendTransactionKind::Unlocked(*addr))
+            }
+            SendTransactionsKind::Raw(wallets) => {
+                if let Some(wallet) = wallets.get(addr) {
+                    Ok(SendTransactionKind::Raw(wallet))
+                } else {
+                    bail!("No matching signer for {:?} found", addr)
+                }
+            }
+        }
+    }
+
+    /// How many signers are set
+    pub fn signers_count(&self) -> usize {
+        match self {
+            SendTransactionsKind::Unlocked(addr) => addr.len(),
+            SendTransactionsKind::Raw(signers) => signers.len(),
+        }
+    }
+}
+
+/// State after we have bundled all [TransactionWithMetadata] objects into a single
+/// [ScriptSequenceKind] object containing one or more script sequences.
+pub struct BundledState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, + pub build_data: LinkedBuildData, + pub execution_data: ExecutionData, + pub execution_artifacts: ExecutionArtifacts, + pub sequence: ScriptSequenceKind, +} + +impl BundledState { + pub async fn wait_for_pending(mut self) -> Result { + let futs = self + .sequence + .sequences_mut() + .iter_mut() + .map(|sequence| async move { + let rpc_url = sequence.rpc_url(); + let provider = Arc::new(get_http_provider(rpc_url)); + receipts::wait_for_pending(provider, sequence).await + }) + .collect::>(); + + let errors = join_all(futs).await.into_iter().filter_map(Result::err).collect::>(); + + self.sequence.save(true, false)?; + + if !errors.is_empty() { + return Err(eyre::eyre!("{}", errors.iter().format("\n"))); + } + + Ok(self) + } + + /// Broadcasts transactions from all sequences. + pub async fn broadcast(mut self) -> Result { + let required_addresses = self + .sequence + .sequences() + .iter() + .flat_map(|sequence| { + sequence + .typed_transactions() + .map(|tx| (*tx.from().expect("No sender for onchain transaction!")).to_alloy()) + }) + .collect::>(); + + if required_addresses.contains(&Config::DEFAULT_SENDER) { + eyre::bail!( + "You seem to be using Foundry's default sender. Be sure to set your own --sender." + ); + } + + let send_kind = if self.args.unlocked { + SendTransactionsKind::Unlocked(required_addresses) + } else { + let signers = self.script_wallets.into_multi_wallet().into_signers()?; + let mut missing_addresses = Vec::new(); + + for addr in &required_addresses { + if !signers.contains_key(addr) { + missing_addresses.push(addr); + } + } + + if !missing_addresses.is_empty() { + eyre::bail!( + "No associated wallet for addresses: {:?}. 
Unlocked wallets: {:?}", + missing_addresses, + signers.keys().collect::>() + ); + } + + SendTransactionsKind::Raw(signers) + }; + + for i in 0..self.sequence.sequences().len() { + let mut sequence = self.sequence.sequences_mut().get_mut(i).unwrap(); + + let provider = Arc::new(try_get_http_provider(sequence.rpc_url())?); + let already_broadcasted = sequence.receipts.len(); + + if already_broadcasted < sequence.transactions.len() { + // Make a one-time gas price estimation + let (gas_price, eip1559_fees) = match self.args.with_gas_price { + None => match sequence.transactions.front().unwrap().typed_tx() { + TypedTransaction::Eip1559(_) => { + let mut fees = estimate_eip1559_fees(&provider, Some(sequence.chain)) + .await + .wrap_err("Failed to estimate EIP1559 fees. This chain might not support EIP1559, try adding --legacy to your command.")?; + + if let Some(priority_gas_price) = self.args.priority_gas_price { + fees.1 = priority_gas_price.to_ethers(); + } + + (None, Some(fees)) + } + _ => (provider.get_gas_price().await.ok(), None), + }, + Some(gas_price) => (Some(gas_price.to_ethers()), None), + }; + + // Iterate through transactions, matching the `from` field with the associated + // wallet. Then send the transaction. 
Panics if an unknown `from`
+            let transactions = sequence
+                .transactions
+                .iter()
+                .skip(already_broadcasted)
+                .map(|tx_with_metadata| {
+                    let tx = tx_with_metadata.typed_tx();
+                    let from =
+                        (*tx.from().expect("No sender for onchain transaction!")).to_alloy();
+
+                    let kind = send_kind.for_sender(&from)?;
+                    let is_fixed_gas_limit = tx_with_metadata.is_fixed_gas_limit;
+
+                    let mut tx = tx.clone();
+
+                    tx.set_chain_id(sequence.chain);
+
+                    if let Some(gas_price) = gas_price {
+                        tx.set_gas_price(gas_price);
+                    } else {
+                        let eip1559_fees = eip1559_fees.expect("was set above");
+                        // fill gas price
+                        match tx {
+                            TypedTransaction::Eip1559(ref mut inner) => {
+                                inner.max_priority_fee_per_gas = Some(eip1559_fees.1);
+                                inner.max_fee_per_gas = Some(eip1559_fees.0);
+                            }
+                            _ => {
+                                // If we're here, it means that first transaction of the
+                                // sequence was EIP1559 transaction (see match statement above),
+                                // however, we can only have transactions of the same type in
+                                // the sequence.
+                                unreachable!()
+                            }
+                        }
+                    }
+
+                    Ok((tx, kind, is_fixed_gas_limit))
+                })
+                .collect::<Result<Vec<_>>>()?;
+
+            let estimate_via_rpc =
+                has_different_gas_calc(sequence.chain) || self.args.skip_simulation;
+
+            // We only wait for a transaction receipt before sending the next transaction, if
+            // there is more than one signer. There would be no way of assuring
+            // their order otherwise.
+            // Or if the chain does not support batched transactions (eg. Arbitrum).
+            // Or if we need to invoke eth_estimateGas before sending transactions.
+            let sequential_broadcast = estimate_via_rpc ||
+                self.args.slow ||
+                send_kind.signers_count() != 1 ||
+                !has_batch_support(sequence.chain);
+
+            let pb = init_progress!(transactions, "txes");
+
+            // We send transactions and wait for receipts in batches of 100, since some networks
+            // cannot handle more than that.
+ let batch_size = if sequential_broadcast { 1 } else { 100 }; + let mut index = already_broadcasted; + + for (batch_number, batch) in + transactions.chunks(batch_size).map(|f| f.to_vec()).enumerate() + { + let mut pending_transactions = vec![]; + + shell::println(format!( + "##\nSending transactions [{} - {}].", + batch_number * batch_size, + batch_number * batch_size + std::cmp::min(batch_size, batch.len()) - 1 + ))?; + for (tx, kind, is_fixed_gas_limit) in batch.into_iter() { + let tx_hash = send_transaction( + provider.clone(), + tx, + kind, + sequential_broadcast, + is_fixed_gas_limit, + estimate_via_rpc, + self.args.gas_estimate_multiplier, + ); + pending_transactions.push(tx_hash); + } + + if !pending_transactions.is_empty() { + let mut buffer = futures::stream::iter(pending_transactions).buffered(7); + + while let Some(tx_hash) = buffer.next().await { + let tx_hash = tx_hash.wrap_err("Failed to send transaction")?; + sequence.add_pending(index, tx_hash); + + // Checkpoint save + self.sequence.save(true, false)?; + sequence = self.sequence.sequences_mut().get_mut(i).unwrap(); + + update_progress!(pb, index - already_broadcasted); + index += 1; + } + + // Checkpoint save + self.sequence.save(true, false)?; + sequence = self.sequence.sequences_mut().get_mut(i).unwrap(); + + shell::println("##\nWaiting for receipts.")?; + receipts::clear_pendings(provider.clone(), sequence, None).await?; + } + // Checkpoint save + self.sequence.save(true, false)?; + sequence = self.sequence.sequences_mut().get_mut(i).unwrap(); + } + } + + shell::println("\n\n==========================")?; + shell::println("\nONCHAIN EXECUTION COMPLETE & SUCCESSFUL.")?; + + let (total_gas, total_gas_price, total_paid) = sequence.receipts.iter().fold( + (U256::ZERO, U256::ZERO, U256::ZERO), + |acc, receipt| { + let gas_used = receipt.gas_used.unwrap_or_default().to_alloy(); + let gas_price = receipt.effective_gas_price.unwrap_or_default().to_alloy(); + (acc.0 + gas_used, acc.1 + gas_price, acc.2 
+ gas_used * gas_price) + }, + ); + let paid = format_units(total_paid, 18).unwrap_or_else(|_| "N/A".to_string()); + let avg_gas_price = + format_units(total_gas_price / U256::from(sequence.receipts.len()), 9) + .unwrap_or_else(|_| "N/A".to_string()); + + shell::println(format!( + "Total Paid: {} ETH ({} gas * avg {} gwei)", + paid.trim_end_matches('0'), + total_gas, + avg_gas_price.trim_end_matches('0').trim_end_matches('.') + ))?; + } + + Ok(BroadcastedState { + args: self.args, + script_config: self.script_config, + build_data: self.build_data, + execution_data: self.execution_data, + execution_artifacts: self.execution_artifacts, + sequence: self.sequence, + }) + } + + pub fn verify_preflight_check(&self) -> Result<()> { + for sequence in self.sequence.sequences() { + if self.args.verifier.verifier == VerificationProviderType::Etherscan && + self.script_config + .config + .get_etherscan_api_key(Some(sequence.chain.into())) + .is_none() + { + eyre::bail!("Missing etherscan key for chain {}", sequence.chain); + } + } + + Ok(()) + } +} diff --git a/crates/script/src/build.rs b/crates/script/src/build.rs new file mode 100644 index 000000000000..4dc78b0cdc56 --- /dev/null +++ b/crates/script/src/build.rs @@ -0,0 +1,248 @@ +use crate::{execute::LinkedState, ScriptArgs, ScriptConfig}; + +use alloy_primitives::{Address, Bytes}; +use eyre::{Context, OptionExt, Result}; +use foundry_cheatcodes::ScriptWallets; +use foundry_cli::utils::get_cached_entry_by_name; +use foundry_common::{ + compile::{self, ContractSources, ProjectCompiler}, + ContractsByArtifact, +}; +use foundry_compilers::{ + artifacts::{BytecodeObject, ContractBytecode, ContractBytecodeSome, Libraries}, + cache::SolFilesCache, + contracts::ArtifactContracts, + info::ContractInfo, + ArtifactId, +}; +use foundry_linking::{LinkOutput, Linker}; +use std::str::FromStr; + +/// Container for the compiled contracts. 
+pub struct BuildData { + /// Linker which can be used to link contracts, owns [ArtifactContracts] map. + pub linker: Linker, + /// Id of target contract artifact. + pub target: ArtifactId, + /// Source files of the contracts. Used by debugger. + pub sources: ContractSources, +} + +impl BuildData { + /// Links the build data with given libraries, using sender and nonce to compute addresses of + /// missing libraries. + pub fn link( + self, + known_libraries: Libraries, + sender: Address, + nonce: u64, + ) -> Result { + let link_output = + self.linker.link_with_nonce_or_address(known_libraries, sender, nonce, &self.target)?; + + LinkedBuildData::new(link_output, self) + } + + /// Links the build data with the given libraries. Expects supplied libraries set being enough + /// to fully link target contract. + pub fn link_with_libraries(self, libraries: Libraries) -> Result { + let link_output = + self.linker.link_with_nonce_or_address(libraries, Address::ZERO, 0, &self.target)?; + + if !link_output.libs_to_deploy.is_empty() { + eyre::bail!("incomplete libraries set"); + } + + LinkedBuildData::new(link_output, self) + } +} + +/// Container for the linked contracts and their dependencies +pub struct LinkedBuildData { + /// Original build data, might be used to relink this object with different libraries. + pub build_data: BuildData, + /// Known fully linked contracts. + pub highlevel_known_contracts: ArtifactContracts, + /// Libraries used to link the contracts. + pub libraries: Libraries, + /// Libraries that need to be deployed by sender before script execution. + pub predeploy_libraries: Vec, +} + +impl LinkedBuildData { + pub fn new(link_output: LinkOutput, build_data: BuildData) -> Result { + let highlevel_known_contracts = build_data + .linker + .get_linked_artifacts(&link_output.libraries)? 
+ .iter() + .filter_map(|(id, contract)| { + ContractBytecodeSome::try_from(ContractBytecode::from(contract.clone())) + .ok() + .map(|tc| (id.clone(), tc)) + }) + .filter(|(_, tc)| tc.bytecode.object.is_non_empty_bytecode()) + .collect(); + + Ok(Self { + build_data, + highlevel_known_contracts, + libraries: link_output.libraries, + predeploy_libraries: link_output.libs_to_deploy, + }) + } + + /// Flattens the contracts into (`id` -> (`JsonAbi`, `Vec`)) pairs + pub fn get_flattened_contracts(&self, deployed_code: bool) -> ContractsByArtifact { + ContractsByArtifact( + self.highlevel_known_contracts + .iter() + .filter_map(|(id, c)| { + let bytecode = if deployed_code { + c.deployed_bytecode.bytes() + } else { + c.bytecode.bytes() + }; + bytecode.cloned().map(|code| (id.clone(), (c.abi.clone(), code.into()))) + }) + .collect(), + ) + } + + /// Fetches target bytecode from linked contracts. + pub fn get_target_contract(&self) -> Result { + self.highlevel_known_contracts + .get(&self.build_data.target) + .cloned() + .ok_or_eyre("target not found in linked artifacts") + } +} + +/// First state basically containing only inputs of the user. +pub struct PreprocessedState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, +} + +impl PreprocessedState { + /// Parses user input and compiles the contracts depending on script target. + /// After compilation, finds exact [ArtifactId] of the target contract. + pub fn compile(self) -> Result { + let Self { args, script_config, script_wallets } = self; + let project = script_config.config.project()?; + let filters = args.skip.clone().unwrap_or_default(); + + let mut target_name = args.target_contract.clone(); + + // If we've received correct path, use it as target_path + // Otherwise, parse input as : and use the path from the contract info, if + // present. 
+        let target_path = if let Ok(path) = dunce::canonicalize(&args.path) {
+            Some(path)
+        } else {
+            let contract = ContractInfo::from_str(&args.path)?;
+            target_name = Some(contract.name.clone());
+            if let Some(path) = contract.path {
+                Some(dunce::canonicalize(path)?)
+            } else {
+                None
+            }
+        };
+
+        // If we've found target path above, only compile it.
+        // Otherwise, compile everything to match contract by name later.
+        let output = if let Some(target_path) = target_path.clone() {
+            compile::compile_target_with_filter(
+                &target_path,
+                &project,
+                args.opts.silent,
+                args.verify,
+                filters,
+            )
+        } else if !project.paths.has_input_files() {
+            Err(eyre::eyre!("The project doesn't have any input files. Make sure the `script` directory is configured properly in foundry.toml. Otherwise, provide the path to the file."))
+        } else {
+            ProjectCompiler::new().compile(&project)
+        }?;
+
+        // If we still don't have target path, find it by name in the compilation cache.
+        let target_path = if let Some(target_path) = target_path {
+            target_path
+        } else {
+            let target_name = target_name.clone().expect("was set above");
+            let cache = SolFilesCache::read_joined(&project.paths)
+                .wrap_err("Could not open compiler cache")?;
+            let (path, _) = get_cached_entry_by_name(&cache, &target_name)
+                .wrap_err("Could not find target contract in cache")?;
+            path
+        };
+
+        let target_path = project.root().join(target_path);
+
+        let mut target_id: Option<ArtifactId> = None;
+
+        // Find target artifact id by name and path in compilation artifacts.
+ for (id, contract) in output.artifact_ids().filter(|(id, _)| id.source == target_path) { + if let Some(name) = &target_name { + if id.name != *name { + continue; + } + } else if contract.abi.as_ref().map_or(true, |abi| abi.is_empty()) || + contract.bytecode.as_ref().map_or(true, |b| match &b.object { + BytecodeObject::Bytecode(b) => b.is_empty(), + BytecodeObject::Unlinked(_) => false, + }) + { + // Ignore contracts with empty abi or linked bytecode of length 0 which are + // interfaces/abstract contracts/libraries. + continue; + } + + if let Some(target) = target_id { + // We might have multiple artifacts for the same contract but with different + // solc versions. Their names will have form of {name}.0.X.Y, so we are + // stripping versions off before comparing them. + let target_name = target.name.split('.').next().unwrap(); + let id_name = id.name.split('.').next().unwrap(); + if target_name != id_name { + eyre::bail!("Multiple contracts in the target path. Please specify the contract name with `--tc ContractName`") + } + } + target_id = Some(id); + } + + let sources = ContractSources::from_project_output(&output, project.root())?; + let contracts = output.into_artifacts().collect(); + let target = target_id.ok_or_eyre("Could not find target contract")?; + let linker = Linker::new(project.root(), contracts); + + Ok(CompiledState { + args, + script_config, + script_wallets, + build_data: BuildData { linker, target, sources }, + }) + } +} + +/// State after we have determined and compiled target contract to be executed. +pub struct CompiledState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, + pub build_data: BuildData, +} + +impl CompiledState { + /// Uses provided sender address to compute library addresses and link contracts with them. 
+ pub fn link(self) -> Result { + let Self { args, script_config, script_wallets, build_data } = self; + + let sender = script_config.evm_opts.sender; + let nonce = script_config.sender_nonce; + let known_libraries = script_config.config.libraries_with_remappings()?; + let build_data = build_data.link(known_libraries, sender, nonce)?; + + Ok(LinkedState { args, script_config, script_wallets, build_data }) + } +} diff --git a/crates/script/src/execute.rs b/crates/script/src/execute.rs new file mode 100644 index 000000000000..cdf4353d3860 --- /dev/null +++ b/crates/script/src/execute.rs @@ -0,0 +1,522 @@ +use crate::{ + build::{CompiledState, LinkedBuildData}, + simulate::PreSimulationState, + ScriptArgs, ScriptConfig, +}; + +use super::{runner::ScriptRunner, JsonResult, NestedValue, ScriptResult}; +use alloy_dyn_abi::FunctionExt; +use alloy_json_abi::{Function, InternalType, JsonAbi}; +use alloy_primitives::{Address, Bytes, U64}; +use alloy_rpc_types::request::TransactionRequest; +use async_recursion::async_recursion; +use ethers_providers::Middleware; +use eyre::Result; +use foundry_cheatcodes::ScriptWallets; +use foundry_cli::utils::{ensure_clean_constructor, needs_setup}; +use foundry_common::{ + fmt::{format_token, format_token_raw}, + provider::ethers::{get_http_provider, RpcUrl}, + shell, ContractsByArtifact, +}; +use foundry_compilers::artifacts::ContractBytecodeSome; +use foundry_config::{Config, NamedChain}; +use foundry_debugger::Debugger; +use foundry_evm::{ + decode::{decode_console_logs, RevertDecoder}, + inspectors::cheatcodes::{BroadcastableTransaction, BroadcastableTransactions}, + traces::{ + identifier::{SignaturesIdentifier, TraceIdentifiers}, + render_trace_arena, CallTraceDecoder, CallTraceDecoderBuilder, TraceKind, + }, +}; +use futures::future::join_all; +use itertools::Itertools; +use std::collections::{HashMap, HashSet}; +use yansi::Paint; + +/// State after linking, contains the linked build data along with library addresses and optional 
+/// array of libraries that need to be predeployed. +pub struct LinkedState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, + pub build_data: LinkedBuildData, +} + +/// Container for data we need for execution which can only be obtained after linking stage. +pub struct ExecutionData { + /// Function to call. + pub func: Function, + /// Calldata to pass to the target contract. + pub calldata: Bytes, + /// Bytecode of the target contract. + pub bytecode: Bytes, + /// ABI of the target contract. + pub abi: JsonAbi, +} + +impl LinkedState { + /// Given linked and compiled artifacts, prepares data we need for execution. + /// This includes the function to call and the calldata to pass to it. + pub async fn prepare_execution(self) -> Result { + let Self { args, script_config, script_wallets, build_data } = self; + + let ContractBytecodeSome { abi, bytecode, .. } = build_data.get_target_contract()?; + + let bytecode = bytecode.into_bytes().ok_or_else(|| { + eyre::eyre!("expected fully linked bytecode, found unlinked bytecode") + })?; + + let (func, calldata) = args.get_method_and_calldata(&abi)?; + + ensure_clean_constructor(&abi)?; + + Ok(PreExecutionState { + args, + script_config, + script_wallets, + build_data, + execution_data: ExecutionData { func, calldata, bytecode, abi }, + }) + } +} + +/// Same as [LinkedState], but also contains [ExecutionData]. +pub struct PreExecutionState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, + pub build_data: LinkedBuildData, + pub execution_data: ExecutionData, +} + +impl PreExecutionState { + /// Executes the script and returns the state after execution. + /// Might require executing script twice in cases when we determine sender from execution. 
+ #[async_recursion] + pub async fn execute(mut self) -> Result { + let mut runner = self + .script_config + .get_runner_with_cheatcodes(self.script_wallets.clone(), self.args.debug) + .await?; + let mut result = self.execute_with_runner(&mut runner).await?; + + // If we have a new sender from execution, we need to use it to deploy libraries and relink + // contracts. + if let Some(new_sender) = self.maybe_new_sender(result.transactions.as_ref())? { + self.script_config.update_sender(new_sender).await?; + + // Rollback to rerun linking with the new sender. + let state = CompiledState { + args: self.args, + script_config: self.script_config, + script_wallets: self.script_wallets, + build_data: self.build_data.build_data, + }; + + return state.link()?.prepare_execution().await?.execute().await; + } + + // Add library deployment transactions to broadcastable transactions list. + if let Some(txs) = result.transactions.take() { + result.transactions = Some( + self.build_data + .predeploy_libraries + .iter() + .enumerate() + .map(|(i, bytes)| BroadcastableTransaction { + rpc: self.script_config.evm_opts.fork_url.clone(), + transaction: TransactionRequest { + from: Some(self.script_config.evm_opts.sender), + input: Some(bytes.clone()).into(), + nonce: Some(U64::from(self.script_config.sender_nonce + i as u64)), + ..Default::default() + }, + }) + .chain(txs) + .collect(), + ); + } + + Ok(ExecutedState { + args: self.args, + script_config: self.script_config, + script_wallets: self.script_wallets, + build_data: self.build_data, + execution_data: self.execution_data, + execution_result: result, + }) + } + + /// Executes the script using the provided runner and returns the [ScriptResult]. 
+ pub async fn execute_with_runner(&self, runner: &mut ScriptRunner) -> Result { + let (address, mut setup_result) = runner.setup( + &self.build_data.predeploy_libraries, + self.execution_data.bytecode.clone(), + needs_setup(&self.execution_data.abi), + self.script_config.sender_nonce, + self.args.broadcast, + self.script_config.evm_opts.fork_url.is_none(), + )?; + + if setup_result.success { + let script_result = runner.script(address, self.execution_data.calldata.clone())?; + + setup_result.success &= script_result.success; + setup_result.gas_used = script_result.gas_used; + setup_result.logs.extend(script_result.logs); + setup_result.traces.extend(script_result.traces); + setup_result.debug = script_result.debug; + setup_result.labeled_addresses.extend(script_result.labeled_addresses); + setup_result.returned = script_result.returned; + setup_result.breakpoints = script_result.breakpoints; + + match (&mut setup_result.transactions, script_result.transactions) { + (Some(txs), Some(new_txs)) => { + txs.extend(new_txs); + } + (None, Some(new_txs)) => { + setup_result.transactions = Some(new_txs); + } + _ => {} + } + } + + Ok(setup_result) + } + + /// It finds the deployer from the running script and uses it to predeploy libraries. + /// + /// If there are multiple candidate addresses, it skips everything and lets `--sender` deploy + /// them instead. + fn maybe_new_sender( + &self, + transactions: Option<&BroadcastableTransactions>, + ) -> Result> { + let mut new_sender = None; + + if let Some(txs) = transactions { + // If the user passed a `--sender` don't check anything. + if !self.build_data.predeploy_libraries.is_empty() && + self.args.evm_opts.sender.is_none() + { + for tx in txs.iter() { + if tx.transaction.to.is_none() { + let sender = tx.transaction.from.expect("no sender"); + if let Some(ns) = new_sender { + if sender != ns { + shell::println("You have more than one deployer who could predeploy libraries. 
Using `--sender` instead.")?; + return Ok(None); + } + } else if sender != self.script_config.evm_opts.sender { + new_sender = Some(sender); + } + } + } + } + } + Ok(new_sender) + } +} + +/// Container for information about RPC-endpoints used during script execution. +pub struct RpcData { + /// Unique list of rpc urls present. + pub total_rpcs: HashSet, + /// If true, one of the transactions did not have a rpc. + pub missing_rpc: bool, +} + +impl RpcData { + /// Iterates over script transactions and collects RPC urls. + fn from_transactions(txs: &BroadcastableTransactions) -> Self { + let missing_rpc = txs.iter().any(|tx| tx.rpc.is_none()); + let total_rpcs = + txs.iter().filter_map(|tx| tx.rpc.as_ref().cloned()).collect::>(); + + Self { total_rpcs, missing_rpc } + } + + /// Returns true if script might be multi-chain. + /// Returns false positive in case when missing rpc is the same as the only rpc present. + pub fn is_multi_chain(&self) -> bool { + self.total_rpcs.len() > 1 || (self.missing_rpc && !self.total_rpcs.is_empty()) + } + + /// Checks if all RPCs support EIP-3855. Prints a warning if not. + async fn check_shanghai_support(&self) -> Result<()> { + let chain_ids = self.total_rpcs.iter().map(|rpc| async move { + let provider = get_http_provider(rpc); + let id = provider.get_chainid().await.ok()?; + let id_u64: u64 = id.try_into().ok()?; + NamedChain::try_from(id_u64).ok() + }); + + let chains = join_all(chain_ids).await; + let iter = chains.iter().flatten().map(|c| (c.supports_shanghai(), c)); + if iter.clone().any(|(s, _)| !s) { + let msg = format!( + "\ +EIP-3855 is not supported in one or more of the RPCs used. +Unsupported Chain IDs: {}. +Contracts deployed with a Solidity version equal or higher than 0.8.20 might not work properly. 
+For more information, please see https://eips.ethereum.org/EIPS/eip-3855", + iter.filter(|(supported, _)| !supported) + .map(|(_, chain)| *chain as u64) + .format(", ") + ); + shell::println(Paint::yellow(msg))?; + } + Ok(()) + } +} + +/// Container for data being collected after execution. +pub struct ExecutionArtifacts { + /// Mapping from contract to its runtime code. + pub known_contracts: ContractsByArtifact, + /// Trace decoder used to decode traces. + pub decoder: CallTraceDecoder, + /// Return values from the execution result. + pub returns: HashMap, + /// Information about RPC endpoints used during script execution. + pub rpc_data: RpcData, +} + +/// State after the script has been executed. +pub struct ExecutedState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, + pub build_data: LinkedBuildData, + pub execution_data: ExecutionData, + pub execution_result: ScriptResult, +} + +impl ExecutedState { + /// Collects the data we need for simulation and various post-execution tasks. + pub async fn prepare_simulation(self) -> Result { + let returns = self.get_returns()?; + + let known_contracts = self.build_data.get_flattened_contracts(true); + let decoder = self.build_trace_decoder(&known_contracts)?; + + let txs = self.execution_result.transactions.clone().unwrap_or_default(); + let rpc_data = RpcData::from_transactions(&txs); + + if rpc_data.is_multi_chain() { + shell::eprintln(format!( + "{}", + Paint::yellow( + "Multi chain deployment is still under development. Use with caution." + ) + ))?; + if !self.build_data.libraries.is_empty() { + eyre::bail!( + "Multi chain deployment does not support library linking at the moment." 
+ ) + } + } + rpc_data.check_shanghai_support().await?; + + Ok(PreSimulationState { + args: self.args, + script_config: self.script_config, + script_wallets: self.script_wallets, + build_data: self.build_data, + execution_data: self.execution_data, + execution_result: self.execution_result, + execution_artifacts: ExecutionArtifacts { known_contracts, decoder, returns, rpc_data }, + }) + } + + /// Builds [CallTraceDecoder] from the execution result and known contracts. + fn build_trace_decoder( + &self, + known_contracts: &ContractsByArtifact, + ) -> Result { + let mut decoder = CallTraceDecoderBuilder::new() + .with_labels(self.execution_result.labeled_addresses.clone()) + .with_verbosity(self.script_config.evm_opts.verbosity) + .with_known_contracts(known_contracts) + .with_signature_identifier(SignaturesIdentifier::new( + Config::foundry_cache_dir(), + self.script_config.config.offline, + )?) + .build(); + + let mut identifier = TraceIdentifiers::new().with_local(known_contracts).with_etherscan( + &self.script_config.config, + self.script_config.evm_opts.get_remote_chain_id(), + )?; + + // Decoding traces using etherscan is costly as we run into rate limits, + // causing scripts to run for a very long time unnecessarily. + // Therefore, we only try and use etherscan if the user has provided an API key. + let should_use_etherscan_traces = self.script_config.config.etherscan_api_key.is_some(); + if !should_use_etherscan_traces { + identifier.etherscan = None; + } + + for (_, trace) in &self.execution_result.traces { + decoder.identify(trace, &mut identifier); + } + + Ok(decoder) + } + + /// Collects the return values from the execution result. 
+ fn get_returns(&self) -> Result> { + let mut returns = HashMap::new(); + let returned = &self.execution_result.returned; + let func = &self.execution_data.func; + + match func.abi_decode_output(returned, false) { + Ok(decoded) => { + for (index, (token, output)) in decoded.iter().zip(&func.outputs).enumerate() { + let internal_type = + output.internal_type.clone().unwrap_or(InternalType::Other { + contract: None, + ty: "unknown".to_string(), + }); + + let label = if !output.name.is_empty() { + output.name.to_string() + } else { + index.to_string() + }; + + returns.insert( + label, + NestedValue { + internal_type: internal_type.to_string(), + value: format_token_raw(token), + }, + ); + } + } + Err(_) => { + shell::println(format!("{returned:?}"))?; + } + } + + Ok(returns) + } +} + +impl PreSimulationState { + pub fn show_json(&self) -> Result<()> { + let result = &self.execution_result; + + let console_logs = decode_console_logs(&result.logs); + let output = JsonResult { + logs: console_logs, + gas_used: result.gas_used, + returns: self.execution_artifacts.returns.clone(), + }; + let j = serde_json::to_string(&output)?; + shell::println(j)?; + + if !self.execution_result.success { + return Err(eyre::eyre!( + "script failed: {}", + RevertDecoder::new().decode(&self.execution_result.returned[..], None) + )); + } + + Ok(()) + } + + pub async fn show_traces(&self) -> Result<()> { + let verbosity = self.script_config.evm_opts.verbosity; + let func = &self.execution_data.func; + let result = &self.execution_result; + let decoder = &self.execution_artifacts.decoder; + + if !result.success || verbosity > 3 { + if result.traces.is_empty() { + warn!(verbosity, "no traces"); + } + + shell::println("Traces:")?; + for (kind, trace) in &result.traces { + let should_include = match kind { + TraceKind::Setup => verbosity >= 5, + TraceKind::Execution => verbosity > 3, + _ => false, + } || !result.success; + + if should_include { + shell::println(render_trace_arena(trace, 
decoder).await?)?; + } + } + shell::println(String::new())?; + } + + if result.success { + shell::println(format!("{}", Paint::green("Script ran successfully.")))?; + } + + if self.script_config.evm_opts.fork_url.is_none() { + shell::println(format!("Gas used: {}", result.gas_used))?; + } + + if result.success && !result.returned.is_empty() { + shell::println("\n== Return ==")?; + match func.abi_decode_output(&result.returned, false) { + Ok(decoded) => { + for (index, (token, output)) in decoded.iter().zip(&func.outputs).enumerate() { + let internal_type = + output.internal_type.clone().unwrap_or(InternalType::Other { + contract: None, + ty: "unknown".to_string(), + }); + + let label = if !output.name.is_empty() { + output.name.to_string() + } else { + index.to_string() + }; + shell::println(format!( + "{}: {internal_type} {}", + label.trim_end(), + format_token(token) + ))?; + } + } + Err(_) => { + shell::println(format!("{:x?}", (&result.returned)))?; + } + } + } + + let console_logs = decode_console_logs(&result.logs); + if !console_logs.is_empty() { + shell::println("\n== Logs ==")?; + for log in console_logs { + shell::println(format!(" {log}"))?; + } + } + + if !result.success { + return Err(eyre::eyre!( + "script failed: {}", + RevertDecoder::new().decode(&result.returned[..], None) + )); + } + + Ok(()) + } + + pub fn run_debugger(&self) -> Result<()> { + let mut debugger = Debugger::builder() + .debug_arenas(self.execution_result.debug.as_deref().unwrap_or_default()) + .decoder(&self.execution_artifacts.decoder) + .sources(self.build_data.build_data.sources.clone()) + .breakpoints(self.execution_result.breakpoints.clone()) + .build(); + debugger.try_run()?; + Ok(()) + } +} diff --git a/crates/forge/bin/cmd/script/mod.rs b/crates/script/src/lib.rs similarity index 62% rename from crates/forge/bin/cmd/script/mod.rs rename to crates/script/src/lib.rs index 98c3e223db76..6d8fb87b8329 100644 --- a/crates/forge/bin/cmd/script/mod.rs +++ b/crates/script/src/lib.rs 
@@ -1,67 +1,68 @@ -use super::build::BuildArgs; -use alloy_dyn_abi::FunctionExt; -use alloy_json_abi::{Function, InternalType, JsonAbi}; -use alloy_primitives::{Address, Bytes, Log, U256, U64}; -use alloy_rpc_types::request::TransactionRequest; +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +#[macro_use] +extern crate tracing; + +use self::transaction::AdditionalContract; +use crate::runner::ScriptRunner; +use alloy_json_abi::{Function, JsonAbi}; +use alloy_primitives::{Address, Bytes, Log, U256}; +use broadcast::next_nonce; +use build::PreprocessedState; use clap::{Parser, ValueHint}; use dialoguer::Confirm; -use ethers_providers::{Http, Middleware}; +use ethers_signers::Signer; use eyre::{ContextCompat, Result, WrapErr}; -use forge::{ - backend::Backend, - debug::DebugArena, - decode::decode_console_logs, - opts::EvmOpts, - traces::{ - identifier::SignaturesIdentifier, render_trace_arena, CallTraceDecoder, - CallTraceDecoderBuilder, TraceKind, Traces, - }, -}; use forge_verify::RetryArgs; +use foundry_cli::{opts::CoreBuildArgs, utils::LoadConfig}; use foundry_common::{ abi::{encode_function_args, get_func}, + compile::SkipBuildFilter, errors::UnlinkedByteCode, evm::{Breakpoints, EvmArgs}, - fmt::{format_token, format_token_raw}, provider::ethers::RpcUrl, - shell, ContractsByArtifact, CONTRACT_MAX_SIZE, SELECTOR_LEN, -}; -use foundry_compilers::{ - artifacts::{ContractBytecodeSome, Libraries}, - ArtifactId, + shell, + types::ToAlloy, + CONTRACT_MAX_SIZE, SELECTOR_LEN, }; +use foundry_compilers::{artifacts::ContractBytecodeSome, ArtifactId}; use foundry_config::{ figment, figment::{ value::{Dict, Map}, Metadata, Profile, Provider, }, - Config, NamedChain, + Config, }; use foundry_evm::{ + backend::Backend, constants::DEFAULT_CREATE2_DEPLOYER, - decode::RevertDecoder, - inspectors::cheatcodes::{BroadcastableTransaction, BroadcastableTransactions}, - traces::identifier::TraceIdentifiers, + debug::DebugArena, + executors::ExecutorBuilder, + inspectors::{ 
+ cheatcodes::{BroadcastableTransactions, ScriptWallets}, + CheatsConfig, + }, + opts::EvmOpts, + traces::Traces, }; use foundry_wallets::MultiWalletOpts; -use futures::future; -use itertools::Itertools; use serde::{Deserialize, Serialize}; -use std::collections::{BTreeMap, HashMap, HashSet}; +use std::collections::{BTreeMap, HashMap}; use yansi::Paint; mod artifacts; mod broadcast; mod build; -mod cmd; -mod executor; -mod multi; +mod execute; +mod multi_sequence; mod providers; mod receipts; +mod resume; mod runner; mod sequence; -pub mod transaction; +mod simulate; +mod transaction; mod verify; // Loads project's figment and merges the build cli arguments into it @@ -174,8 +175,14 @@ pub struct ScriptArgs { )] pub with_gas_price: Option, + /// Skip building files whose names contain the given filter. + /// + /// `test` and `script` are aliases for `.t.sol` and `.s.sol`. + #[arg(long, num_args(1..))] + pub skip: Option>, + #[command(flatten)] - pub opts: BuildArgs, + pub opts: CoreBuildArgs, #[command(flatten)] pub wallets: MultiWalletOpts, @@ -193,235 +200,106 @@ pub struct ScriptArgs { // === impl ScriptArgs === impl ScriptArgs { - fn decode_traces( - &self, - script_config: &ScriptConfig, - result: &mut ScriptResult, - known_contracts: &ContractsByArtifact, - ) -> Result { - let mut decoder = CallTraceDecoderBuilder::new() - .with_labels(result.labeled_addresses.clone()) - .with_verbosity(script_config.evm_opts.verbosity) - .with_known_contracts(known_contracts) - .with_signature_identifier(SignaturesIdentifier::new( - Config::foundry_cache_dir(), - script_config.config.offline, - )?) - .build(); - let mut identifier = TraceIdentifiers::new() - .with_local(known_contracts) - .with_etherscan(&script_config.config, script_config.evm_opts.get_remote_chain_id())?; - - // Decoding traces using etherscan is costly as we run into rate limits, - // causing scripts to run for a very long time unnecessarily. 
- // Therefore, we only try and use etherscan if the user has provided an API key. - let should_use_etherscan_traces = script_config.config.etherscan_api_key.is_some(); - if !should_use_etherscan_traces { - identifier.etherscan = None; - } + async fn preprocess(self) -> Result { + let script_wallets = + ScriptWallets::new(self.wallets.get_multi_wallet().await?, self.evm_opts.sender); - for (_, trace) in &mut result.traces { - decoder.identify(trace, &mut identifier); - } - Ok(decoder) - } + let (config, mut evm_opts) = self.load_config_and_evm_opts_emit_warnings()?; - fn get_returns( - &self, - script_config: &ScriptConfig, - returned: &Bytes, - ) -> Result> { - let func = script_config.called_function.as_ref().expect("There should be a function."); - let mut returns = HashMap::new(); - - match func.abi_decode_output(returned, false) { - Ok(decoded) => { - for (index, (token, output)) in decoded.iter().zip(&func.outputs).enumerate() { - let internal_type = - output.internal_type.clone().unwrap_or(InternalType::Other { - contract: None, - ty: "unknown".to_string(), - }); - - let label = if !output.name.is_empty() { - output.name.to_string() - } else { - index.to_string() - }; - - returns.insert( - label, - NestedValue { - internal_type: internal_type.to_string(), - value: format_token_raw(token), - }, - ); - } - } - Err(_) => { - shell::println(format!("{returned:?}"))?; - } + if let Some(sender) = self.maybe_load_private_key()? 
{ + evm_opts.sender = sender; } - Ok(returns) - } - - async fn show_traces( - &self, - script_config: &ScriptConfig, - decoder: &CallTraceDecoder, - result: &mut ScriptResult, - ) -> Result<()> { - let verbosity = script_config.evm_opts.verbosity; - let func = script_config.called_function.as_ref().expect("There should be a function."); + let script_config = ScriptConfig::new(config, evm_opts).await?; - if !result.success || verbosity > 3 { - if result.traces.is_empty() { - warn!(verbosity, "no traces"); - } - - shell::println("Traces:")?; - for (kind, trace) in &result.traces { - let should_include = match kind { - TraceKind::Setup => verbosity >= 5, - TraceKind::Execution => verbosity > 3, - _ => false, - } || !result.success; + Ok(PreprocessedState { args: self, script_config, script_wallets }) + } - if should_include { - shell::println(render_trace_arena(trace, decoder).await?)?; - } - } - shell::println(String::new())?; + /// Executes the script + pub async fn run_script(self) -> Result<()> { + trace!(target: "script", "executing script command"); + + // Drive state machine to point at which we have everything needed for simulation/resuming. + let pre_simulation = self + .preprocess() + .await? + .compile()? + .link()? + .prepare_execution() + .await? + .execute() + .await? + .prepare_simulation() + .await?; + + if pre_simulation.args.debug { + pre_simulation.run_debugger()?; } - if result.success { - shell::println(format!("{}", Paint::green("Script ran successfully.")))?; + if pre_simulation.args.json { + pre_simulation.show_json()?; + } else { + pre_simulation.show_traces().await?; } - if script_config.evm_opts.fork_url.is_none() { - shell::println(format!("Gas used: {}", result.gas_used))?; + // Ensure that we have transactions to simulate/broadcast, otherwise exit early to avoid + // hard error. 
+ if pre_simulation.execution_result.transactions.as_ref().map_or(true, |txs| txs.is_empty()) + { + return Ok(()); } - if result.success && !result.returned.is_empty() { - shell::println("\n== Return ==")?; - match func.abi_decode_output(&result.returned, false) { - Ok(decoded) => { - for (index, (token, output)) in decoded.iter().zip(&func.outputs).enumerate() { - let internal_type = - output.internal_type.clone().unwrap_or(InternalType::Other { - contract: None, - ty: "unknown".to_string(), - }); - - let label = if !output.name.is_empty() { - output.name.to_string() - } else { - index.to_string() - }; - shell::println(format!( - "{}: {internal_type} {}", - label.trim_end(), - format_token(token) - ))?; - } - } - Err(_) => { - shell::println(format!("{:x?}", (&result.returned)))?; - } - } + // Check if there are any missing RPCs and exit early to avoid hard error. + if pre_simulation.execution_artifacts.rpc_data.missing_rpc { + shell::println("\nIf you wish to simulate on-chain transactions pass a RPC URL.")?; + return Ok(()); } - let console_logs = decode_console_logs(&result.logs); - if !console_logs.is_empty() { - shell::println("\n== Logs ==")?; - for log in console_logs { - shell::println(format!(" {log}"))?; - } - } + // Move from `PreSimulationState` to `BundledState` either by resuming or simulating + // transactions. + let bundled = if pre_simulation.args.resume || + (pre_simulation.args.verify && !pre_simulation.args.broadcast) + { + pre_simulation.resume().await? + } else { + pre_simulation.args.check_contract_sizes( + &pre_simulation.execution_result, + &pre_simulation.build_data.highlevel_known_contracts, + )?; - if !result.success { - return Err(eyre::eyre!( - "script failed: {}", - RevertDecoder::new().decode(&result.returned[..], None) - )); - } + pre_simulation.fill_metadata().await?.bundle().await? + }; - Ok(()) - } + // Exit early in case user didn't provide any broadcast/verify related flags. 
+ if !bundled.args.broadcast && !bundled.args.resume && !bundled.args.verify { + shell::println("\nSIMULATION COMPLETE. To broadcast these transactions, add --broadcast and wallet configuration(s) to the previous command. See forge script --help for more.")?; + return Ok(()); + } - fn show_json(&self, script_config: &ScriptConfig, result: &ScriptResult) -> Result<()> { - let returns = self.get_returns(script_config, &result.returned)?; + // Exit early if something is wrong with verification options. + if bundled.args.verify { + bundled.verify_preflight_check()?; + } - let console_logs = decode_console_logs(&result.logs); - let output = JsonResult { logs: console_logs, gas_used: result.gas_used, returns }; - let j = serde_json::to_string(&output)?; - shell::println(j)?; + // Wait for pending txes and broadcast others. + let broadcasted = bundled.wait_for_pending().await?.broadcast().await?; - if !result.success { - return Err(eyre::eyre!( - "script failed: {}", - RevertDecoder::new().decode(&result.returned[..], None) - )); + if broadcasted.args.verify { + broadcasted.verify().await?; } Ok(()) } - /// It finds the deployer from the running script and uses it to predeploy libraries. - /// - /// If there are multiple candidate addresses, it skips everything and lets `--sender` deploy - /// them instead. - fn maybe_new_sender( - &self, - evm_opts: &EvmOpts, - transactions: Option<&BroadcastableTransactions>, - predeploy_libraries: &[Bytes], - ) -> Result> { - let mut new_sender = None; - - if let Some(txs) = transactions { - // If the user passed a `--sender` don't check anything. - if !predeploy_libraries.is_empty() && self.evm_opts.sender.is_none() { - for tx in txs.iter() { - if tx.transaction.to.is_none() { - let sender = tx.transaction.from.expect("no sender"); - if let Some(ns) = new_sender { - if sender != ns { - shell::println("You have more than one deployer who could predeploy libraries. 
Using `--sender` instead.")?; - return Ok(None); - } - } else if sender != evm_opts.sender { - new_sender = Some(sender); - } - } - } - } - } - Ok(new_sender) - } - - /// Helper for building the transactions for any libraries that need to be deployed ahead of - /// linking - fn create_deploy_transactions( - &self, - from: Address, - nonce: u64, - data: &[Bytes], - fork_url: &Option, - ) -> BroadcastableTransactions { - data.iter() - .enumerate() - .map(|(i, bytes)| BroadcastableTransaction { - rpc: fork_url.clone(), - transaction: TransactionRequest { - from: Some(from), - input: Some(bytes.clone()).into(), - nonce: Some(U64::from(nonce + i as u64)), - ..Default::default() - }, - }) - .collect() + /// In case the user has loaded *only* one private-key, we can assume that he's using it as the + /// `--sender` + fn maybe_load_private_key(&self) -> Result> { + let maybe_sender = self + .wallets + .private_keys()? + .filter(|pks| pks.len() == 1) + .map(|pks| pks.first().unwrap().address().to_alloy()); + Ok(maybe_sender) } /// Returns the Function and calldata based on the signature @@ -593,6 +471,26 @@ pub struct ScriptResult { pub breakpoints: Breakpoints, } +impl ScriptResult { + pub fn get_created_contracts(&self) -> Vec { + self.traces + .iter() + .flat_map(|(_, traces)| { + traces.nodes().iter().filter_map(|node| { + if node.trace.kind.is_any_create() { + return Some(AdditionalContract { + opcode: node.trace.kind, + address: node.trace.address, + init_code: node.trace.data.clone(), + }); + } + None + }) + }) + .collect() + } +} + #[derive(Serialize, Deserialize)] struct JsonResult { logs: Vec, @@ -606,91 +504,101 @@ pub struct NestedValue { pub value: String, } -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct ScriptConfig { pub config: Config, pub evm_opts: EvmOpts, pub sender_nonce: u64, /// Maps a rpc url to a backend pub backends: HashMap, - /// Script target contract - pub target_contract: Option, - /// Function called by the script - pub 
called_function: Option, - /// Unique list of rpc urls present - pub total_rpcs: HashSet, - /// If true, one of the transactions did not have a rpc - pub missing_rpc: bool, - /// Should return some debug information - pub debug: bool, } impl ScriptConfig { - fn collect_rpcs(&mut self, txs: &BroadcastableTransactions) { - self.missing_rpc = txs.iter().any(|tx| tx.rpc.is_none()); - - self.total_rpcs - .extend(txs.iter().filter_map(|tx| tx.rpc.as_ref().cloned()).collect::>()); - - if let Some(rpc) = &self.evm_opts.fork_url { - self.total_rpcs.insert(rpc.clone()); - } + pub async fn new(config: Config, evm_opts: EvmOpts) -> Result { + let sender_nonce = if let Some(fork_url) = evm_opts.fork_url.as_ref() { + next_nonce(evm_opts.sender, fork_url, None).await? + } else { + // dapptools compatibility + 1 + }; + Ok(Self { config, evm_opts, sender_nonce, backends: HashMap::new() }) } - fn has_multiple_rpcs(&self) -> bool { - self.total_rpcs.len() > 1 + pub async fn update_sender(&mut self, sender: Address) -> Result<()> { + self.sender_nonce = if let Some(fork_url) = self.evm_opts.fork_url.as_ref() { + next_nonce(sender, fork_url, None).await? + } else { + // dapptools compatibility + 1 + }; + self.evm_opts.sender = sender; + Ok(()) } - /// Certain features are disabled for multi chain deployments, and if tried, will return - /// error. [library support] - fn check_multi_chain_constraints(&self, libraries: &Libraries) -> Result<()> { - if self.has_multiple_rpcs() || (self.missing_rpc && !self.total_rpcs.is_empty()) { - shell::eprintln(format!( - "{}", - Paint::yellow( - "Multi chain deployment is still under development. Use with caution." - ) - ))?; - if !libraries.libs.is_empty() { - eyre::bail!( - "Multi chain deployment does not support library linking at the moment." 
- ) - } - } - Ok(()) + async fn get_runner(&mut self) -> Result { + self._get_runner(None, false).await } - /// Returns the script target contract - fn target_contract(&self) -> &ArtifactId { - self.target_contract.as_ref().expect("should exist after building") + async fn get_runner_with_cheatcodes( + &mut self, + script_wallets: ScriptWallets, + debug: bool, + ) -> Result { + self._get_runner(Some(script_wallets), debug).await } - /// Checks if the RPCs used point to chains that support EIP-3855. - /// If not, warns the user. - async fn check_shanghai_support(&self) -> Result<()> { - let chain_ids = self.total_rpcs.iter().map(|rpc| async move { - let provider = ethers_providers::Provider::::try_from(rpc).ok()?; - let id = provider.get_chainid().await.ok()?; - let id_u64: u64 = id.try_into().ok()?; - NamedChain::try_from(id_u64).ok() - }); + async fn _get_runner( + &mut self, + script_wallets: Option, + debug: bool, + ) -> Result { + trace!("preparing script runner"); + let env = self.evm_opts.evm_env().await?; + + let db = if let Some(fork_url) = self.evm_opts.fork_url.as_ref() { + match self.backends.get(fork_url) { + Some(db) => db.clone(), + None => { + let fork = self.evm_opts.get_fork(&self.config, env.clone()); + let backend = Backend::spawn(fork); + self.backends.insert(fork_url.clone(), backend.clone()); + backend + } + } + } else { + // It's only really `None`, when we don't pass any `--fork-url`. And if so, there is + // no need to cache it, since there won't be any onchain simulation that we'd need + // to cache the backend for. + Backend::spawn(None) + }; - let chains = future::join_all(chain_ids).await; - let iter = chains.iter().flatten().map(|c| (c.supports_shanghai(), c)); - if iter.clone().any(|(s, _)| !s) { - let msg = format!( - "\ -EIP-3855 is not supported in one or more of the RPCs used. -Unsupported Chain IDs: {}. -Contracts deployed with a Solidity version equal or higher than 0.8.20 might not work properly. 
-For more information, please see https://eips.ethereum.org/EIPS/eip-3855", - iter.filter(|(supported, _)| !supported) - .map(|(_, chain)| *chain as u64) - .format(", ") - ); - shell::println(Paint::yellow(msg))?; + // We need to enable tracing to decode contract names: local or external. + let mut builder = ExecutorBuilder::new() + .inspectors(|stack| stack.trace(true)) + .spec(self.config.evm_spec_id()) + .gas_limit(self.evm_opts.gas_limit()); + + if let Some(script_wallets) = script_wallets { + builder = builder.inspectors(|stack| { + stack + .debug(debug) + .cheatcodes( + CheatsConfig::new( + &self.config, + self.evm_opts.clone(), + Some(script_wallets), + ) + .into(), + ) + .enable_isolation(self.evm_opts.isolate) + }); } - Ok(()) + + Ok(ScriptRunner::new( + builder.build(env, db), + self.evm_opts.initial_balance, + self.evm_opts.sender, + )) } } @@ -698,7 +606,7 @@ For more information, please see https://eips.ethereum.org/EIPS/eip-3855", mod tests { use super::*; use foundry_cli::utils::LoadConfig; - use foundry_config::UnresolvedEnvVarError; + use foundry_config::{NamedChain, UnresolvedEnvVarError}; use std::fs; use tempfile::tempdir; diff --git a/crates/script/src/multi_sequence.rs b/crates/script/src/multi_sequence.rs new file mode 100644 index 000000000000..ea6dfd0d4c5f --- /dev/null +++ b/crates/script/src/multi_sequence.rs @@ -0,0 +1,154 @@ +use super::sequence::{sig_to_file_name, ScriptSequence, SensitiveScriptSequence, DRY_RUN_DIR}; +use eyre::{ContextCompat, Result, WrapErr}; +use foundry_cli::utils::now; +use foundry_common::fs; +use foundry_compilers::ArtifactId; +use foundry_config::Config; +use serde::{Deserialize, Serialize}; +use std::{ + io::{BufWriter, Write}, + path::PathBuf, +}; + +/// Holds the sequences of multiple chain deployments. 
+#[derive(Clone, Default, Serialize, Deserialize)] +pub struct MultiChainSequence { + pub deployments: Vec, + #[serde(skip)] + pub path: PathBuf, + #[serde(skip)] + pub sensitive_path: PathBuf, + pub timestamp: u64, +} + +/// Sensitive values from script sequences. +#[derive(Clone, Default, Serialize, Deserialize)] +pub struct SensitiveMultiChainSequence { + pub deployments: Vec, +} + +impl SensitiveMultiChainSequence { + fn from_multi_sequence(sequence: MultiChainSequence) -> SensitiveMultiChainSequence { + SensitiveMultiChainSequence { + deployments: sequence.deployments.into_iter().map(|sequence| sequence.into()).collect(), + } + } +} + +impl MultiChainSequence { + pub fn new( + deployments: Vec, + sig: &str, + target: &ArtifactId, + config: &Config, + dry_run: bool, + ) -> Result { + let (path, sensitive_path) = MultiChainSequence::get_paths(config, sig, target, dry_run)?; + + Ok(MultiChainSequence { deployments, path, sensitive_path, timestamp: now().as_secs() }) + } + + /// Gets paths in the formats + /// ./broadcast/multi/contract_filename[-timestamp]/sig.json and + /// ./cache/multi/contract_filename[-timestamp]/sig.json + pub fn get_paths( + config: &Config, + sig: &str, + target: &ArtifactId, + dry_run: bool, + ) -> Result<(PathBuf, PathBuf)> { + let mut broadcast = config.broadcast.to_path_buf(); + let mut cache = config.cache_path.to_path_buf(); + let mut common = PathBuf::new(); + + common.push("multi"); + + if dry_run { + common.push(DRY_RUN_DIR); + } + + let target_fname = target + .source + .file_name() + .wrap_err_with(|| format!("No filename for {:?}", target.source))? 
+ .to_string_lossy(); + + common.push(format!("{target_fname}-latest")); + + broadcast.push(common.clone()); + cache.push(common); + + fs::create_dir_all(&broadcast)?; + fs::create_dir_all(&cache)?; + + let filename = format!("{}.json", sig_to_file_name(sig)); + + broadcast.push(filename.clone()); + cache.push(filename); + + Ok((broadcast, cache)) + } + + /// Loads the sequences for the multi chain deployment. + pub fn load(config: &Config, sig: &str, target: &ArtifactId, dry_run: bool) -> Result { + let (path, sensitive_path) = MultiChainSequence::get_paths(config, sig, target, dry_run)?; + let mut sequence: MultiChainSequence = foundry_compilers::utils::read_json_file(&path) + .wrap_err("Multi-chain deployment not found.")?; + let sensitive_sequence: SensitiveMultiChainSequence = + foundry_compilers::utils::read_json_file(&sensitive_path) + .wrap_err("Multi-chain deployment sensitive details not found.")?; + + sequence.deployments.iter_mut().enumerate().for_each(|(i, sequence)| { + sequence.fill_sensitive(&sensitive_sequence.deployments[i]); + }); + + sequence.path = path; + sequence.sensitive_path = sensitive_path; + + Ok(sequence) + } + + /// Saves the transactions as file if it's a standalone deployment. 
+ pub fn save(&mut self, silent: bool, save_ts: bool) -> Result<()> { + self.deployments.iter_mut().for_each(|sequence| sequence.sort_receipts()); + + self.timestamp = now().as_secs(); + + let sensitive_sequence = SensitiveMultiChainSequence::from_multi_sequence(self.clone()); + + // broadcast writes + //../Contract-latest/run.json + let mut writer = BufWriter::new(fs::create_file(&self.path)?); + serde_json::to_writer_pretty(&mut writer, &self)?; + writer.flush()?; + + if save_ts { + //../Contract-[timestamp]/run.json + let path = self.path.to_string_lossy(); + let file = PathBuf::from(&path.replace("-latest", &format!("-{}", self.timestamp))); + fs::create_dir_all(file.parent().unwrap())?; + fs::copy(&self.path, &file)?; + } + + // cache writes + //../Contract-latest/run.json + let mut writer = BufWriter::new(fs::create_file(&self.sensitive_path)?); + serde_json::to_writer_pretty(&mut writer, &sensitive_sequence)?; + writer.flush()?; + + if save_ts { + //../Contract-[timestamp]/run.json + let path = self.sensitive_path.to_string_lossy(); + let file = PathBuf::from(&path.replace("-latest", &format!("-{}", self.timestamp))); + fs::create_dir_all(file.parent().unwrap())?; + fs::copy(&self.sensitive_path, &file)?; + } + + if !silent { + println!("\nTransactions saved to: {}\n", self.path.display()); + println!("Sensitive details saved to: {}\n", self.sensitive_path.display()); + } + + Ok(()) + } +} diff --git a/crates/forge/bin/cmd/script/providers.rs b/crates/script/src/providers.rs similarity index 100% rename from crates/forge/bin/cmd/script/providers.rs rename to crates/script/src/providers.rs diff --git a/crates/forge/bin/cmd/script/receipts.rs b/crates/script/src/receipts.rs similarity index 100% rename from crates/forge/bin/cmd/script/receipts.rs rename to crates/script/src/receipts.rs diff --git a/crates/script/src/resume.rs b/crates/script/src/resume.rs new file mode 100644 index 000000000000..4f704ed601fa --- /dev/null +++ b/crates/script/src/resume.rs @@ 
-0,0 +1,106 @@ +use crate::{broadcast::BundledState, simulate::PreSimulationState}; + +use super::{ + multi_sequence::MultiChainSequence, + sequence::{ScriptSequence, ScriptSequenceKind}, +}; +use ethers_providers::Middleware; +use eyre::Result; +use foundry_common::provider::ethers::try_get_http_provider; +use foundry_compilers::artifacts::Libraries; +use std::sync::Arc; + +impl PreSimulationState { + /// Tries loading the resumed state from the cache files, skipping simulation stage. + pub async fn resume(mut self) -> Result { + if self.execution_artifacts.rpc_data.missing_rpc { + eyre::bail!("Missing `--fork-url` field.") + } + + let chain = match self.execution_artifacts.rpc_data.total_rpcs.len() { + 2.. => None, + 1 => { + let fork_url = self.execution_artifacts.rpc_data.total_rpcs.iter().next().unwrap(); + + let provider = Arc::new(try_get_http_provider(fork_url)?); + Some(provider.get_chainid().await?.as_u64()) + } + 0 => eyre::bail!("No RPC URLs"), + }; + + let sequence = match self.try_load_sequence(chain, false) { + Ok(sequence) => sequence, + Err(_) => { + // If the script was simulated, but there was no attempt to broadcast yet, + // try to read the script sequence from the `dry-run/` folder + let mut sequence = self.try_load_sequence(chain, true)?; + + // If sequence was in /dry-run, Update its paths so it is not saved into /dry-run + // this time as we are about to broadcast it. + sequence.update_paths_to_broadcasted( + &self.script_config.config, + &self.args.sig, + &self.build_data.build_data.target, + )?; + + sequence.save(true, true)?; + sequence + } + }; + + match sequence { + ScriptSequenceKind::Single(ref seq) => { + // We might have predeployed libraries from the broadcasting, so we need to + // relink the contracts with them, since their mapping is not included in the solc + // cache files. 
+ self.build_data = self + .build_data + .build_data + .link_with_libraries(Libraries::parse(&seq.libraries)?)?; + } + // Library linking is not supported for multi-chain sequences + ScriptSequenceKind::Multi(_) => {} + } + + let Self { + args, + script_config, + script_wallets, + build_data, + execution_data, + execution_result: _, + execution_artifacts, + } = self; + + Ok(BundledState { + args, + script_config, + script_wallets, + build_data, + execution_data, + execution_artifacts, + sequence, + }) + } + + fn try_load_sequence(&self, chain: Option, dry_run: bool) -> Result { + if let Some(chain) = chain { + let sequence = ScriptSequence::load( + &self.script_config.config, + &self.args.sig, + &self.build_data.build_data.target, + chain, + dry_run, + )?; + Ok(ScriptSequenceKind::Single(sequence)) + } else { + let sequence = MultiChainSequence::load( + &self.script_config.config, + &self.args.sig, + &self.build_data.build_data.target, + dry_run, + )?; + Ok(ScriptSequenceKind::Multi(sequence)) + } + } +} diff --git a/crates/forge/bin/cmd/script/runner.rs b/crates/script/src/runner.rs similarity index 98% rename from crates/forge/bin/cmd/script/runner.rs rename to crates/script/src/runner.rs index 96937bfdbc97..59f6402d368a 100644 --- a/crates/forge/bin/cmd/script/runner.rs +++ b/crates/script/src/runner.rs @@ -1,21 +1,15 @@ use super::ScriptResult; use alloy_primitives::{Address, Bytes, U256}; use eyre::Result; -use forge::{ +use foundry_config::Config; +use foundry_evm::{ constants::CALLER, executors::{CallResult, DeployResult, EvmError, ExecutionErr, Executor, RawCallResult}, revm::interpreter::{return_ok, InstructionResult}, traces::{TraceKind, Traces}, }; -use foundry_config::Config; use yansi::Paint; -/// Represents which simulation stage is the script execution at. 
-pub enum SimulationStage { - Local, - OnChain, -} - /// Drives script execution #[derive(Debug)] pub struct ScriptRunner { diff --git a/crates/forge/bin/cmd/script/sequence.rs b/crates/script/src/sequence.rs similarity index 72% rename from crates/forge/bin/cmd/script/sequence.rs rename to crates/script/src/sequence.rs index 0e97862bcf10..6a78d1ac4f58 100644 --- a/crates/forge/bin/cmd/script/sequence.rs +++ b/crates/script/src/sequence.rs @@ -1,22 +1,19 @@ -use super::NestedValue; -use crate::cmd::{ - init::get_commit_hash, - script::{ - transaction::{wrapper, AdditionalContract, TransactionWithMetadata}, - verify::VerifyBundle, - }, +use super::{multi_sequence::MultiChainSequence, NestedValue}; +use crate::{ + transaction::{wrapper, AdditionalContract, TransactionWithMetadata}, + verify::VerifyBundle, }; use alloy_primitives::{Address, TxHash}; use ethers_core::types::{transaction::eip2718::TypedTransaction, TransactionReceipt}; use eyre::{ContextCompat, Result, WrapErr}; use forge_verify::provider::VerificationProviderType; -use foundry_cli::utils::now; +use foundry_cli::utils::{now, Git}; use foundry_common::{ fs, shell, types::{ToAlloy, ToEthers}, SELECTOR_LEN, }; -use foundry_compilers::{artifacts::Libraries, ArtifactId}; +use foundry_compilers::ArtifactId; use foundry_config::Config; use serde::{Deserialize, Serialize}; use std::{ @@ -26,6 +23,67 @@ use std::{ }; use yansi::Paint; +/// Returns the commit hash of the project if it exists +pub fn get_commit_hash(root: &Path) -> Option { + Git::new(root).commit_hash(true, "HEAD").ok() +} + +pub enum ScriptSequenceKind { + Single(ScriptSequence), + Multi(MultiChainSequence), +} + +impl ScriptSequenceKind { + pub fn save(&mut self, silent: bool, save_ts: bool) -> Result<()> { + match self { + ScriptSequenceKind::Single(sequence) => sequence.save(silent, save_ts), + ScriptSequenceKind::Multi(sequence) => sequence.save(silent, save_ts), + } + } + + pub fn sequences(&self) -> &[ScriptSequence] { + match self { + 
ScriptSequenceKind::Single(sequence) => std::slice::from_ref(sequence), + ScriptSequenceKind::Multi(sequence) => &sequence.deployments, + } + } + + pub fn sequences_mut(&mut self) -> &mut [ScriptSequence] { + match self { + ScriptSequenceKind::Single(sequence) => std::slice::from_mut(sequence), + ScriptSequenceKind::Multi(sequence) => &mut sequence.deployments, + } + } + /// Updates underlying sequence paths to not be under /dry-run directory. + pub fn update_paths_to_broadcasted( + &mut self, + config: &Config, + sig: &str, + target: &ArtifactId, + ) -> Result<()> { + match self { + ScriptSequenceKind::Single(sequence) => { + sequence.paths = + Some(ScriptSequence::get_paths(config, sig, target, sequence.chain, false)?); + } + ScriptSequenceKind::Multi(sequence) => { + (sequence.path, sequence.sensitive_path) = + MultiChainSequence::get_paths(config, sig, target, false)?; + } + }; + + Ok(()) + } +} + +impl Drop for ScriptSequenceKind { + fn drop(&mut self) { + if let Err(err) = self.save(false, true) { + error!(?err, "could not save deployment sequence"); + } + } +} + pub const DRY_RUN_DIR: &str = "dry-run"; /// Helper that saves the transactions sequence and its state on which transactions have been @@ -38,21 +96,19 @@ pub struct ScriptSequence { pub libraries: Vec, pub pending: Vec, #[serde(skip)] - pub path: PathBuf, - #[serde(skip)] - pub sensitive_path: PathBuf, + /// Contains paths to the sequence files + /// None if sequence should not be saved to disk (e.g. part of a multi-chain sequence) + pub paths: Option<(PathBuf, PathBuf)>, pub returns: HashMap, pub timestamp: u64, pub chain: u64, - /// If `True`, the sequence belongs to a `MultiChainSequence` and won't save to disk as usual. 
- pub multi: bool, pub commit: Option, } /// Sensitive values from the transactions in a script sequence #[derive(Clone, Default, Serialize, Deserialize)] pub struct SensitiveTransactionMetadata { - pub rpc: Option, + pub rpc: String, } /// Sensitive info from the script sequence which is saved into the cache folder @@ -61,8 +117,8 @@ pub struct SensitiveScriptSequence { pub transactions: VecDeque, } -impl From<&mut ScriptSequence> for SensitiveScriptSequence { - fn from(sequence: &mut ScriptSequence) -> Self { +impl From for SensitiveScriptSequence { + fn from(sequence: ScriptSequence) -> Self { SensitiveScriptSequence { transactions: sequence .transactions @@ -74,59 +130,16 @@ impl From<&mut ScriptSequence> for SensitiveScriptSequence { } impl ScriptSequence { - pub fn new( - transactions: VecDeque, - returns: HashMap, - sig: &str, - target: &ArtifactId, - config: &Config, - broadcasted: bool, - is_multi: bool, - ) -> Result { - let chain = config.chain.unwrap_or_default().id(); - - let (path, sensitive_path) = ScriptSequence::get_paths( - &config.broadcast, - &config.cache_path, - sig, - target, - chain, - broadcasted && !is_multi, - )?; - - let commit = get_commit_hash(&config.__root.0); - - Ok(ScriptSequence { - transactions, - returns, - receipts: vec![], - pending: vec![], - path, - sensitive_path, - timestamp: now().as_secs(), - libraries: vec![], - chain, - multi: is_multi, - commit, - }) - } - /// Loads The sequence for the corresponding json file pub fn load( config: &Config, sig: &str, target: &ArtifactId, chain_id: u64, - broadcasted: bool, + dry_run: bool, ) -> Result { - let (path, sensitive_path) = ScriptSequence::get_paths( - &config.broadcast, - &config.cache_path, - sig, - target, - chain_id, - broadcasted, - )?; + let (path, sensitive_path) = + ScriptSequence::get_paths(config, sig, target, chain_id, dry_run)?; let mut script_sequence: Self = foundry_compilers::utils::read_json_file(&path) .wrap_err(format!("Deployment not found for chain 
`{chain_id}`."))?; @@ -138,41 +151,52 @@ impl ScriptSequence { script_sequence.fill_sensitive(&sensitive_script_sequence); - script_sequence.path = path; - script_sequence.sensitive_path = sensitive_path; + script_sequence.paths = Some((path, sensitive_path)); Ok(script_sequence) } /// Saves the transactions as file if it's a standalone deployment. - pub fn save(&mut self) -> Result<()> { - if self.multi || self.transactions.is_empty() { + /// `save_ts` should be set to true for checkpoint updates, which might happen many times and + /// could result in us saving many identical files. + pub fn save(&mut self, silent: bool, save_ts: bool) -> Result<()> { + self.sort_receipts(); + + if self.transactions.is_empty() { return Ok(()) } + let Some((path, sensitive_path)) = self.paths.clone() else { return Ok(()) }; + self.timestamp = now().as_secs(); let ts_name = format!("run-{}.json", self.timestamp); - let sensitive_script_sequence: SensitiveScriptSequence = self.into(); + let sensitive_script_sequence: SensitiveScriptSequence = self.clone().into(); // broadcast folder writes //../run-latest.json - let mut writer = BufWriter::new(fs::create_file(&self.path)?); + let mut writer = BufWriter::new(fs::create_file(&path)?); serde_json::to_writer_pretty(&mut writer, &self)?; writer.flush()?; - //../run-[timestamp].json - fs::copy(&self.path, self.path.with_file_name(&ts_name))?; + if save_ts { + //../run-[timestamp].json + fs::copy(&path, path.with_file_name(&ts_name))?; + } // cache folder writes //../run-latest.json - let mut writer = BufWriter::new(fs::create_file(&self.sensitive_path)?); + let mut writer = BufWriter::new(fs::create_file(&sensitive_path)?); serde_json::to_writer_pretty(&mut writer, &sensitive_script_sequence)?; writer.flush()?; - //../run-[timestamp].json - fs::copy(&self.sensitive_path, self.sensitive_path.with_file_name(&ts_name))?; + if save_ts { + //../run-[timestamp].json + fs::copy(&sensitive_path, sensitive_path.with_file_name(&ts_name))?; + } - 
shell::println(format!("\nTransactions saved to: {}\n", self.path.display()))?; - shell::println(format!("Sensitive values saved to: {}\n", self.sensitive_path.display()))?; + if !silent { + shell::println(format!("\nTransactions saved to: {}\n", path.display()))?; + shell::println(format!("Sensitive values saved to: {}\n", sensitive_path.display()))?; + } Ok(()) } @@ -197,36 +221,24 @@ impl ScriptSequence { self.pending.retain(|element| element != &tx_hash); } - pub fn add_libraries(&mut self, libraries: Libraries) { - self.libraries = libraries - .libs - .iter() - .flat_map(|(file, libs)| { - libs.iter() - .map(|(name, address)| format!("{}:{name}:{address}", file.to_string_lossy())) - }) - .collect(); - } - /// Gets paths in the formats /// ./broadcast/[contract_filename]/[chain_id]/[sig]-[timestamp].json and /// ./cache/[contract_filename]/[chain_id]/[sig]-[timestamp].json pub fn get_paths( - broadcast: &Path, - cache: &Path, + config: &Config, sig: &str, target: &ArtifactId, chain_id: u64, - broadcasted: bool, + dry_run: bool, ) -> Result<(PathBuf, PathBuf)> { - let mut broadcast = broadcast.to_path_buf(); - let mut cache = cache.to_path_buf(); + let mut broadcast = config.broadcast.to_path_buf(); + let mut cache = config.cache_path.to_path_buf(); let mut common = PathBuf::new(); let target_fname = target.source.file_name().wrap_err("No filename.")?; common.push(target_fname); common.push(chain_id.to_string()); - if !broadcasted { + if dry_run { common.push(DRY_RUN_DIR); } @@ -245,20 +257,6 @@ impl ScriptSequence { Ok((broadcast, cache)) } - /// Checks that there is an Etherscan key for the chain id of this sequence. - pub fn verify_preflight_check(&self, config: &Config, verify: &VerifyBundle) -> Result<()> { - if config.get_etherscan_api_key(Some(self.chain.into())).is_none() && - verify.verifier.verifier == VerificationProviderType::Etherscan - { - eyre::bail!( - "Etherscan API key wasn't found for chain id {}. 
On-chain execution aborted", - self.chain - ) - } - - Ok(()) - } - /// Given the broadcast log, it matches transactions with receipts, and tries to verify any /// created contract on etherscan. pub async fn verify_contracts( @@ -354,8 +352,8 @@ impl ScriptSequence { } /// Returns the first RPC URL of this sequence. - pub fn rpc_url(&self) -> Option<&str> { - self.transactions.front().and_then(|tx| tx.rpc.as_deref()) + pub fn rpc_url(&self) -> &str { + self.transactions.front().expect("empty sequence").rpc.as_str() } /// Returns the list of the transactions without the metadata. @@ -371,13 +369,6 @@ impl ScriptSequence { } } -impl Drop for ScriptSequence { - fn drop(&mut self) { - self.sort_receipts(); - self.save().expect("not able to save deployment sequence"); - } -} - /// Converts the `sig` argument into the corresponding file path. /// /// This accepts either the signature of the function or the raw calldata diff --git a/crates/script/src/simulate.rs b/crates/script/src/simulate.rs new file mode 100644 index 000000000000..99bd39ad9515 --- /dev/null +++ b/crates/script/src/simulate.rs @@ -0,0 +1,455 @@ +use super::{ + artifacts::ArtifactInfo, + multi_sequence::MultiChainSequence, + providers::ProvidersManager, + runner::ScriptRunner, + sequence::{ScriptSequence, ScriptSequenceKind}, + transaction::TransactionWithMetadata, +}; +use crate::{ + broadcast::{estimate_gas, BundledState}, + build::LinkedBuildData, + execute::{ExecutionArtifacts, ExecutionData}, + sequence::get_commit_hash, + ScriptArgs, ScriptConfig, ScriptResult, +}; +use alloy_primitives::{utils::format_units, Address, U256}; +use eyre::{Context, Result}; +use foundry_cheatcodes::{BroadcastableTransactions, ScriptWallets}; +use foundry_cli::utils::{has_different_gas_calc, now}; +use foundry_common::{ + get_contract_name, provider::ethers::RpcUrl, shell, types::ToAlloy, ContractsByArtifact, +}; +use foundry_evm::traces::render_trace_arena; +use futures::future::join_all; +use parking_lot::RwLock; +use 
std::{ + collections::{BTreeMap, HashMap, VecDeque}, + sync::Arc, +}; + +/// Same as [ExecutedState], but also contains [ExecutionArtifacts] which are obtained from +/// [ScriptResult]. +/// +/// Can be either converted directly to [BundledState] via [PreSimulationState::resume] or driven to +/// it through [FilledTransactionsState]. +pub struct PreSimulationState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, + pub build_data: LinkedBuildData, + pub execution_data: ExecutionData, + pub execution_result: ScriptResult, + pub execution_artifacts: ExecutionArtifacts, +} + +impl PreSimulationState { + /// If simulation is enabled, simulates transactions against fork and fills gas estimation and + /// metadata. Otherwise, metadata (e.g. additional contracts, created contract names) is + /// left empty. + /// + /// Both modes will panic if any of the transactions have None for the `rpc` field. + pub async fn fill_metadata(self) -> Result { + let transactions = if let Some(txs) = self.execution_result.transactions.as_ref() { + if self.args.skip_simulation { + shell::println("\nSKIPPING ON CHAIN SIMULATION.")?; + self.no_simulation(txs.clone())? + } else { + self.onchain_simulation(txs.clone()).await? + } + } else { + VecDeque::new() + }; + + Ok(FilledTransactionsState { + args: self.args, + script_config: self.script_config, + script_wallets: self.script_wallets, + build_data: self.build_data, + execution_data: self.execution_data, + execution_artifacts: self.execution_artifacts, + transactions, + }) + } + + /// Builds separate runners and environments for each RPC used in script and executes all + /// transactions in those environments. + /// + /// Collects gas usage and metadata for each transaction. 
+ pub async fn onchain_simulation( + &self, + transactions: BroadcastableTransactions, + ) -> Result> { + trace!(target: "script", "executing onchain simulation"); + + let runners = Arc::new( + self.build_runners() + .await? + .into_iter() + .map(|(rpc, runner)| (rpc, Arc::new(RwLock::new(runner)))) + .collect::>(), + ); + + let contracts = self.build_data.get_flattened_contracts(false); + let address_to_abi: BTreeMap = + self.build_address_to_abi_map(&contracts); + + let mut final_txs = VecDeque::new(); + + // Executes all transactions from the different forks concurrently. + let futs = transactions + .into_iter() + .map(|transaction| async { + let rpc = transaction.rpc.expect("missing broadcastable tx rpc url"); + let mut runner = runners.get(&rpc).expect("invalid rpc url").write(); + + let mut tx = transaction.transaction; + let result = runner + .simulate( + tx.from + .expect("transaction doesn't have a `from` address at execution time"), + tx.to, + tx.input.clone().into_input(), + tx.value, + ) + .wrap_err("Internal EVM error during simulation")?; + + if !result.success { + return Ok((None, result.traces)); + } + + let created_contracts = result.get_created_contracts(); + + // Simulate mining the transaction if the user passes `--slow`. 
+ if self.args.slow { + runner.executor.env.block.number += U256::from(1); + } + + let is_fixed_gas_limit = tx.gas.is_some(); + match tx.gas { + // If tx.gas is already set that means it was specified in script + Some(gas) => { + println!("Gas limit was set in script to {gas}"); + } + // We inflate the gas used by the user specified percentage + None => { + let gas = + U256::from(result.gas_used * self.args.gas_estimate_multiplier / 100); + tx.gas = Some(gas); + } + } + let tx = TransactionWithMetadata::new( + tx, + rpc, + &result, + &address_to_abi, + &self.execution_artifacts.decoder, + created_contracts, + is_fixed_gas_limit, + )?; + + eyre::Ok((Some(tx), result.traces)) + }) + .collect::>(); + + if self.script_config.evm_opts.verbosity > 3 { + println!("=========================="); + println!("Simulated On-chain Traces:\n"); + } + + let mut abort = false; + for res in join_all(futs).await { + let (tx, traces) = res?; + + // Transaction will be `None`, if execution didn't pass. + if tx.is_none() || self.script_config.evm_opts.verbosity > 3 { + for (_, trace) in &traces { + println!( + "{}", + render_trace_arena(trace, &self.execution_artifacts.decoder).await? + ); + } + } + + if let Some(tx) = tx { + final_txs.push_back(tx); + } else { + abort = true; + } + } + + if abort { + eyre::bail!("Simulated execution failed.") + } + + Ok(final_txs) + } + + /// Build mapping from contract address to its ABI, code and contract name. 
+ fn build_address_to_abi_map<'a>( + &self, + contracts: &'a ContractsByArtifact, + ) -> BTreeMap> { + self.execution_artifacts + .decoder + .contracts + .iter() + .filter_map(move |(addr, contract_id)| { + let contract_name = get_contract_name(contract_id); + if let Ok(Some((_, (abi, code)))) = + contracts.find_by_name_or_identifier(contract_name) + { + let info = ArtifactInfo { + contract_name: contract_name.to_string(), + contract_id: contract_id.to_string(), + abi, + code, + }; + return Some((*addr, info)); + } + None + }) + .collect() + } + + /// Build [ScriptRunner] forking given RPC for each RPC used in the script. + async fn build_runners(&self) -> Result> { + let rpcs = self.execution_artifacts.rpc_data.total_rpcs.clone(); + if !shell::verbosity().is_silent() { + let n = rpcs.len(); + let s = if n != 1 { "s" } else { "" }; + println!("\n## Setting up {n} EVM{s}."); + } + + let futs = rpcs + .into_iter() + .map(|rpc| async move { + let mut script_config = self.script_config.clone(); + script_config.evm_opts.fork_url = Some(rpc.clone()); + let runner = script_config.get_runner().await?; + Ok((rpc.clone(), runner)) + }) + .collect::>(); + + join_all(futs).await.into_iter().collect() + } + + /// If simulation is disabled, converts transactions into [TransactionWithMetadata] type + /// skipping metadata filling. + fn no_simulation( + &self, + transactions: BroadcastableTransactions, + ) -> Result> { + Ok(transactions + .into_iter() + .map(|btx| { + let mut tx = TransactionWithMetadata::from_tx_request(btx.transaction); + tx.rpc = btx.rpc.expect("missing broadcastable tx rpc url"); + tx + }) + .collect()) + } +} + +/// At this point we have converted transactions collected during script execution to +/// [TransactionWithMetadata] objects which contain additional metadata needed for broadcasting and +/// verification. 
+pub struct FilledTransactionsState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub script_wallets: ScriptWallets, + pub build_data: LinkedBuildData, + pub execution_data: ExecutionData, + pub execution_artifacts: ExecutionArtifacts, + pub transactions: VecDeque, +} + +impl FilledTransactionsState { + /// Bundles all transactions of the [`TransactionWithMetadata`] type in a list of + /// [`ScriptSequence`]. List length will be higher than 1, if we're dealing with a multi + /// chain deployment. + /// + /// Each transaction will be added with the correct transaction type and gas estimation. + pub async fn bundle(self) -> Result { + let is_multi_deployment = self.execution_artifacts.rpc_data.total_rpcs.len() > 1; + + if is_multi_deployment && !self.build_data.libraries.is_empty() { + eyre::bail!("Multi-chain deployment is not supported with libraries."); + } + + let mut total_gas_per_rpc: HashMap = HashMap::new(); + + // Batches sequence of transactions from different rpcs. + let mut new_sequence = VecDeque::new(); + let mut manager = ProvidersManager::default(); + let mut sequences = vec![]; + + // Peeking is used to check if the next rpc url is different. If so, it creates a + // [`ScriptSequence`] from all the collected transactions up to this point. + let mut txes_iter = self.transactions.clone().into_iter().peekable(); + + while let Some(mut tx) = txes_iter.next() { + let tx_rpc = tx.rpc.clone(); + let provider_info = manager.get_or_init_provider(&tx.rpc, self.args.legacy).await?; + + // Handles chain specific requirements. + tx.change_type(provider_info.is_legacy); + tx.transaction.set_chain_id(provider_info.chain); + + if !self.args.skip_simulation { + let typed_tx = tx.typed_tx_mut(); + + if has_different_gas_calc(provider_info.chain) { + trace!("estimating with different gas calculation"); + let gas = *typed_tx.gas().expect("gas is set by simulation."); + + // We are trying to show the user an estimation of the total gas usage. 
+ // + // However, some transactions might depend on previous ones. For + // example, tx1 might deploy a contract that tx2 uses. That + // will result in the following `estimate_gas` call to fail, + // since tx1 hasn't been broadcasted yet. + // + // Not exiting here will not be a problem when actually broadcasting, because + // for chains where `has_different_gas_calc` returns true, + // we await each transaction before broadcasting the next + // one. + if let Err(err) = estimate_gas( + typed_tx, + &provider_info.provider, + self.args.gas_estimate_multiplier, + ) + .await + { + trace!("gas estimation failed: {err}"); + + // Restore gas value, since `estimate_gas` will remove it. + typed_tx.set_gas(gas); + } + } + + let total_gas = total_gas_per_rpc.entry(tx_rpc.clone()).or_insert(U256::ZERO); + *total_gas += (*typed_tx.gas().expect("gas is set")).to_alloy(); + } + + new_sequence.push_back(tx); + // We only create a [`ScriptSequence`] object when we collect all the rpc related + // transactions. + if let Some(next_tx) = txes_iter.peek() { + if next_tx.rpc == tx_rpc { + continue; + } + } + + let sequence = + self.create_sequence(is_multi_deployment, provider_info.chain, new_sequence)?; + + sequences.push(sequence); + + new_sequence = VecDeque::new(); + } + + if !self.args.skip_simulation { + // Present gas information on a per RPC basis. + for (rpc, total_gas) in total_gas_per_rpc { + let provider_info = manager.get(&rpc).expect("provider is set."); + + // We don't store it in the transactions, since we want the most updated value. + // Right before broadcasting. + let per_gas = if let Some(gas_price) = self.args.with_gas_price { + gas_price + } else { + provider_info.gas_price()? 
+ }; + + shell::println("\n==========================")?; + shell::println(format!("\nChain {}", provider_info.chain))?; + + shell::println(format!( + "\nEstimated gas price: {} gwei", + format_units(per_gas, 9) + .unwrap_or_else(|_| "[Could not calculate]".to_string()) + .trim_end_matches('0') + .trim_end_matches('.') + ))?; + shell::println(format!("\nEstimated total gas used for script: {total_gas}"))?; + shell::println(format!( + "\nEstimated amount required: {} ETH", + format_units(total_gas.saturating_mul(per_gas), 18) + .unwrap_or_else(|_| "[Could not calculate]".to_string()) + .trim_end_matches('0') + ))?; + shell::println("\n==========================")?; + } + } + + let sequence = if sequences.len() == 1 { + ScriptSequenceKind::Single(sequences.pop().expect("empty sequences")) + } else { + ScriptSequenceKind::Multi(MultiChainSequence::new( + sequences, + &self.args.sig, + &self.build_data.build_data.target, + &self.script_config.config, + !self.args.broadcast, + )?) + }; + + Ok(BundledState { + args: self.args, + script_config: self.script_config, + script_wallets: self.script_wallets, + build_data: self.build_data, + execution_data: self.execution_data, + execution_artifacts: self.execution_artifacts, + sequence, + }) + } + + /// Creates a [ScriptSequence] object from the given transactions. + fn create_sequence( + &self, + multi: bool, + chain: u64, + transactions: VecDeque, + ) -> Result { + // Paths are set to None for multi-chain sequences parts, because they don't need to be + // saved to a separate file. + let paths = if multi { + None + } else { + Some(ScriptSequence::get_paths( + &self.script_config.config, + &self.args.sig, + &self.build_data.build_data.target, + chain, + !self.args.broadcast, + )?) 
+ }; + + let commit = get_commit_hash(&self.script_config.config.__root.0); + + let libraries = self + .build_data + .libraries + .libs + .iter() + .flat_map(|(file, libs)| { + libs.iter() + .map(|(name, address)| format!("{}:{name}:{address}", file.to_string_lossy())) + }) + .collect(); + + Ok(ScriptSequence { + transactions, + returns: self.execution_artifacts.returns.clone(), + receipts: vec![], + pending: vec![], + paths, + timestamp: now().as_secs(), + libraries, + chain, + commit, + }) + } +} diff --git a/crates/forge/bin/cmd/script/transaction.rs b/crates/script/src/transaction.rs similarity index 98% rename from crates/forge/bin/cmd/script/transaction.rs rename to crates/script/src/transaction.rs index fec73b22f001..3f92e2f31be2 100644 --- a/crates/forge/bin/cmd/script/transaction.rs +++ b/crates/script/src/transaction.rs @@ -44,7 +44,7 @@ pub struct TransactionWithMetadata { #[serde(default = "default_vec_of_strings")] pub arguments: Option>, #[serde(skip)] - pub rpc: Option, + pub rpc: RpcUrl, pub transaction: TypedTransaction, pub additional_contracts: Vec, pub is_fixed_gas_limit: bool, @@ -80,7 +80,7 @@ impl TransactionWithMetadata { pub fn new( transaction: TransactionRequest, - rpc: Option, + rpc: RpcUrl, result: &ScriptResult, local_contracts: &BTreeMap, decoder: &CallTraceDecoder, @@ -195,6 +195,7 @@ impl TransactionWithMetadata { decoder: &CallTraceDecoder, ) -> Result<()> { self.opcode = CallKind::Call; + self.contract_address = Some(target); let Some(data) = self.transaction.data() else { return Ok(()) }; if data.len() < SELECTOR_LEN { @@ -211,10 +212,6 @@ impl TransactionWithMetadata { decoder.functions.get(selector).and_then(|v| v.first()) }; if let Some(function) = function { - if self.contract_address.is_none() { - self.contract_name = decoder.contracts.get(&target).cloned(); - } - self.function = Some(function.signature()); let values = function.abi_decode_input(data, false).map_err(|e| { @@ -229,15 +226,9 @@ impl TransactionWithMetadata { 
self.arguments = Some(values.iter().map(format_token_raw).collect()); } - self.contract_address = Some(target); - Ok(()) } - pub fn set_tx(&mut self, tx: TypedTransaction) { - self.transaction = tx; - } - pub fn change_type(&mut self, is_legacy: bool) { self.transaction = if is_legacy { TypedTransaction::Legacy(self.transaction.clone().into()) diff --git a/crates/forge/bin/cmd/script/verify.rs b/crates/script/src/verify.rs similarity index 80% rename from crates/forge/bin/cmd/script/verify.rs rename to crates/script/src/verify.rs index 43293268d5c1..be5825dfc7e8 100644 --- a/crates/forge/bin/cmd/script/verify.rs +++ b/crates/script/src/verify.rs @@ -1,4 +1,12 @@ +use crate::{ + build::LinkedBuildData, + execute::{ExecutionArtifacts, ExecutionData}, + sequence::ScriptSequenceKind, + ScriptArgs, ScriptConfig, +}; + use alloy_primitives::Address; +use eyre::Result; use forge_verify::{RetryArgs, VerifierArgs, VerifyArgs}; use foundry_cli::opts::{EtherscanOpts, ProjectPathsArgs}; use foundry_common::ContractsByArtifact; @@ -6,6 +14,38 @@ use foundry_compilers::{info::ContractInfo, Project}; use foundry_config::{Chain, Config}; use semver::Version; +/// State after we have broadcasted the script. +/// It is assumed that at this point [BroadcastedState::sequence] contains receipts for all +/// broadcasted transactions. +pub struct BroadcastedState { + pub args: ScriptArgs, + pub script_config: ScriptConfig, + pub build_data: LinkedBuildData, + pub execution_data: ExecutionData, + pub execution_artifacts: ExecutionArtifacts, + pub sequence: ScriptSequenceKind, +} + +impl BroadcastedState { + pub async fn verify(self) -> Result<()> { + let Self { args, script_config, build_data, mut sequence, .. 
} = self; + + let verify = VerifyBundle::new( + &script_config.config.project()?, + &script_config.config, + build_data.get_flattened_contracts(false), + args.retry, + args.verifier, + ); + + for sequence in sequence.sequences_mut() { + sequence.verify_contracts(&script_config.config, verify.clone()).await?; + } + + Ok(()) + } +} + /// Data struct to help `ScriptSequence` verify contracts on `etherscan`. #[derive(Clone)] pub struct VerifyBundle {