From 1240f14b207ee5deed465c0162ff86e0e7654bf3 Mon Sep 17 00:00:00 2001 From: elfedy Date: Tue, 12 Nov 2024 18:04:41 -0300 Subject: [PATCH 1/2] refactor: remove all compilers overrides --- Cargo.toml | 38 +- crates/artifacts/artifacts/Cargo.toml | 23 - crates/artifacts/artifacts/src/lib.rs | 9 - crates/artifacts/solc/Cargo.toml | 44 - crates/artifacts/solc/src/ast/lowfidelity.rs | 219 -- crates/artifacts/solc/src/ast/macros.rs | 104 - crates/artifacts/solc/src/ast/misc.rs | 113 - crates/artifacts/solc/src/ast/mod.rs | 1136 --------- crates/artifacts/solc/src/ast/utils.rs | 26 - crates/artifacts/solc/src/ast/visitor.rs | 623 ----- crates/artifacts/solc/src/ast/yul.rs | 192 -- crates/artifacts/solc/src/bytecode.rs | 503 ---- crates/artifacts/solc/src/configurable.rs | 115 - crates/artifacts/solc/src/contract.rs | 539 ---- crates/artifacts/solc/src/error.rs | 423 ---- crates/artifacts/solc/src/hh.rs | 86 - crates/artifacts/solc/src/lib.rs | 2224 ----------------- crates/artifacts/solc/src/output_selection.rs | 659 ----- crates/artifacts/solc/src/remappings.rs | 1377 ---------- crates/artifacts/solc/src/serde_helpers.rs | 246 -- crates/artifacts/solc/src/sourcemap.rs | 628 ----- crates/artifacts/solc/src/sources.rs | 273 -- crates/artifacts/vyper/Cargo.toml | 33 - crates/artifacts/vyper/src/error.rs | 42 - crates/artifacts/vyper/src/input.rs | 63 - crates/artifacts/vyper/src/lib.rs | 16 - crates/artifacts/vyper/src/output.rs | 181 -- crates/artifacts/vyper/src/settings.rs | 113 - crates/artifacts/zksolc/Cargo.toml | 45 - crates/compilers/Cargo.toml | 116 - crates/compilers/README.md | 1 - .../src/artifact_output/configurable.rs | 841 ------- crates/compilers/src/artifact_output/hh.rs | 79 - crates/compilers/src/artifact_output/mod.rs | 1216 --------- crates/compilers/src/buildinfo.rs | 150 -- crates/compilers/src/cache.rs | 1073 -------- crates/compilers/src/compile/many.rs | 43 - crates/compilers/src/compile/mod.rs | 6 - .../compilers/src/compile/output/contracts.rs | 316 --- crates/compilers/src/compile/output/info.rs | 155 -- crates/compilers/src/compile/output/mod.rs | 932 ------- .../compilers/src/compile/output/sources.rs | 227 -- crates/compilers/src/compile/project.rs | 786 ------ crates/compilers/src/compilers/mod.rs | 302 --- crates/compilers/src/compilers/multi.rs | 394 --- .../compilers/src/compilers/solc/compiler.rs | 783 ------ crates/compilers/src/compilers/solc/mod.rs | 367 --- crates/compilers/src/compilers/vyper/error.rs | 24 - crates/compilers/src/compilers/vyper/input.rs | 55 - crates/compilers/src/compilers/vyper/mod.rs | 212 -- .../compilers/src/compilers/vyper/output.rs | 15 - .../compilers/src/compilers/vyper/parser.rs | 224 -- .../compilers/src/compilers/vyper/settings.rs | 33 - crates/compilers/src/config.rs | 1146 --------- crates/compilers/src/filter.rs | 180 -- crates/compilers/src/flatten.rs | 889 ------- crates/compilers/src/lib.rs | 1020 -------- crates/compilers/src/project_util/mock.rs | 630 ----- crates/compilers/src/project_util/mod.rs | 552 ---- crates/compilers/src/report/compiler.rs | 244 -- crates/compilers/src/report/mod.rs | 501 ---- crates/compilers/src/resolver/mod.rs | 1109 -------- crates/compilers/src/resolver/parse.rs | 329 --- crates/compilers/src/resolver/tree.rs | 173 -- .../src/zksync/artifact_output/zk.rs | 272 -- crates/compilers/src/zksync/compile/mod.rs | 2 - .../src/zksync/compile/output/contracts.rs | 239 -- .../src/zksync/compile/output/mod.rs | 589 ----- .../compilers/src/zksync/compile/project.rs | 400 --- 
crates/compilers/src/zksync/mod.rs | 182 -- crates/core/Cargo.toml | 43 - crates/core/src/error.rs | 130 - crates/core/src/lib.rs | 7 - crates/core/src/utils.rs | 918 ------- src/artifacts/mod.rs | 1 + .../src => src/artifacts/zksolc}/contract.rs | 10 +- .../src => src/artifacts/zksolc}/error.rs | 0 .../src/lib.rs => src/artifacts/zksolc/mod.rs | 2 +- .../artifacts/zksolc}/output_selection.rs | 0 src/compilers/compilers/mod.rs | 1 + .../compilers}/compilers/zksolc/input.rs | 3 +- .../compilers}/compilers/zksolc/mod.rs | 93 +- .../compilers}/compilers/zksolc/settings.rs | 9 +- src/compilers/mod.rs | 2 + {crates => src}/compilers/tests/mocked.rs | 0 {crates => src}/compilers/tests/project.rs | 0 {crates => src}/compilers/tests/zksync.rs | 0 .../compilers}/zksync/artifact_output/mod.rs | 0 src/compilers/zksync/artifact_output/zk.rs | 146 ++ .../zksync/artifact_output/zk/bytecode.rs | 3 +- src/compilers/zksync/mod.rs | 1 + src/lib.rs | 2 + test-data/zksync/yul-sample/SimpleStore.yul | 11 - .../zksync/yul-sample/SimpleStore.yul.json | 1 + 94 files changed, 255 insertions(+), 28028 deletions(-) delete mode 100644 crates/artifacts/artifacts/Cargo.toml delete mode 100644 crates/artifacts/artifacts/src/lib.rs delete mode 100644 crates/artifacts/solc/Cargo.toml delete mode 100644 crates/artifacts/solc/src/ast/lowfidelity.rs delete mode 100644 crates/artifacts/solc/src/ast/macros.rs delete mode 100644 crates/artifacts/solc/src/ast/misc.rs delete mode 100644 crates/artifacts/solc/src/ast/mod.rs delete mode 100644 crates/artifacts/solc/src/ast/utils.rs delete mode 100644 crates/artifacts/solc/src/ast/visitor.rs delete mode 100644 crates/artifacts/solc/src/ast/yul.rs delete mode 100644 crates/artifacts/solc/src/bytecode.rs delete mode 100644 crates/artifacts/solc/src/configurable.rs delete mode 100644 crates/artifacts/solc/src/contract.rs delete mode 100644 crates/artifacts/solc/src/error.rs delete mode 100644 crates/artifacts/solc/src/hh.rs delete mode 100644 crates/artifacts/solc/src/lib.rs delete mode 100644 crates/artifacts/solc/src/output_selection.rs delete mode 100644 crates/artifacts/solc/src/remappings.rs delete mode 100644 crates/artifacts/solc/src/serde_helpers.rs delete mode 100644 crates/artifacts/solc/src/sourcemap.rs delete mode 100644 crates/artifacts/solc/src/sources.rs delete mode 100644 crates/artifacts/vyper/Cargo.toml delete mode 100644 crates/artifacts/vyper/src/error.rs delete mode 100644 crates/artifacts/vyper/src/input.rs delete mode 100644 crates/artifacts/vyper/src/lib.rs delete mode 100644 crates/artifacts/vyper/src/output.rs delete mode 100644 crates/artifacts/vyper/src/settings.rs delete mode 100644 crates/artifacts/zksolc/Cargo.toml delete mode 100644 crates/compilers/Cargo.toml delete mode 120000 crates/compilers/README.md delete mode 100644 crates/compilers/src/artifact_output/configurable.rs delete mode 100644 crates/compilers/src/artifact_output/hh.rs delete mode 100644 crates/compilers/src/artifact_output/mod.rs delete mode 100644 crates/compilers/src/buildinfo.rs delete mode 100644 crates/compilers/src/cache.rs delete mode 100644 crates/compilers/src/compile/many.rs delete mode 100644 crates/compilers/src/compile/mod.rs delete mode 100644 crates/compilers/src/compile/output/contracts.rs delete mode 100644 crates/compilers/src/compile/output/info.rs delete mode 100644 crates/compilers/src/compile/output/mod.rs delete mode 100644 crates/compilers/src/compile/output/sources.rs delete mode 100644 crates/compilers/src/compile/project.rs delete mode 100644 
crates/compilers/src/compilers/mod.rs delete mode 100644 crates/compilers/src/compilers/multi.rs delete mode 100644 crates/compilers/src/compilers/solc/compiler.rs delete mode 100644 crates/compilers/src/compilers/solc/mod.rs delete mode 100644 crates/compilers/src/compilers/vyper/error.rs delete mode 100644 crates/compilers/src/compilers/vyper/input.rs delete mode 100644 crates/compilers/src/compilers/vyper/mod.rs delete mode 100644 crates/compilers/src/compilers/vyper/output.rs delete mode 100644 crates/compilers/src/compilers/vyper/parser.rs delete mode 100644 crates/compilers/src/compilers/vyper/settings.rs delete mode 100644 crates/compilers/src/config.rs delete mode 100644 crates/compilers/src/filter.rs delete mode 100644 crates/compilers/src/flatten.rs delete mode 100644 crates/compilers/src/lib.rs delete mode 100644 crates/compilers/src/project_util/mock.rs delete mode 100644 crates/compilers/src/project_util/mod.rs delete mode 100644 crates/compilers/src/report/compiler.rs delete mode 100644 crates/compilers/src/report/mod.rs delete mode 100644 crates/compilers/src/resolver/mod.rs delete mode 100644 crates/compilers/src/resolver/parse.rs delete mode 100644 crates/compilers/src/resolver/tree.rs delete mode 100644 crates/compilers/src/zksync/artifact_output/zk.rs delete mode 100644 crates/compilers/src/zksync/compile/mod.rs delete mode 100644 crates/compilers/src/zksync/compile/output/contracts.rs delete mode 100644 crates/compilers/src/zksync/compile/output/mod.rs delete mode 100644 crates/compilers/src/zksync/compile/project.rs delete mode 100644 crates/compilers/src/zksync/mod.rs delete mode 100644 crates/core/Cargo.toml delete mode 100644 crates/core/src/error.rs delete mode 100644 crates/core/src/lib.rs delete mode 100644 crates/core/src/utils.rs create mode 100644 src/artifacts/mod.rs rename {crates/artifacts/zksolc/src => src/artifacts/zksolc}/contract.rs (94%) rename {crates/artifacts/zksolc/src => src/artifacts/zksolc}/error.rs (100%) rename crates/artifacts/zksolc/src/lib.rs => src/artifacts/zksolc/mod.rs (99%) rename {crates/artifacts/zksolc/src => src/artifacts/zksolc}/output_selection.rs (100%) create mode 100644 src/compilers/compilers/mod.rs rename {crates/compilers/src => src/compilers}/compilers/zksolc/input.rs (99%) rename {crates/compilers/src => src/compilers}/compilers/zksolc/mod.rs (91%) rename {crates/compilers/src => src/compilers}/compilers/zksolc/settings.rs (98%) create mode 100644 src/compilers/mod.rs rename {crates => src}/compilers/tests/mocked.rs (100%) rename {crates => src}/compilers/tests/project.rs (100%) rename {crates => src}/compilers/tests/zksync.rs (100%) rename {crates/compilers/src => src/compilers}/zksync/artifact_output/mod.rs (100%) create mode 100644 src/compilers/zksync/artifact_output/zk.rs rename {crates/compilers/src => src/compilers}/zksync/artifact_output/zk/bytecode.rs (90%) create mode 100644 src/compilers/zksync/mod.rs create mode 100644 src/lib.rs delete mode 100644 test-data/zksync/yul-sample/SimpleStore.yul create mode 100644 test-data/zksync/yul-sample/SimpleStore.yul.json diff --git a/Cargo.toml b/Cargo.toml index 5a1d82f4..e9c520bd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,8 +1,6 @@ -[workspace] -members = ["crates/artifacts/*", "crates/core", "crates/compilers"] -resolver = "2" - -[workspace.package] +[package] +name = "foundry-compilers-zksync" +description = "ZKSync extensions for foundry-compilers" authors = ["Foundry Maintainers"] version = "0.11.6" rust-version = "1.70" @@ -11,31 +9,29 @@ license = "MIT OR 
Apache-2.0" repository = "https://github.com/foundry-rs/compilers" homepage = "https://github.com/foundry-rs/compilers" documentation = "https://docs.rs/foundry-compilers" -description = "Utilities for working with EVM language compilers" keywords = ["foundry", "solidity", "solc", "ethereum", "ethers"] edition = "2021" exclude = [".github/", "scripts/", "test-data/"] -[workspace.lints.clippy] +[lints.clippy] dbg-macro = "warn" manual-string-new = "warn" uninlined-format-args = "warn" use-self = "warn" -[workspace.lints.rust] +[lints.rust] rust-2018-idioms = "deny" # unreachable-pub = "warn" unused-must-use = "deny" -[workspace.lints.rustdoc] +[lints.rustdoc] all = "warn" -[workspace.dependencies] -foundry-compilers = { path = "crates/compilers", version = "0.11.6" } -foundry-compilers-artifacts = { path = "crates/artifacts/artifacts", version = "0.11.6" } -foundry-compilers-artifacts-solc = { path = "crates/artifacts/solc", version = "0.11.6" } -foundry-compilers-artifacts-vyper = { path = "crates/artifacts/vyper", version = "0.11.6" } -foundry-compilers-core = { path = "crates/core", version = "0.11.6" } +[dependencies] +foundry-compilers = { path = "../foundry-upstream/compilers/crates/compilers", features = ["svm-solc"] } +foundry-compilers-artifacts = { path = "../foundry-upstream/compilers/crates/artifacts/artifacts", version = "0.11.6" } +foundry-compilers-artifacts-solc = { path = "../foundry-upstream/compilers/crates/artifacts/solc", version = "0.11.6" } +foundry-compilers-core = { path = "../foundry-upstream/compilers/crates/core", version = "0.11.6" } alloy-json-abi = { version = "0.8", features = ["serde_json"] } alloy-primitives = { version = "0.8", features = ["serde", "rand"] } @@ -66,5 +62,15 @@ tokio = { version = "1.35", features = ["rt-multi-thread"] } snapbox = "0.6.9" # zksync -foundry-compilers-artifacts-zksolc = { path = "crates/artifacts/zksolc", version = "0.11.1" } globset = "0.4" +dirs = "5.0.1" +itertools = "0.13.0" +fs4 = "0.8.2" +reqwest = "0.12.9" +fd-lock = "4.0.2" + +[features] + +project-util = [ + "foundry-compilers-core/project-util", +] diff --git a/crates/artifacts/artifacts/Cargo.toml b/crates/artifacts/artifacts/Cargo.toml deleted file mode 100644 index 90b07ef9..00000000 --- a/crates/artifacts/artifacts/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "foundry-compilers-artifacts" -description = "Rust bindings for compilers JSON artifacts" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -foundry-compilers-artifacts-solc.workspace = true -foundry-compilers-artifacts-vyper.workspace = true -foundry-compilers-artifacts-zksolc.workspace = true - -[features] -async = ["foundry-compilers-artifacts-solc/async"] diff --git a/crates/artifacts/artifacts/src/lib.rs b/crates/artifacts/artifacts/src/lib.rs deleted file mode 100644 index 1bdd7035..00000000 --- a/crates/artifacts/artifacts/src/lib.rs +++ /dev/null @@ -1,9 +0,0 @@ -//! Meta crate reexporting all artifacts types. 
- -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -pub use foundry_compilers_artifacts_solc as solc; -pub use foundry_compilers_artifacts_vyper as vyper; -pub use foundry_compilers_artifacts_zksolc as zksolc; -pub use solc::*; diff --git a/crates/artifacts/solc/Cargo.toml b/crates/artifacts/solc/Cargo.toml deleted file mode 100644 index 33a6361e..00000000 --- a/crates/artifacts/solc/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "foundry-compilers-artifacts-solc" -description = "Rust bindings for Solc JSON artifacts" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -foundry-compilers-core.workspace = true - -alloy-json-abi.workspace = true -alloy-primitives.workspace = true -futures-util = { workspace = true, optional = true } -md-5.workspace = true -rayon.workspace = true -semver.workspace = true -serde_json.workspace = true -serde.workspace = true -thiserror.workspace = true -tokio = { workspace = true, optional = true } -tracing.workspace = true -walkdir.workspace = true -yansi.workspace = true -serde_repr = "0.1" - -[target.'cfg(windows)'.dependencies] -path-slash.workspace = true - -[dev-dependencies] -serde_path_to_error = "0.1" -similar-asserts.workspace = true -foundry-compilers-core = { workspace = true, features = ["test-utils"] } - -[features] -async = ["dep:tokio", "futures-util", "tokio/fs"] diff --git a/crates/artifacts/solc/src/ast/lowfidelity.rs b/crates/artifacts/solc/src/ast/lowfidelity.rs deleted file mode 100644 index 1d4fc75c..00000000 --- a/crates/artifacts/solc/src/ast/lowfidelity.rs +++ /dev/null @@ -1,219 +0,0 @@ -//! Bindings for solc's `ast` output field - -use crate::serde_helpers; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use std::{collections::BTreeMap, fmt, fmt::Write, str::FromStr}; - -/// Represents the AST field in the solc output -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct Ast { - #[serde(rename = "absolutePath")] - pub absolute_path: String, - pub id: usize, - #[serde(default, rename = "exportedSymbols")] - pub exported_symbols: BTreeMap<String, Vec<usize>>, - #[serde(rename = "nodeType")] - pub node_type: NodeType, - #[serde(with = "serde_helpers::display_from_str")] - pub src: SourceLocation, - #[serde(default)] - pub nodes: Vec<Node>, - - /// Node attributes that were not deserialized. - #[serde(flatten)] - pub other: BTreeMap<String, serde_json::Value>, -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct Node { - /// The node ID. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub id: Option<usize>, - - /// The node type. - #[serde(rename = "nodeType")] - pub node_type: NodeType, - - /// The location of the node in the source file. - #[serde(with = "serde_helpers::display_from_str")] - pub src: SourceLocation, - - /// Child nodes for some node types. - #[serde(default)] - pub nodes: Vec<Node>, - - /// Body node for some node types. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub body: Option<Box<Node>>, - - /// Node attributes that were not deserialized. - #[serde(flatten)] - pub other: BTreeMap<String, serde_json::Value>, -} - -impl Node { - /// Deserialize a serialized node attribute. - pub fn attribute<D: DeserializeOwned>(&self, key: &str) -> Option<D> { - // TODO: Can we avoid this clone?
- self.other.get(key).and_then(|v| serde_json::from_value(v.clone()).ok()) - } -} - -/// Represents the source location of a node: `<start>:<length>:<index>`. -/// -/// The `length` and `index` can be -1 which is represented as `None` -#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub struct SourceLocation { - pub start: usize, - pub length: Option<usize>, - pub index: Option<usize>, -} - -impl FromStr for SourceLocation { - type Err = String; - - fn from_str(s: &str) -> Result<Self, Self::Err> { - let invalid_location = move || format!("{s} invalid source location"); - - let mut split = s.split(':'); - let start = split - .next() - .ok_or_else(invalid_location)? - .parse::<usize>() - .map_err(|_| invalid_location())?; - let length = split - .next() - .ok_or_else(invalid_location)? - .parse::<isize>() - .map_err(|_| invalid_location())?; - let index = split - .next() - .ok_or_else(invalid_location)? - .parse::<isize>() - .map_err(|_| invalid_location())?; - - let length = if length < 0 { None } else { Some(length as usize) }; - let index = if index < 0 { None } else { Some(index as usize) }; - - Ok(Self { start, length, index }) - } -} - -impl fmt::Display for SourceLocation { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.start.fmt(f)?; - f.write_char(':')?; - if let Some(length) = self.length { - length.fmt(f)?; - } else { - f.write_str("-1")?; - } - f.write_char(':')?; - if let Some(index) = self.index { - index.fmt(f)?; - } else { - f.write_str("-1")?; - } - Ok(()) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub enum NodeType { - // Expressions - Assignment, - BinaryOperation, - Conditional, - ElementaryTypeNameExpression, - FunctionCall, - FunctionCallOptions, - Identifier, - IndexAccess, - IndexRangeAccess, - Literal, - MemberAccess, - NewExpression, - TupleExpression, - UnaryOperation, - - // Statements - Block, - Break, - Continue, - DoWhileStatement, - EmitStatement, - ExpressionStatement, - ForStatement, - IfStatement, - InlineAssembly, - PlaceholderStatement, - Return, - RevertStatement, - TryStatement, - UncheckedBlock, - VariableDeclarationStatement, - VariableDeclaration, - WhileStatement, - - // Yul statements - YulAssignment, - YulBlock, - YulBreak, - YulCase, - YulContinue, - YulExpressionStatement, - YulLeave, - YulForLoop, - YulFunctionDefinition, - YulIf, - YulSwitch, - YulVariableDeclaration, - - // Yul expressions - YulFunctionCall, - YulIdentifier, - YulLiteral, - - // Yul literals - YulLiteralValue, - YulHexValue, - YulTypedName, - - // Definitions - ContractDefinition, - FunctionDefinition, - EventDefinition, - ErrorDefinition, - ModifierDefinition, - StructDefinition, - EnumDefinition, - UserDefinedValueTypeDefinition, - - // Directives - PragmaDirective, - ImportDirective, - UsingForDirective, - - // Misc - SourceUnit, - InheritanceSpecifier, - ElementaryTypeName, - FunctionTypeName, - ParameterList, - TryCatchClause, - ModifierInvocation, - - /// An unknown AST node type. - Other(String), -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_parse_ast() { - let ast = include_str!("../../../../../test-data/ast/ast-erc4626.json"); - let _ast: Ast = serde_json::from_str(ast).unwrap(); - } -} diff --git a/crates/artifacts/solc/src/ast/macros.rs b/crates/artifacts/solc/src/ast/macros.rs deleted file mode 100644 index ad3b8cf9..00000000 --- a/crates/artifacts/solc/src/ast/macros.rs +++ /dev/null @@ -1,104 +0,0 @@ -/// Macro that expands to a struct with common AST node fields. -macro_rules!
ast_node { - ( - $(#[$struct_meta:meta])* - struct $name:ident { - $( - $(#[$field_meta:meta])* - $field:ident: $ty:ty - ),* $(,)? - } - ) => { - $(#[$struct_meta])* - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] - #[serde(rename_all = "camelCase")] - pub struct $name { - pub id: usize, - #[serde(with = "serde_helpers::display_from_str")] - pub src: SourceLocation, - $( - $(#[$field_meta])* - pub $field: $ty - ),* - } - }; -} - -/// A macro that expands to a struct with common expression node fields. -macro_rules! expr_node { - ( - $(#[$struct_meta:meta])* - struct $name:ident { - $( - $(#[$field_meta:meta])* - $field:ident: $ty:ty - ),* $(,)* - } - ) => { - ast_node!( - $(#[$struct_meta])* - struct $name { - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - argument_types: Vec<TypeDescriptions>, - #[serde(default)] - is_constant: bool, - #[serde(default)] - is_l_value: bool, - #[serde(default)] - is_pure: bool, - #[serde(default)] - l_value_requested: bool, - type_descriptions: TypeDescriptions, - $( - $(#[$field_meta])* - $field: $ty - ),* - } - ); - } -} - -/// A macro that expands to a struct with common statement node fields. -macro_rules! stmt_node { - ( - $(#[$struct_meta:meta])* - struct $name:ident { - $( - $(#[$field_meta:meta])* - $field:ident: $ty:ty - ),* $(,)* - } - ) => { - ast_node!( - $(#[$struct_meta])* - struct $name { - // TODO - documentation: Option<String>, - $( - $(#[$field_meta])* - $field: $ty - ),* - } - ); - } -} - -/// A macro that expands to an enum where each variant also contains a struct of the same name. -/// -/// The inner value of each variant is boxed since AST types are inherently recursive. -macro_rules! node_group { - ($group:ident; $( $name:ident ),* $(,)*) => { - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] - #[serde(tag = "nodeType")] - pub enum $group { - $( - $name(Box<$name>), - )* - } - }; -} - -pub(crate) use ast_node; -pub(crate) use expr_node; -pub(crate) use node_group; -pub(crate) use stmt_node; diff --git a/crates/artifacts/solc/src/ast/misc.rs b/crates/artifacts/solc/src/ast/misc.rs deleted file mode 100644 index 6ec3187b..00000000 --- a/crates/artifacts/solc/src/ast/misc.rs +++ /dev/null @@ -1,113 +0,0 @@ -use serde::{Deserialize, Serialize}; -use std::{fmt, fmt::Write, str::FromStr}; - -/// Represents the source location of a node: `<start>:<length>:<index>`. -/// -/// The `start`, `length` and `index` can be -1 which is represented as `None` -#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub struct SourceLocation { - pub start: Option<usize>, - pub length: Option<usize>, - pub index: Option<usize>, -} - -impl FromStr for SourceLocation { - type Err = String; - - fn from_str(s: &str) -> Result<Self, Self::Err> { - let invalid_location = move || format!("{s} invalid source location"); - - let mut split = s.split(':'); - let start = split - .next() - .ok_or_else(invalid_location)? - .parse::<isize>() - .map_err(|_| invalid_location())?; - let length = split - .next() - .ok_or_else(invalid_location)? - .parse::<isize>() - .map_err(|_| invalid_location())?; - let index = split - .next() - .ok_or_else(invalid_location)?
- .parse::<isize>() - .map_err(|_| invalid_location())?; - - let start = if start < 0 { None } else { Some(start as usize) }; - let length = if length < 0 { None } else { Some(length as usize) }; - let index = if index < 0 { None } else { Some(index as usize) }; - - Ok(Self { start, length, index }) - } -} - -impl fmt::Display for SourceLocation { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(start) = self.start { - start.fmt(f)?; - } else { - f.write_str("-1")?; - } - f.write_char(':')?; - if let Some(length) = self.length { - length.fmt(f)?; - } else { - f.write_str("-1")?; - } - f.write_char(':')?; - if let Some(index) = self.index { - index.fmt(f)?; - } else { - f.write_str("-1")?; - } - Ok(()) - } -} - -/// Function mutability specifier. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum StateMutability { - Payable, - Pure, - Nonpayable, - View, -} - -/// Variable mutability specifier. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum Mutability { - Mutable, - Immutable, - Constant, -} - -/// Storage location specifier. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum StorageLocation { - Calldata, - Default, - Memory, - Storage, -} - -/// Visibility specifier. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum Visibility { - External, - Public, - Internal, - Private, -} - -/// A type description. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct TypeDescriptions { - pub type_identifier: Option<String>, - pub type_string: Option<String>, -} diff --git a/crates/artifacts/solc/src/ast/mod.rs b/crates/artifacts/solc/src/ast/mod.rs deleted file mode 100644 index c5ca8492..00000000 --- a/crates/artifacts/solc/src/ast/mod.rs +++ /dev/null @@ -1,1136 +0,0 @@ -//! Bindings for the Solidity and Yul ASTs. -//! -//! The Yul AST bindings are available in the [yul] module. -//! -//! To gain an overview of the AST, it might be helpful to start at the entry point of a complete -//! Solidity AST: the [SourceUnit] node. -//! -//! # Version Support -//! -//! These types should be compatible with at least Solidity 0.5.x and above, but may also support -//! 0.4.x-0.5.x in most cases. -//! -//! The legacy Solidity AST is not supported. - -mod macros; -mod misc; -pub use misc::*; -pub mod utils; -pub mod visitor; - -/// A low fidelity representation of the AST. -pub(crate) mod lowfidelity; -pub use lowfidelity::{Ast, Node, NodeType, SourceLocation as LowFidelitySourceLocation}; - -/// Types for the Yul AST. -/// -/// The Yul AST is embedded into the Solidity AST for inline assembly blocks. -pub mod yul; - -use crate::serde_helpers; -use core::fmt; -use macros::{ast_node, expr_node, node_group, stmt_node}; -use serde::{Deserialize, Serialize}; -use std::collections::BTreeMap; -use yul::YulBlock; - -ast_node!( - /// The root node of a Solidity AST. - struct SourceUnit { - #[serde(rename = "absolutePath")] - absolute_path: String, - #[serde(default, rename = "exportedSymbols")] - exported_symbols: BTreeMap<String, Vec<usize>>, - #[serde(default)] - license: Option<String>, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - nodes: Vec<SourceUnitPart>, - } -); - -node_group!
{ - SourceUnitPart; - - PragmaDirective, - ImportDirective, - UsingForDirective, - VariableDeclaration, - EnumDefinition, - ErrorDefinition, - FunctionDefinition, - StructDefinition, - UserDefinedValueTypeDefinition, - ContractDefinition, -} - -node_group! { - Expression; - - Assignment, - BinaryOperation, - Conditional, - ElementaryTypeNameExpression, - FunctionCall, - FunctionCallOptions, - Identifier, - IndexAccess, - IndexRangeAccess, - Literal, - MemberAccess, - NewExpression, - TupleExpression, - UnaryOperation, -} - -node_group! { - Statement; - - Block, - Break, - Continue, - DoWhileStatement, - EmitStatement, - ExpressionStatement, - ForStatement, - IfStatement, - InlineAssembly, - PlaceholderStatement, - Return, - RevertStatement, - TryStatement, - UncheckedBlock, - VariableDeclarationStatement, - WhileStatement, - -} - -node_group! { - ContractDefinitionPart; - - EnumDefinition, - ErrorDefinition, - EventDefinition, - FunctionDefinition, - ModifierDefinition, - StructDefinition, - UserDefinedValueTypeDefinition, - UsingForDirective, - VariableDeclaration, -} - -node_group! { - TypeName; - - ArrayTypeName, - ElementaryTypeName, - FunctionTypeName, - Mapping, - UserDefinedTypeName, -} - -// TODO: Better name -node_group! { - UserDefinedTypeNameOrIdentifierPath; - - UserDefinedTypeName, - IdentifierPath, -} - -// TODO: Better name -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum BlockOrStatement { - Statement(Statement), - Block(Block), -} - -// TODO: Better name -node_group! { - ExpressionOrVariableDeclarationStatement; - - ExpressionStatement, - VariableDeclarationStatement -} - -// TODO: Better name -node_group! { - IdentifierOrIdentifierPath; - - Identifier, - IdentifierPath -} - -ast_node!( - /// A contract definition. - struct ContractDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - #[serde(default, rename = "abstract")] - is_abstract: bool, - base_contracts: Vec<InheritanceSpecifier>, - canonical_name: Option<String>, - contract_dependencies: Vec<usize>, - #[serde(rename = "contractKind")] - kind: ContractKind, - documentation: Option<Documentation>, - fully_implemented: bool, - linearized_base_contracts: Vec<usize>, - nodes: Vec<ContractDefinitionPart>, - scope: usize, - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - used_errors: Vec<usize>, - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - used_events: Vec<usize>, - #[serde(default, rename = "internalFunctionIDs")] - internal_function_ids: BTreeMap<usize, usize>, - } -); - -/// All Solidity contract kinds. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum ContractKind { - /// A normal contract. - Contract, - /// An interface. - Interface, - /// A library. - Library, -} - -ast_node!( - /// An inheritance specifier. - struct InheritanceSpecifier { - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - arguments: Vec<Expression>, - base_name: UserDefinedTypeNameOrIdentifierPath, - } -); - -expr_node!( - /// An assignment expression. - struct Assignment { - #[serde(rename = "leftHandSide")] - lhs: Expression, - operator: AssignmentOperator, - #[serde(rename = "rightHandSide")] - rhs: Expression, - } -); - -/// Assignment operators.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub enum AssignmentOperator { - /// Simple assignment (`=`) - #[serde(rename = "=")] - Assign, - /// Add and assign (`+=`) - #[serde(rename = "+=")] - AddAssign, - /// Subtract and assign (`-=`) - #[serde(rename = "-=")] - SubAssign, - /// Multiply and assign (`*=`) - #[serde(rename = "*=")] - MulAssign, - /// Divide and assign (`/=`) - #[serde(rename = "/=")] - DivAssign, - /// Modulo and assign (`%=`) - #[serde(rename = "%=")] - ModAssign, - /// Bitwise or and assign (`|=`) - #[serde(rename = "|=")] - OrAssign, - /// Bitwise and and assign (`&=`) - #[serde(rename = "&=")] - AndAssign, - /// Bitwise xor and assign (`^=`) - #[serde(rename = "^=")] - XorAssign, - /// Right shift and assign (`>>=`) - #[serde(rename = ">>=")] - ShrAssign, - /// Left shift and assign (`<<=`) - #[serde(rename = "<<=")] - ShlAssign, -} - -expr_node!( - /// A binary operation. - struct BinaryOperation { - common_type: TypeDescriptions, - #[serde(rename = "leftExpression")] - lhs: Expression, - operator: BinaryOperator, - #[serde(rename = "rightExpression")] - rhs: Expression, - } -); - -/// Binary operators. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub enum BinaryOperator { - /// Addition (`+`) - #[serde(rename = "+")] - Add, - /// Subtraction (`-`) - #[serde(rename = "-")] - Sub, - /// Multiplication (`*`) - #[serde(rename = "*")] - Mul, - /// Division (`/`) - #[serde(rename = "/")] - Div, - /// Modulo (`%`) - #[serde(rename = "%")] - Mod, - /// Exponentiation (`**`) - #[serde(rename = "**")] - Pow, - /// Logical and (`&&`) - #[serde(rename = "&&")] - And, - /// Logical or (`||`) - #[serde(rename = "||")] - Or, - /// Not equals (`!=`) - #[serde(rename = "!=")] - NotEqual, - /// Equals (`==`) - #[serde(rename = "==")] - Equal, - /// Less than (`<`) - #[serde(rename = "<")] - LessThan, - /// Less than or equal (`<=`) - #[serde(rename = "<=")] - LessThanOrEqual, - /// Greater than (`>`) - #[serde(rename = ">")] - GreaterThan, - /// Greater than or equal (`>=`) - #[serde(rename = ">=")] - GreaterThanOrEqual, - /// Bitwise xor (`^`) - #[serde(rename = "^")] - Xor, - /// Bitwise not (`~`) - #[serde(rename = "~")] - BitNot, - /// Bitwise and (`&`) - #[serde(rename = "&")] - BitAnd, - /// Bitwise or (`|`) - #[serde(rename = "|")] - BitOr, - /// Shift left (`<<`) - #[serde(rename = "<<")] - Shl, - /// Shift right (`>>`) - #[serde(rename = ">>")] - Shr, -} - -expr_node!( - /// A conditional expression. - struct Conditional { - /// The condition. - condition: Expression, - /// The expression to evaluate if falsy. - false_expression: Expression, - /// The expression to evaluate if truthy. - true_expression: Expression, - } -); - -expr_node!( - struct ElementaryTypeNameExpression { - type_name: ElementaryOrRawTypeName, - } -); - -// TODO: Better name -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum ElementaryOrRawTypeName { - /// An [ElementaryTypeName] node that describes the type. - /// - /// This variant applies to newer compiler versions. - ElementaryTypeName(ElementaryTypeName), - /// A string representing the type name. - /// - /// This variant applies to older compiler versions. - Raw(String), -} - -ast_node!( - struct ElementaryTypeName { - type_descriptions: TypeDescriptions, - name: String, - state_mutability: Option<StateMutability>, - } -); - -expr_node!( - /// A function call expression.
- struct FunctionCall { - arguments: Vec<Expression>, - expression: Expression, - kind: FunctionCallKind, - names: Vec<String>, - #[serde(default)] - try_call: bool, - } -); - -/// Function call kinds. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum FunctionCallKind { - /// A regular function call. - FunctionCall, - /// A type conversion (e.g. `bytes(x)`). - TypeConversion, - /// A struct constructor call (e.g. `MyStruct({ ... })`). - StructConstructorCall, -} - -expr_node!( - /// A function call options expression (e.g. `x.f{gas: 1}`). - struct FunctionCallOptions { - expression: Expression, - names: Vec<String>, - options: Vec<Expression>, - } -); - -ast_node!( - /// An identifier. - struct Identifier { - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - argument_types: Vec<TypeDescriptions>, - name: String, - overloaded_declarations: Vec<isize>, - referenced_declaration: Option<isize>, - type_descriptions: TypeDescriptions, - } -); - -expr_node!( - /// An index access. - struct IndexAccess { - base_expression: Expression, - index_expression: Option<Expression>, - } -); - -expr_node!( - /// An index range access. - struct IndexRangeAccess { - base_expression: Expression, - start_expression: Option<Expression>, - end_expression: Option<Expression>, - } -); - -expr_node!( - /// A literal value. - struct Literal { - // TODO - hex_value: String, - kind: LiteralKind, - subdenomination: Option<String>, // TODO - value: Option<String>, // TODO - } -); - -/// Literal kinds. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum LiteralKind { - /// A boolean. - Bool, - /// A number. - Number, - /// A string. - String, - /// A hexadecimal string. - HexString, - /// A unicode string. - UnicodeString, -} - -expr_node!( - /// Member access. - struct MemberAccess { - expression: Expression, - member_name: String, - referenced_declaration: Option<isize>, - } -); - -expr_node!( - /// A `new` expression. - struct NewExpression { - type_name: TypeName, - } -); - -ast_node!( - /// An array type name. - struct ArrayTypeName { - type_descriptions: TypeDescriptions, - base_type: TypeName, - length: Option<Expression>, - } -); - -ast_node!( - /// A function type name. - struct FunctionTypeName { - type_descriptions: TypeDescriptions, - parameter_types: ParameterList, - return_parameter_types: ParameterList, - state_mutability: StateMutability, - visibility: Visibility, - } -); - -ast_node!( - /// A parameter list. - struct ParameterList { - parameters: Vec<VariableDeclaration>, - } -); - -ast_node!( - /// A variable declaration. - struct VariableDeclaration { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - base_functions: Vec<usize>, - /// Marks whether or not the variable is a constant before Solidity 0.7.x. - /// - /// After 0.7.x you must use `mutability`. For cross-version compatibility use - /// [`VariableDeclaration::mutability()`]. - #[serde(default)] - constant: bool, - /// Marks whether or not the variable is a state variable before Solidity 0.7.x. - /// - /// After 0.7.x you must use `mutability`. For cross-version compatibility use - /// [`VariableDeclaration::mutability()`]. - #[serde(default)] - state_variable: bool, - documentation: Option<Documentation>, - function_selector: Option<String>, // TODO - #[serde(default)] - indexed: bool, - /// Marks the variable's mutability from Solidity 0.7.x onwards. - /// For cross-version compatibility use [`VariableDeclaration::mutability()`].
- #[serde(default)] - mutability: Option<Mutability>, - overrides: Option<OverrideSpecifier>, - scope: usize, - storage_location: StorageLocation, - type_descriptions: TypeDescriptions, - type_name: Option<TypeName>, - value: Option<Expression>, - visibility: Visibility, - } -); - -impl VariableDeclaration { - /// Returns the mutability of the variable that was declared. - /// - /// This is a helper to check variable mutability across Solidity versions. - pub fn mutability(&self) -> &Mutability { - if let Some(mutability) = &self.mutability { - mutability - } else if self.constant { - &Mutability::Constant - } else if self.state_variable { - &Mutability::Mutable - } else { - unreachable!() - } - } -} - -ast_node!( - /// Structured documentation (NatSpec). - struct StructuredDocumentation { - text: String, - } -); - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum Documentation { - Structured(StructuredDocumentation), - Raw(String), -} - -ast_node!( - /// An override specifier. - struct OverrideSpecifier { - overrides: Vec<UserDefinedTypeNameOrIdentifierPath>, - } -); - -ast_node!( - /// A user defined type name. - struct UserDefinedTypeName { - type_descriptions: TypeDescriptions, - contract_scope: Option<String>, // TODO - name: Option<String>, - path_node: Option<IdentifierPath>, - referenced_declaration: isize, - } -); - -ast_node!( - /// An identifier path. - struct IdentifierPath { - name: String, - referenced_declaration: isize, - } -); - -ast_node!( - /// A mapping type. - struct Mapping { - type_descriptions: TypeDescriptions, - key_type: TypeName, - value_type: TypeName, - } -); - -expr_node!( - /// A tuple expression. - struct TupleExpression { - components: Vec<Option<Expression>>, - is_inline_array: bool, - } -); - -expr_node!( - /// A unary operation. - struct UnaryOperation { - operator: UnaryOperator, - /// Whether the unary operator is before or after the expression (e.g. `x++` vs. `++x`) - prefix: bool, - sub_expression: Expression, - } -); - -/// Unary operators. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub enum UnaryOperator { - /// Increment (`++`) - #[serde(rename = "++")] - Increment, - /// Decrement (`--`) - #[serde(rename = "--")] - Decrement, - /// Negate (`-`) - #[serde(rename = "-")] - Negate, - /// Not (`!`) - #[serde(rename = "!")] - Not, - /// Bitwise not (`~`) - #[serde(rename = "~")] - BitNot, - /// `delete` - #[serde(rename = "delete")] - Delete, -} - -ast_node!( - /// An enum definition. - struct EnumDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - canonical_name: String, - members: Vec<EnumValue>, - } -); - -ast_node!( - /// An enum value. - struct EnumValue { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - } -); - -ast_node!( - /// A custom error definition. - struct ErrorDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - documentation: Option<Documentation>, - error_selector: Option<String>, // TODO - parameters: ParameterList, - } -); - -ast_node!( - /// An event definition. - struct EventDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - anonymous: bool, - event_selector: Option<String>, // TODO - documentation: Option<Documentation>, - parameters: ParameterList, - } -); - -ast_node!( - /// A function definition.
- struct FunctionDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - base_functions: Vec<usize>, - body: Option<Block>, - documentation: Option<Documentation>, - function_selector: Option<String>, // TODO - implemented: bool, - modifiers: Vec<ModifierInvocation>, - overrides: Option<OverrideSpecifier>, - parameters: ParameterList, - return_parameters: ParameterList, - scope: usize, - visibility: Visibility, - /// The kind of function this node defines. Only valid for Solidity versions 0.5.x and - /// above. - /// - /// For cross-version compatibility use [`FunctionDefinition::kind()`]. - kind: Option<FunctionKind>, - /// The state mutability of the function. - /// - /// Note: This was introduced in Solidity 0.5.x. For cross-version compatibility use - /// [`FunctionDefinition::state_mutability()`]. - #[serde(default)] - state_mutability: Option<StateMutability>, - #[serde(default, rename = "virtual")] - is_virtual: bool, - /// Whether or not this function is the constructor. Only valid for Solidity versions below - /// 0.5.x. - /// - /// After 0.5.x you must use `kind`. For cross-version compatibility use - /// [`FunctionDefinition::kind()`]. - #[serde(default)] - is_constructor: bool, - /// Whether or not this function is constant (view or pure). Only valid for Solidity - /// versions below 0.5.x. - /// - /// After 0.5.x you must use `state_mutability`. For cross-version compatibility use - /// [`FunctionDefinition::state_mutability()`]. - #[serde(default)] - is_declared_const: bool, - /// Whether or not this function is payable. Only valid for Solidity versions below - /// 0.5.x. - /// - /// After 0.5.x you must use `state_mutability`. For cross-version compatibility use - /// [`FunctionDefinition::state_mutability()`]. - #[serde(default)] - is_payable: bool, - } -); - -impl FunctionDefinition { - /// The kind of function this node defines. - pub fn kind(&self) -> &FunctionKind { - if let Some(kind) = &self.kind { - kind - } else if self.is_constructor { - &FunctionKind::Constructor - } else { - &FunctionKind::Function - } - } - - /// The state mutability of the function. - /// - /// Note: Before Solidity 0.5.x, this is an approximation, as there was no distinction between - /// `view` and `pure`. - pub fn state_mutability(&self) -> &StateMutability { - if let Some(state_mutability) = &self.state_mutability { - state_mutability - } else if self.is_declared_const { - &StateMutability::View - } else if self.is_payable { - &StateMutability::Payable - } else { - &StateMutability::Nonpayable - } - } -} - -/// Function kinds. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum FunctionKind { - /// A contract function. - Function, - /// A receive function. - Receive, - /// A constructor. - Constructor, - /// A fallback function. - Fallback, - /// A free-standing function. - FreeFunction, -} - -ast_node!( - /// A block of statements. - struct Block { - documentation: Option<String>, - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - statements: Vec<Statement>, - } -); - -stmt_node!( - /// The break keyword. - struct Break {} -); - -stmt_node!( - /// The continue keyword. - struct Continue {} -); - -stmt_node!( - /// A do while statement. - struct DoWhileStatement { - body: Block, - condition: Expression, - } -); - -stmt_node!( - /// An emit statement. - struct EmitStatement { - event_call: FunctionCall, - } -); - -stmt_node!( - /// An expression statement.
- struct ExpressionStatement { - expression: Expression, - } -); - -stmt_node!( - /// A for statement. - struct ForStatement { - body: BlockOrStatement, - condition: Option<Expression>, - initialization_expression: Option<ExpressionOrVariableDeclarationStatement>, - loop_expression: Option<ExpressionStatement>, - } -); - -stmt_node!( - /// A variable declaration statement. - struct VariableDeclarationStatement { - assignments: Vec<Option<usize>>, - declarations: Vec<Option<VariableDeclaration>>, - initial_value: Option<Expression>, - } -); - -stmt_node!( - /// An if statement. - struct IfStatement { - condition: Expression, - false_body: Option<BlockOrStatement>, - true_body: BlockOrStatement, - } -); - -ast_node!( - /// A block of inline assembly. - /// - /// Refer to the [yul] module for Yul AST nodes. - struct InlineAssembly { - documentation: Option<String>, - #[serde(rename = "AST")] - ast: Option<YulBlock>, - operations: Option<String>, - // TODO: We need this camel case for the AST, but pascal case other places in ethers-solc - //evm_version: EvmVersion, - #[serde(deserialize_with = "utils::deserialize_external_assembly_references")] - external_references: Vec<ExternalInlineAssemblyReference>, - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - flags: Vec<InlineAssemblyFlag>, - } -); - -/// A reference to an external variable or slot in an inline assembly block. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ExternalInlineAssemblyReference { - #[serde(with = "serde_helpers::display_from_str")] - pub src: SourceLocation, - pub declaration: usize, - #[serde(default)] - pub offset: bool, - #[serde(default)] - pub slot: bool, - #[serde(default)] - pub length: bool, - pub value_size: usize, - pub suffix: Option<AssemblyReferenceSuffix>, -} - -/// An assembly reference suffix. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum AssemblyReferenceSuffix { - /// The reference refers to a storage slot. - Slot, - /// The reference refers to an offset. - Offset, - /// The reference refers to a length. - Length, -} - -impl fmt::Display for AssemblyReferenceSuffix { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Slot => f.write_str("slot"), - Self::Offset => f.write_str("offset"), - Self::Length => f.write_str("length"), - } - } -} - -/// Inline assembly flags. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub enum InlineAssemblyFlag { - #[serde(rename = "memory-safe")] - MemorySafe, -} - -stmt_node!( - /// A placeholder statement (`_`) - struct PlaceholderStatement {} -); - -stmt_node!( - /// A return statement. - struct Return { - expression: Option<Expression>, - function_return_parameters: Option<usize>, - } -); - -stmt_node!( - /// A revert statement. - struct RevertStatement { - error_call: FunctionCall, - } -); - -stmt_node!( - /// A try/catch statement. - struct TryStatement { - clauses: Vec<TryCatchClause>, - external_call: FunctionCall, - } -); - -ast_node!( - /// A try/catch clause. - struct TryCatchClause { - block: Block, - error_name: String, - parameters: Option<ParameterList>, - } -); - -stmt_node!( - /// An unchecked block. - struct UncheckedBlock { - statements: Vec<Statement>, - } -); - -stmt_node!( - /// A while statement. - struct WhileStatement { - body: BlockOrStatement, - condition: Expression, - } -); - -ast_node!( - /// A modifier or base constructor invocation. - struct ModifierInvocation { - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - arguments: Vec<Expression>, - kind: Option<ModifierInvocationKind>, - modifier_name: IdentifierOrIdentifierPath, - } -); - -/// Modifier invocation kinds.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum ModifierInvocationKind { - /// A regular modifier invocation. - ModifierInvocation, - /// A base constructor invocation. - BaseConstructorSpecifier, -} - -ast_node!( - /// A modifier definition. - struct ModifierDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - base_modifiers: Vec<usize>, - body: Option<Block>, - documentation: Option<Documentation>, - overrides: Option<OverrideSpecifier>, - parameters: ParameterList, - #[serde(default, rename = "virtual")] - is_virtual: bool, - visibility: Visibility, - } -); - -ast_node!( - /// A struct definition. - struct StructDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - canonical_name: String, - members: Vec<VariableDeclaration>, - scope: usize, - visibility: Visibility, - } -); - -ast_node!( - /// A user defined value type definition. - struct UserDefinedValueTypeDefinition { - name: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - canonical_name: Option<String>, - underlying_type: TypeName, - } -); - -ast_node!( - /// A using for directive. - struct UsingForDirective { - #[serde(default, deserialize_with = "serde_helpers::default_for_null")] - function_list: Vec<UsingForFunctionItem>, - #[serde(default)] - global: bool, - library_name: Option<UserDefinedTypeNameOrIdentifierPath>, - type_name: Option<TypeName>, - } -); - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum UsingForFunctionItem { - Function(FunctionIdentifierPath), - OverloadedOperator(OverloadedOperator), -} - -/// A wrapper around [IdentifierPath] for the [UsingForDirective]. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct FunctionIdentifierPath { - pub function: IdentifierPath, -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct OverloadedOperator { - pub definition: IdentifierPath, - pub operator: String, -} - -ast_node!( - /// An import directive. - struct ImportDirective { - absolute_path: String, - file: String, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - name_location: Option<SourceLocation>, - scope: usize, - source_unit: usize, - symbol_aliases: Vec<SymbolAlias>, - unit_alias: String, - } -); - -/// A symbol alias. -/// -/// Symbol aliases can be defined using the [ImportDirective]. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct SymbolAlias { - pub foreign: Identifier, - pub local: Option<String>, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - pub name_location: Option<SourceLocation>, -} - -ast_node!( - /// A pragma directive. - struct PragmaDirective { - literals: Vec<String>, - } -); - -#[cfg(test)] -mod tests { - use super::*; - use std::{fs, path::Path}; - - #[test] - fn can_parse_ast() { - fs::read_dir(Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../test-data").join("ast")) - .unwrap() - .for_each(|path| { - let path = path.unwrap().path(); - let path_str = path.to_string_lossy(); - - let input = fs::read_to_string(&path).unwrap(); - let deserializer = &mut serde_json::Deserializer::from_str(&input); - let result: Result<SourceUnit, _> = serde_path_to_error::deserialize(deserializer); - match result { - Err(e) => { - println!("... {path_str} fail: {e}"); - panic!(); - } - Ok(_) => { - println!("...
{path_str} ok"); - } - } - }) - } -} diff --git a/crates/artifacts/solc/src/ast/utils.rs b/crates/artifacts/solc/src/ast/utils.rs deleted file mode 100644 index 8dd2a620..00000000 --- a/crates/artifacts/solc/src/ast/utils.rs +++ /dev/null @@ -1,26 +0,0 @@ -use super::ExternalInlineAssemblyReference; -use serde::{Deserialize, Deserializer, Serialize}; -use std::collections::BTreeMap; - -pub fn deserialize_external_assembly_references<'de, D>( - deserializer: D, -) -> Result, D::Error> -where - D: Deserializer<'de>, -{ - #[derive(Serialize, Deserialize)] - #[serde(untagged)] - enum ExternalReferencesHelper { - Plain(Vec), - /// Older solc versions produce external references as arrays of mappings {"variable" => - /// external reference object}, so we have to handle this. - Map(Vec>), - } - - ExternalReferencesHelper::deserialize(deserializer).map(|v| match v { - ExternalReferencesHelper::Plain(vec) => vec, - ExternalReferencesHelper::Map(vec) => { - vec.into_iter().flat_map(|v| v.into_values()).collect() - } - }) -} diff --git a/crates/artifacts/solc/src/ast/visitor.rs b/crates/artifacts/solc/src/ast/visitor.rs deleted file mode 100644 index d8064ff0..00000000 --- a/crates/artifacts/solc/src/ast/visitor.rs +++ /dev/null @@ -1,623 +0,0 @@ -use super::*; - -pub trait Visitor { - fn visit_source_unit(&mut self, _source_unit: &SourceUnit) {} - fn visit_import_directive(&mut self, _directive: &ImportDirective) {} - fn visit_pragma_directive(&mut self, _directive: &PragmaDirective) {} - fn visit_block(&mut self, _block: &Block) {} - fn visit_statement(&mut self, _statement: &Statement) {} - fn visit_expression(&mut self, _expression: &Expression) {} - fn visit_function_call(&mut self, _function_call: &FunctionCall) {} - fn visit_user_defined_type_name(&mut self, _type_name: &UserDefinedTypeName) {} - fn visit_identifier_path(&mut self, _identifier_path: &IdentifierPath) {} - fn visit_type_name(&mut self, _type_name: &TypeName) {} - fn visit_parameter_list(&mut self, _parameter_list: &ParameterList) {} - fn visit_function_definition(&mut self, _definition: &FunctionDefinition) {} - fn visit_enum_definition(&mut self, _definition: &EnumDefinition) {} - fn visit_error_definition(&mut self, _definition: &ErrorDefinition) {} - fn visit_event_definition(&mut self, _definition: &EventDefinition) {} - fn visit_struct_definition(&mut self, _definition: &StructDefinition) {} - fn visit_modifier_definition(&mut self, _definition: &ModifierDefinition) {} - fn visit_variable_declaration(&mut self, _declaration: &VariableDeclaration) {} - fn visit_overrides(&mut self, _specifier: &OverrideSpecifier) {} - fn visit_user_defined_value_type(&mut self, _value_type: &UserDefinedValueTypeDefinition) {} - fn visit_contract_definition(&mut self, _definition: &ContractDefinition) {} - fn visit_using_for(&mut self, _directive: &UsingForDirective) {} - fn visit_unary_operation(&mut self, _unary_op: &UnaryOperation) {} - fn visit_binary_operation(&mut self, _binary_op: &BinaryOperation) {} - fn visit_conditional(&mut self, _conditional: &Conditional) {} - fn visit_tuple_expression(&mut self, _tuple_expression: &TupleExpression) {} - fn visit_new_expression(&mut self, _new_expression: &NewExpression) {} - fn visit_assignment(&mut self, _assignment: &Assignment) {} - fn visit_identifier(&mut self, _identifier: &Identifier) {} - fn visit_index_access(&mut self, _index_access: &IndexAccess) {} - fn visit_index_range_access(&mut self, _index_range_access: &IndexRangeAccess) {} - fn visit_while_statement(&mut self, 
_while_statement: &WhileStatement) {} - fn visit_for_statement(&mut self, _for_statement: &ForStatement) {} - fn visit_if_statement(&mut self, _if_statement: &IfStatement) {} - fn visit_do_while_statement(&mut self, _do_while_statement: &DoWhileStatement) {} - fn visit_emit_statement(&mut self, _emit_statement: &EmitStatement) {} - fn visit_unchecked_block(&mut self, _unchecked_block: &UncheckedBlock) {} - fn visit_try_statement(&mut self, _try_statement: &TryStatement) {} - fn visit_revert_statement(&mut self, _revert_statement: &RevertStatement) {} - fn visit_member_access(&mut self, _member_access: &MemberAccess) {} - fn visit_mapping(&mut self, _mapping: &Mapping) {} - fn visit_elementary_type_name(&mut self, _elementary_type_name: &ElementaryTypeName) {} - fn visit_literal(&mut self, _literal: &Literal) {} - fn visit_function_type_name(&mut self, _function_type_name: &FunctionTypeName) {} - fn visit_array_type_name(&mut self, _array_type_name: &ArrayTypeName) {} - fn visit_function_call_options(&mut self, _function_call: &FunctionCallOptions) {} - fn visit_return(&mut self, _return: &Return) {} - fn visit_inheritance_specifier(&mut self, _specifier: &InheritanceSpecifier) {} - fn visit_modifier_invocation(&mut self, _invocation: &ModifierInvocation) {} - fn visit_inline_assembly(&mut self, _assembly: &InlineAssembly) {} - fn visit_external_assembly_reference(&mut self, _ref: &ExternalInlineAssemblyReference) {} -} - -pub trait Walk { - fn walk(&self, visitor: &mut dyn Visitor); -} - -macro_rules! impl_walk { - // Implement `Walk` for a type, calling the given function. - ($ty:ty, | $val:ident, $visitor:ident | $e:expr) => { - impl Walk for $ty { - fn walk(&self, visitor: &mut dyn Visitor) { - let $val = self; - let $visitor = visitor; - $e - } - } - }; - ($ty:ty, $func:ident) => { - impl_walk!($ty, |obj, visitor| { - visitor.$func(obj); - }); - }; - ($ty:ty, $func:ident, | $val:ident, $visitor:ident | $e:expr) => { - impl_walk!($ty, |$val, $visitor| { - $visitor.$func($val); - $e - }); - }; -} - -impl_walk!(SourceUnit, visit_source_unit, |source_unit, visitor| { - source_unit.nodes.iter().for_each(|part| { - part.walk(visitor); - }); -}); - -impl_walk!(SourceUnitPart, |part, visitor| { - match part { - SourceUnitPart::ContractDefinition(contract) => { - contract.walk(visitor); - } - SourceUnitPart::UsingForDirective(directive) => { - directive.walk(visitor); - } - SourceUnitPart::ErrorDefinition(error) => { - error.walk(visitor); - } - SourceUnitPart::StructDefinition(struct_) => { - struct_.walk(visitor); - } - SourceUnitPart::VariableDeclaration(declaration) => { - declaration.walk(visitor); - } - SourceUnitPart::FunctionDefinition(function) => { - function.walk(visitor); - } - SourceUnitPart::UserDefinedValueTypeDefinition(value_type) => { - value_type.walk(visitor); - } - SourceUnitPart::ImportDirective(directive) => { - directive.walk(visitor); - } - SourceUnitPart::EnumDefinition(enum_) => { - enum_.walk(visitor); - } - SourceUnitPart::PragmaDirective(directive) => { - directive.walk(visitor); - } - } -}); - -impl_walk!(ContractDefinition, visit_contract_definition, |contract, visitor| { - contract.base_contracts.iter().for_each(|base_contract| { - base_contract.walk(visitor); - }); - - for part in &contract.nodes { - match part { - ContractDefinitionPart::FunctionDefinition(function) => { - function.walk(visitor); - } - ContractDefinitionPart::ErrorDefinition(error) => { - error.walk(visitor); - } - ContractDefinitionPart::EventDefinition(event) => { - event.walk(visitor); - } 
- ContractDefinitionPart::StructDefinition(struct_) => { - struct_.walk(visitor); - } - ContractDefinitionPart::VariableDeclaration(declaration) => { - declaration.walk(visitor); - } - ContractDefinitionPart::ModifierDefinition(modifier) => { - modifier.walk(visitor); - } - ContractDefinitionPart::UserDefinedValueTypeDefinition(definition) => { - definition.walk(visitor); - } - ContractDefinitionPart::UsingForDirective(directive) => { - directive.walk(visitor); - } - ContractDefinitionPart::EnumDefinition(enum_) => { - enum_.walk(visitor); - } - } - } -}); - -impl_walk!(Expression, visit_expression, |expr, visitor| { - match expr { - Expression::FunctionCall(expression) => { - expression.walk(visitor); - } - Expression::MemberAccess(member_access) => { - member_access.walk(visitor); - } - Expression::IndexAccess(index_access) => { - index_access.walk(visitor); - } - Expression::UnaryOperation(unary_op) => { - unary_op.walk(visitor); - } - Expression::BinaryOperation(expression) => { - expression.walk(visitor); - } - Expression::Conditional(expression) => { - expression.walk(visitor); - } - Expression::TupleExpression(tuple) => { - tuple.walk(visitor); - } - Expression::NewExpression(expression) => { - expression.walk(visitor); - } - Expression::Assignment(expression) => { - expression.walk(visitor); - } - Expression::Identifier(identifier) => { - identifier.walk(visitor); - } - Expression::FunctionCallOptions(function_call) => { - function_call.walk(visitor); - } - Expression::IndexRangeAccess(range_access) => { - range_access.walk(visitor); - } - Expression::Literal(literal) => { - literal.walk(visitor); - } - Expression::ElementaryTypeNameExpression(type_name) => { - type_name.walk(visitor); - } - } -}); - -impl_walk!(Statement, visit_statement, |statement, visitor| { - match statement { - Statement::Block(block) => { - block.walk(visitor); - } - Statement::WhileStatement(statement) => { - statement.walk(visitor); - } - Statement::ForStatement(statement) => { - statement.walk(visitor); - } - Statement::IfStatement(statement) => { - statement.walk(visitor); - } - Statement::DoWhileStatement(statement) => { - statement.walk(visitor); - } - Statement::EmitStatement(statement) => { - statement.walk(visitor); - } - Statement::VariableDeclarationStatement(statement) => { - statement.walk(visitor); - } - Statement::ExpressionStatement(statement) => { - statement.walk(visitor); - } - Statement::UncheckedBlock(statement) => { - statement.walk(visitor); - } - Statement::TryStatement(statement) => { - statement.walk(visitor); - } - Statement::RevertStatement(statement) => { - statement.walk(visitor); - } - Statement::Return(statement) => { - statement.walk(visitor); - } - Statement::InlineAssembly(assembly) => { - assembly.walk(visitor); - } - Statement::Break(_) | Statement::Continue(_) | Statement::PlaceholderStatement(_) => {} - } -}); - -impl_walk!(FunctionDefinition, visit_function_definition, |function, visitor| { - function.parameters.walk(visitor); - function.return_parameters.walk(visitor); - - if let Some(overrides) = &function.overrides { - overrides.walk(visitor); - } - - if let Some(body) = &function.body { - body.walk(visitor); - } - - function.modifiers.iter().for_each(|m| m.walk(visitor)); -}); - -impl_walk!(ErrorDefinition, visit_error_definition, |error, visitor| { - error.parameters.walk(visitor); -}); - -impl_walk!(EventDefinition, visit_event_definition, |event, visitor| { - event.parameters.walk(visitor); -}); - -impl_walk!(StructDefinition, visit_struct_definition, |struct_, 
visitor| { - struct_.members.iter().for_each(|member| member.walk(visitor)); -}); - -impl_walk!(ModifierDefinition, visit_modifier_definition, |modifier, visitor| { - if let Some(body) = &modifier.body { - body.walk(visitor); - } - if let Some(override_) = &modifier.overrides { - override_.walk(visitor); - } - modifier.parameters.walk(visitor); -}); - -impl_walk!(VariableDeclaration, visit_variable_declaration, |declaration, visitor| { - if let Some(value) = &declaration.value { - value.walk(visitor); - } - - if let Some(type_name) = &declaration.type_name { - type_name.walk(visitor); - } -}); - -impl_walk!(OverrideSpecifier, visit_overrides, |override_, visitor| { - override_.overrides.iter().for_each(|type_name| { - type_name.walk(visitor); - }); -}); - -impl_walk!(UserDefinedValueTypeDefinition, visit_user_defined_value_type, |value_type, visitor| { - value_type.underlying_type.walk(visitor); -}); - -impl_walk!(FunctionCallOptions, visit_function_call_options, |function_call, visitor| { - function_call.expression.walk(visitor); - function_call.options.iter().for_each(|option| { - option.walk(visitor); - }); -}); - -impl_walk!(Return, visit_return, |return_, visitor| { - if let Some(expr) = return_.expression.as_ref() { - expr.walk(visitor); - } -}); - -impl_walk!(UsingForDirective, visit_using_for, |directive, visitor| { - if let Some(type_name) = &directive.type_name { - type_name.walk(visitor); - } - if let Some(library_name) = &directive.library_name { - library_name.walk(visitor); - } - for function in &directive.function_list { - function.walk(visitor); - } -}); - -impl_walk!(UnaryOperation, visit_unary_operation, |unary_op, visitor| { - unary_op.sub_expression.walk(visitor); -}); - -impl_walk!(BinaryOperation, visit_binary_operation, |binary_op, visitor| { - binary_op.lhs.walk(visitor); - binary_op.rhs.walk(visitor); -}); - -impl_walk!(Conditional, visit_conditional, |conditional, visitor| { - conditional.condition.walk(visitor); - conditional.true_expression.walk(visitor); - conditional.false_expression.walk(visitor); -}); - -impl_walk!(TupleExpression, visit_tuple_expression, |tuple_expression, visitor| { - tuple_expression.components.iter().filter_map(|component| component.as_ref()).for_each( - |component| { - component.walk(visitor); - }, - ); -}); - -impl_walk!(NewExpression, visit_new_expression, |new_expression, visitor| { - new_expression.type_name.walk(visitor); -}); - -impl_walk!(Assignment, visit_assignment, |assignment, visitor| { - assignment.lhs.walk(visitor); - assignment.rhs.walk(visitor); -}); -impl_walk!(IfStatement, visit_if_statement, |if_statement, visitor| { - if_statement.condition.walk(visitor); - if_statement.true_body.walk(visitor); - - if let Some(false_body) = &if_statement.false_body { - false_body.walk(visitor); - } -}); - -impl_walk!(IndexAccess, visit_index_access, |index_access, visitor| { - index_access.base_expression.walk(visitor); - if let Some(index_expression) = &index_access.index_expression { - index_expression.walk(visitor); - } -}); - -impl_walk!(IndexRangeAccess, visit_index_range_access, |index_range_access, visitor| { - index_range_access.base_expression.walk(visitor); - if let Some(start_expression) = &index_range_access.start_expression { - start_expression.walk(visitor); - } - if let Some(end_expression) = &index_range_access.end_expression { - end_expression.walk(visitor); - } -}); - -impl_walk!(WhileStatement, visit_while_statement, |while_statement, visitor| { - while_statement.condition.walk(visitor); - 
while_statement.body.walk(visitor); -}); - -impl_walk!(ForStatement, visit_for_statement, |for_statement, visitor| { - for_statement.body.walk(visitor); - if let Some(condition) = &for_statement.condition { - condition.walk(visitor); - } - - if let Some(loop_expression) = &for_statement.loop_expression { - loop_expression.walk(visitor); - } - - if let Some(initialization_expr) = &for_statement.initialization_expression { - initialization_expr.walk(visitor); - } -}); - -impl_walk!(DoWhileStatement, visit_do_while_statement, |do_while_statement, visitor| { - do_while_statement.body.walk(visitor); - do_while_statement.condition.walk(visitor); -}); - -impl_walk!(EmitStatement, visit_emit_statement, |emit_statement, visitor| { - emit_statement.event_call.walk(visitor); -}); - -impl_walk!(VariableDeclarationStatement, |stmt, visitor| { - stmt.declarations.iter().filter_map(|d| d.as_ref()).for_each(|declaration| { - declaration.walk(visitor); - }); - if let Some(initial_value) = &stmt.initial_value { - initial_value.walk(visitor); - } -}); - -impl_walk!(UncheckedBlock, visit_unchecked_block, |unchecked_block, visitor| { - unchecked_block.statements.iter().for_each(|statement| { - statement.walk(visitor); - }); -}); - -impl_walk!(TryStatement, visit_try_statement, |try_statement, visitor| { - try_statement.clauses.iter().for_each(|clause| { - clause.block.walk(visitor); - - if let Some(parameter_list) = &clause.parameters { - parameter_list.walk(visitor); - } - }); - - try_statement.external_call.walk(visitor); -}); - -impl_walk!(RevertStatement, visit_revert_statement, |revert_statement, visitor| { - revert_statement.error_call.walk(visitor); -}); - -impl_walk!(MemberAccess, visit_member_access, |member_access, visitor| { - member_access.expression.walk(visitor); -}); - -impl_walk!(FunctionCall, visit_function_call, |function_call, visitor| { - function_call.expression.walk(visitor); - function_call.arguments.iter().for_each(|argument| { - argument.walk(visitor); - }); -}); - -impl_walk!(Block, visit_block, |block, visitor| { - block.statements.iter().for_each(|statement| { - statement.walk(visitor); - }); -}); - -impl_walk!(UserDefinedTypeName, visit_user_defined_type_name, |type_name, visitor| { - if let Some(path_node) = &type_name.path_node { - path_node.walk(visitor); - } -}); - -impl_walk!(TypeName, visit_type_name, |type_name, visitor| { - match type_name { - TypeName::ElementaryTypeName(type_name) => { - type_name.walk(visitor); - } - TypeName::UserDefinedTypeName(type_name) => { - type_name.walk(visitor); - } - TypeName::Mapping(mapping) => { - mapping.walk(visitor); - } - TypeName::ArrayTypeName(array) => { - array.walk(visitor); - } - TypeName::FunctionTypeName(function) => { - function.walk(visitor); - } - } -}); - -impl_walk!(FunctionTypeName, visit_function_type_name, |function, visitor| { - function.parameter_types.walk(visitor); - function.return_parameter_types.walk(visitor); -}); - -impl_walk!(ParameterList, visit_parameter_list, |parameter_list, visitor| { - parameter_list.parameters.iter().for_each(|parameter| { - parameter.walk(visitor); - }); -}); - -impl_walk!(Mapping, visit_mapping, |mapping, visitor| { - mapping.key_type.walk(visitor); - mapping.value_type.walk(visitor); -}); - -impl_walk!(ArrayTypeName, visit_array_type_name, |array, visitor| { - array.base_type.walk(visitor); - if let Some(length) = &array.length { - length.walk(visitor); - } -}); - -impl_walk!(InheritanceSpecifier, visit_inheritance_specifier, |specifier, visitor| { - specifier.base_name.walk(visitor); 
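Tying the removed machinery together, a minimal consumer sketch (the `FunctionCounter` type and `count_functions` helper are hypothetical; they assume only the `Visitor` and `Walk` items in this hunk):

    struct FunctionCounter {
        count: usize,
    }

    impl Visitor for FunctionCounter {
        // Every other visit_* hook keeps its default no-op body.
        fn visit_function_definition(&mut self, _function: &FunctionDefinition) {
            self.count += 1;
        }
    }

    fn count_functions(ast: &SourceUnit) -> usize {
        let mut counter = FunctionCounter { count: 0 };
        ast.walk(&mut counter);
        counter.count
    }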
- specifier.arguments.iter().for_each(|arg| { - arg.walk(visitor); - }); -}); - -impl_walk!(ModifierInvocation, visit_modifier_invocation, |invocation, visitor| { - invocation.arguments.iter().for_each(|arg| arg.walk(visitor)); - invocation.modifier_name.walk(visitor); -}); - -impl_walk!(InlineAssembly, visit_inline_assembly, |assembly, visitor| { - assembly.external_references.iter().for_each(|reference| { - reference.walk(visitor); - }); -}); - -impl_walk!(ExternalInlineAssemblyReference, visit_external_assembly_reference); - -impl_walk!(ElementaryTypeName, visit_elementary_type_name); -impl_walk!(Literal, visit_literal); -impl_walk!(ImportDirective, visit_import_directive); -impl_walk!(PragmaDirective, visit_pragma_directive); -impl_walk!(IdentifierPath, visit_identifier_path); -impl_walk!(EnumDefinition, visit_enum_definition); -impl_walk!(Identifier, visit_identifier); - -impl_walk!(UserDefinedTypeNameOrIdentifierPath, |type_name, visitor| { - match type_name { - UserDefinedTypeNameOrIdentifierPath::UserDefinedTypeName(type_name) => { - type_name.walk(visitor); - } - UserDefinedTypeNameOrIdentifierPath::IdentifierPath(identifier_path) => { - identifier_path.walk(visitor); - } - } -}); - -impl_walk!(BlockOrStatement, |block_or_statement, visitor| { - match block_or_statement { - BlockOrStatement::Block(block) => { - block.walk(visitor); - } - BlockOrStatement::Statement(statement) => { - statement.walk(visitor); - } - } -}); - -impl_walk!(ExpressionOrVariableDeclarationStatement, |val, visitor| { - match val { - ExpressionOrVariableDeclarationStatement::ExpressionStatement(expression) => { - expression.walk(visitor); - } - ExpressionOrVariableDeclarationStatement::VariableDeclarationStatement(stmt) => { - stmt.walk(visitor); - } - } -}); - -impl_walk!(IdentifierOrIdentifierPath, |val, visitor| { - match val { - IdentifierOrIdentifierPath::Identifier(ident) => { - ident.walk(visitor); - } - IdentifierOrIdentifierPath::IdentifierPath(path) => { - path.walk(visitor); - } - } -}); - -impl_walk!(ExpressionStatement, |expression_statement, visitor| { - expression_statement.expression.walk(visitor); -}); - -impl_walk!(ElementaryTypeNameExpression, |type_name, visitor| { - type_name.type_name.walk(visitor); -}); - -impl_walk!(ElementaryOrRawTypeName, |type_name, visitor| { - match type_name { - ElementaryOrRawTypeName::ElementaryTypeName(type_name) => { - type_name.walk(visitor); - } - ElementaryOrRawTypeName::Raw(_) => {} - } -}); - -impl_walk!(UsingForFunctionItem, |item, visitor| { - match item { - UsingForFunctionItem::Function(func) => { - func.function.walk(visitor); - } - UsingForFunctionItem::OverloadedOperator(operator) => { - operator.walk(visitor); - } - } -}); - -impl_walk!(OverloadedOperator, |operator, visitor| { - operator.definition.walk(visitor); -}); diff --git a/crates/artifacts/solc/src/ast/yul.rs b/crates/artifacts/solc/src/ast/yul.rs deleted file mode 100644 index 57dacb98..00000000 --- a/crates/artifacts/solc/src/ast/yul.rs +++ /dev/null @@ -1,192 +0,0 @@ -use super::{macros::node_group, misc::SourceLocation}; -use crate::serde_helpers; -use serde::{Deserialize, Serialize}; - -node_group! { - YulStatement; - - YulAssignment, - YulBlock, - YulBreak, - YulContinue, - YulExpressionStatement, - YulLeave, - YulForLoop, - YulFunctionDefinition, - YulIf, - YulSwitch, - YulVariableDeclaration, -} - -node_group! { - YulExpression; - - YulFunctionCall, - YulIdentifier, - YulLiteral, -} - -/// A Yul block. 
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulBlock {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub statements: Vec<YulStatement>,
-}
-
-/// A Yul assignment statement.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct YulAssignment {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub value: YulExpression,
-    pub variable_names: Vec<YulIdentifier>,
-}
-
-/// A Yul function call.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct YulFunctionCall {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub arguments: Vec<YulExpression>,
-    pub function_name: YulIdentifier,
-}
-
-/// A Yul identifier.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulIdentifier {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub name: String,
-}
-
-/// A literal Yul value.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct YulLiteral {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub hex_value: Option<String>, // TODO
-    pub value: Option<String>,     // TODO
-    pub kind: YulLiteralKind,
-    pub type_name: Option<String>, // TODO
-}
-
-/// Yul literal value kinds.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum YulLiteralKind {
-    /// A number literal.
-    Number,
-    /// A string literal.
-    String,
-    /// A boolean literal.
-    Bool,
-}
-
-/// A Yul keyword.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulKeyword {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-}
-
-/// The Yul break keyword.
-pub type YulBreak = YulKeyword;
-/// The Yul continue keyword.
-pub type YulContinue = YulKeyword;
-/// The Yul leave keyword.
-pub type YulLeave = YulKeyword;
-
-/// A Yul expression statement.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulExpressionStatement {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub expression: YulExpression,
-}
-
-/// A Yul for loop.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulForLoop {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub body: YulBlock,
-    pub condition: YulExpression,
-    pub post: YulBlock,
-    pub pre: YulBlock,
-}
-
-/// A Yul function definition.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct YulFunctionDefinition {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub body: YulBlock,
-    pub name: String,
-    #[serde(default)]
-    pub parameters: Vec<YulTypedName>,
-    #[serde(default)]
-    pub return_variables: Vec<YulTypedName>,
-}
-
-/// A Yul type name.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct YulTypedName {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub name: String,
-    #[serde(rename = "type")]
-    pub type_name: String, // TODO
-}
-
-/// A Yul if statement.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulIf {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub body: YulBlock,
-    pub condition: YulExpression,
-}
-
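Since these Yul nodes deserialize straight from solc's JSON AST, a hedged round-trip sketch (the JSON is hand-written to match the field layout above, not captured compiler output):

    fn parse_literal() -> Result<YulLiteral, serde_json::Error> {
        // `src` uses solc's `start:length:fileIndex` form, parsed via `display_from_str`;
        // the optional fields default to `None` when absent.
        let json = r#"{ "src": "0:2:0", "kind": "number", "value": "42" }"#;
        serde_json::from_str(json)
    }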
-/// A Yul switch statement.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulSwitch {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub cases: Vec<YulCase>,
-    pub expression: YulExpression,
-}
-
-/// A Yul switch statement case.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulCase {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub body: YulBlock,
-    pub value: YulCaseValue,
-}
-
-/// A Yul switch case value.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum YulCaseValue {
-    /// A case defined by a literal value.
-    YulLiteral(YulLiteral),
-    /// The default case
-    // TODO: How do we make this only match "default"?
-    Default(String),
-}
-
-/// A Yul variable declaration.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct YulVariableDeclaration {
-    #[serde(with = "serde_helpers::display_from_str")]
-    pub src: SourceLocation,
-    pub value: Option<YulExpression>,
-    pub variables: Vec<YulTypedName>,
-}
diff --git a/crates/artifacts/solc/src/bytecode.rs b/crates/artifacts/solc/src/bytecode.rs
deleted file mode 100644
index edbd3b32..00000000
--- a/crates/artifacts/solc/src/bytecode.rs
+++ /dev/null
@@ -1,503 +0,0 @@
-//! Bytecode related types.
-
-use crate::{
-    serde_helpers,
-    sourcemap::{self, SourceMap, SyntaxError},
-    FunctionDebugData, GeneratedSource, Offsets,
-};
-use alloy_primitives::{hex, Address, Bytes};
-use foundry_compilers_core::utils;
-use serde::{Deserialize, Serialize, Serializer};
-use std::collections::BTreeMap;
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Bytecode {
-    /// Debugging information at function level
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub function_debug_data: BTreeMap<String, FunctionDebugData>,
-    /// The bytecode as a hex string.
-    #[serde(serialize_with = "serialize_bytecode_without_prefix")]
-    pub object: BytecodeObject,
-    /// Opcodes list (string)
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub opcodes: Option<String>,
-    /// The source mapping as a string. See the source mapping definition.
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub source_map: Option<String>,
-    /// Array of sources generated by the compiler. Currently only contains a
-    /// single Yul file.
-    #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub generated_sources: Vec<GeneratedSource>,
-    /// If given, this is an unlinked object.
-    #[serde(default)]
-    pub link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>,
-}
-
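The `source_map` string above is solc's compressed `s:l:f:j` encoding; a hedged sketch of consuming it through the parsed form (`source_map()` is defined on the impls just below):

    fn mapped_elements(bytecode: &Bytecode) -> usize {
        // `None` when solc emitted no map; `Some(Err(_))` on a malformed one.
        match bytecode.source_map() {
            Some(Ok(map)) => map.len(),
            _ => 0,
        }
    }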
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct CompactBytecode {
-    /// The bytecode as a hex string.
-    pub object: BytecodeObject,
-    /// The source mapping as a string. See the source mapping definition.
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub source_map: Option<String>,
-    /// If given, this is an unlinked object.
-    #[serde(default)]
-    pub link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>,
-}
-
-impl CompactBytecode {
-    /// Returns a new `CompactBytecode` object that contains nothing, as is the case for
-    /// interfaces and standalone solidity files that don't contain any contract definitions
-    pub fn empty() -> Self {
-        Self { object: Default::default(), source_map: None, link_references: Default::default() }
-    }
-
-    /// Returns the parsed source map
-    ///
-    /// See also <https://docs.soliditylang.org/en/develop/internals/source_mappings.html>
-    pub fn source_map(&self) -> Option<Result<SourceMap, SyntaxError>> {
-        self.source_map.as_ref().map(|map| sourcemap::parse(map))
-    }
-
-    /// Tries to link the bytecode object with the `file` and `library` name.
-    /// Replaces all library placeholders with the given address.
-    ///
-    /// Returns true if the bytecode object is fully linked, false otherwise.
-    /// This is a noop if the bytecode object is already fully linked.
-    pub fn link(&mut self, file: &str, library: &str, address: Address) -> bool {
-        if !self.object.is_unlinked() {
-            return true;
-        }
-
-        if let Some((key, mut contracts)) = self.link_references.remove_entry(file) {
-            if contracts.remove(library).is_some() {
-                self.object.link(file, library, address);
-            }
-            if !contracts.is_empty() {
-                self.link_references.insert(key, contracts);
-            }
-            if self.link_references.is_empty() {
-                return self.object.resolve().is_some();
-            }
-        }
-        false
-    }
-
-    /// Returns the bytes of the bytecode object.
-    pub fn bytes(&self) -> Option<&Bytes> {
-        self.object.as_bytes()
-    }
-
-    /// Returns the underlying `Bytes` if the object is a valid bytecode.
-    pub fn into_bytes(self) -> Option<Bytes> {
-        self.object.into_bytes()
-    }
-}
-
-impl From<Bytecode> for CompactBytecode {
-    fn from(bcode: Bytecode) -> Self {
-        Self {
-            object: bcode.object,
-            source_map: bcode.source_map,
-            link_references: bcode.link_references,
-        }
-    }
-}
-
-impl From<CompactBytecode> for Bytecode {
-    fn from(bcode: CompactBytecode) -> Self {
-        Self {
-            object: bcode.object,
-            source_map: bcode.source_map,
-            link_references: bcode.link_references,
-            function_debug_data: Default::default(),
-            opcodes: Default::default(),
-            generated_sources: Default::default(),
-        }
-    }
-}
-
-impl From<BytecodeObject> for Bytecode {
-    fn from(object: BytecodeObject) -> Self {
-        Self {
-            object,
-            function_debug_data: Default::default(),
-            opcodes: Default::default(),
-            source_map: Default::default(),
-            generated_sources: Default::default(),
-            link_references: Default::default(),
-        }
-    }
-}
-
-impl Bytecode {
-    /// Returns the parsed source map
-    ///
-    /// See also <https://docs.soliditylang.org/en/develop/internals/source_mappings.html>
-    pub fn source_map(&self) -> Option<Result<SourceMap, SyntaxError>> {
-        self.source_map.as_ref().map(|map| sourcemap::parse(map))
-    }
-
-    /// Same as `Bytecode::link` but with fully qualified name (`file.sol:Math`)
-    pub fn link_fully_qualified(&mut self, name: &str, addr: Address) -> bool {
-        if let Some((file, lib)) = name.split_once(':') {
-            self.link(file, lib, addr)
-        } else {
-            false
-        }
-    }
-
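A hedged usage sketch for the linking flow defined here (hypothetical file and library names; `Address::ZERO` stands in for a real deployment address):

    fn link_math(bytecode: &mut Bytecode) -> bool {
        // Patches the `src/Math.sol:Math` placeholder and reports whether the
        // object ended up fully linked (a no-op when it already was).
        bytecode.link_fully_qualified("src/Math.sol:Math", Address::ZERO)
    }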
-    /// Tries to link the bytecode object with the `file` and `library` name.
-    /// Replaces all library placeholders with the given address.
-    ///
-    /// Returns true if the bytecode object is fully linked, false otherwise.
-    /// This is a noop if the bytecode object is already fully linked.
-    pub fn link(&mut self, file: &str, library: &str, address: Address) -> bool {
-        if !self.object.is_unlinked() {
-            return true;
-        }
-
-        if let Some((key, mut contracts)) = self.link_references.remove_entry(file) {
-            if contracts.remove(library).is_some() {
-                self.object.link(file, library, address);
-            }
-            if !contracts.is_empty() {
-                self.link_references.insert(key, contracts);
-            }
-            if self.link_references.is_empty() {
-                return self.object.resolve().is_some();
-            }
-        }
-        false
-    }
-
-    /// Links the bytecode object with all provided `(file, lib, addr)`
-    pub fn link_all<I, S, T>(&mut self, libs: I) -> bool
-    where
-        I: IntoIterator<Item = (S, T, Address)>,
-        S: AsRef<str>,
-        T: AsRef<str>,
-    {
-        for (file, lib, addr) in libs.into_iter() {
-            if self.link(file.as_ref(), lib.as_ref(), addr) {
-                return true;
-            }
-        }
-        false
-    }
-
-    /// Links the bytecode object with all provided `(fully_qualified, addr)`
-    pub fn link_all_fully_qualified<I, S>(&mut self, libs: I) -> bool
-    where
-        I: IntoIterator<Item = (S, Address)>,
-        S: AsRef<str>,
-    {
-        for (name, addr) in libs.into_iter() {
-            if self.link_fully_qualified(name.as_ref(), addr) {
-                return true;
-            }
-        }
-        false
-    }
-
-    /// Returns a reference to the underlying `Bytes` if the object is a valid bytecode.
-    pub fn bytes(&self) -> Option<&Bytes> {
-        self.object.as_bytes()
-    }
-
-    /// Returns the underlying `Bytes` if the object is a valid bytecode.
-    pub fn into_bytes(self) -> Option<Bytes> {
-        self.object.into_bytes()
-    }
-}
-
-/// Represents the bytecode of a contract that might not be fully linked yet.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum BytecodeObject {
-    /// Fully linked bytecode object.
-    #[serde(deserialize_with = "serde_helpers::deserialize_bytes")]
-    Bytecode(Bytes),
-    /// Bytecode as hex string that's not fully linked yet and contains library placeholders.
-    #[serde(with = "serde_helpers::string_bytes")]
-    Unlinked(String),
-}
-
-impl BytecodeObject {
-    /// Returns a reference to the underlying `Bytes` if the object is a valid bytecode.
-    pub fn as_bytes(&self) -> Option<&Bytes> {
-        match self {
-            Self::Bytecode(bytes) => Some(bytes),
-            Self::Unlinked(_) => None,
-        }
-    }
-
-    /// Returns the underlying `Bytes` if the object is a valid bytecode.
-    pub fn into_bytes(self) -> Option<Bytes> {
-        match self {
-            Self::Bytecode(bytes) => Some(bytes),
-            Self::Unlinked(_) => None,
-        }
-    }
-
-    /// Returns the number of bytes of the fully linked bytecode.
-    ///
-    /// Returns `0` if this object is unlinked.
-    pub fn bytes_len(&self) -> usize {
-        self.as_bytes().map(|b| b.as_ref().len()).unwrap_or_default()
-    }
-
-    /// Returns a reference to the underlying `String` if the object is unlinked.
-    pub fn as_str(&self) -> Option<&str> {
-        match self {
-            Self::Bytecode(_) => None,
-            Self::Unlinked(s) => Some(s.as_str()),
-        }
-    }
-
-    /// Returns the unlinked `String` if the object is unlinked.
-    pub fn into_unlinked(self) -> Option<String> {
-        match self {
-            Self::Bytecode(_) => None,
-            Self::Unlinked(code) => Some(code),
-        }
-    }
-
-    /// Whether this object is still unlinked.
-    pub fn is_unlinked(&self) -> bool {
-        matches!(self, Self::Unlinked(_))
-    }
-
-    /// Whether this object is a valid bytecode.
-    pub fn is_bytecode(&self) -> bool {
-        matches!(self, Self::Bytecode(_))
-    }
-
-    /// Returns `true` if the object is a valid bytecode and not empty.
-    ///
-    /// Returns `false` if the object is a valid but empty or unlinked bytecode.
- pub fn is_non_empty_bytecode(&self) -> bool { - self.as_bytes().map(|c| !c.0.is_empty()).unwrap_or_default() - } - - /// Tries to resolve the unlinked string object a valid bytecode object in place. - /// - /// Returns the string if it is a valid - pub fn resolve(&mut self) -> Option<&Bytes> { - if let Self::Unlinked(unlinked) = self { - if let Ok(linked) = hex::decode(unlinked) { - *self = Self::Bytecode(linked.into()); - } - } - self.as_bytes() - } - - /// Links using the fully qualified name of a library. - /// - /// The fully qualified library name is the path of its source file and the library name - /// separated by `:` like `file.sol:Math` - /// - /// This will replace all occurrences of the library placeholder with the given address. - /// - /// See also: - pub fn link_fully_qualified(&mut self, name: &str, addr: Address) -> &mut Self { - if let Self::Unlinked(ref mut unlinked) = self { - let place_holder = utils::library_hash_placeholder(name); - // the address as hex without prefix - let hex_addr = hex::encode(addr); - - // the library placeholder used to be the fully qualified name of the library instead of - // the hash. This is also still supported by `solc` so we handle this as well - let fully_qualified_placeholder = utils::library_fully_qualified_placeholder(name); - - *unlinked = unlinked - .replace(&format!("__{fully_qualified_placeholder}__"), &hex_addr) - .replace(&format!("__{place_holder}__"), &hex_addr) - } - self - } - - /// Links using the `file` and `library` names as fully qualified name `:`. - /// - /// See [`link_fully_qualified`](Self::link_fully_qualified). - pub fn link(&mut self, file: &str, library: &str, addr: Address) -> &mut Self { - self.link_fully_qualified(&format!("{file}:{library}"), addr) - } - - /// Links the bytecode object with all provided `(file, lib, addr)`. - pub fn link_all(&mut self, libs: I) -> &mut Self - where - I: IntoIterator, - S: AsRef, - T: AsRef, - { - for (file, lib, addr) in libs.into_iter() { - self.link(file.as_ref(), lib.as_ref(), addr); - } - self - } - - /// Returns whether the bytecode contains a matching placeholder using the qualified name. - pub fn contains_fully_qualified_placeholder(&self, name: &str) -> bool { - if let Self::Unlinked(unlinked) = self { - unlinked.contains(&utils::library_hash_placeholder(name)) - || unlinked.contains(&utils::library_fully_qualified_placeholder(name)) - } else { - false - } - } - - /// Returns whether the bytecode contains a matching placeholder. - pub fn contains_placeholder(&self, file: &str, library: &str) -> bool { - self.contains_fully_qualified_placeholder(&format!("{file}:{library}")) - } -} - -// Returns an empty bytecode object -impl Default for BytecodeObject { - fn default() -> Self { - Self::Bytecode(Default::default()) - } -} - -impl AsRef<[u8]> for BytecodeObject { - fn as_ref(&self) -> &[u8] { - match self { - Self::Bytecode(code) => code.as_ref(), - Self::Unlinked(code) => code.as_bytes(), - } - } -} - -/// This will serialize the bytecode data without a `0x` prefix, which the `ethers::types::Bytes` -/// adds by default. 
-/// -/// This ensures that we serialize bytecode data in the same way as solc does, See also -pub fn serialize_bytecode_without_prefix( - bytecode: &BytecodeObject, - s: S, -) -> Result -where - S: Serializer, -{ - match bytecode { - BytecodeObject::Bytecode(code) => s.serialize_str(&hex::encode(code)), - BytecodeObject::Unlinked(code) => s.serialize_str(code.strip_prefix("0x").unwrap_or(code)), - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct DeployedBytecode { - #[serde(flatten)] - pub bytecode: Option, - #[serde( - default, - rename = "immutableReferences", - skip_serializing_if = "::std::collections::BTreeMap::is_empty" - )] - pub immutable_references: BTreeMap>, -} - -impl DeployedBytecode { - /// Returns a reference to the underlying `Bytes` if the object is a valid bytecode. - pub fn bytes(&self) -> Option<&Bytes> { - self.bytecode.as_ref().and_then(|bytecode| bytecode.object.as_bytes()) - } - - /// Returns the underlying `Bytes` if the object is a valid bytecode. - pub fn into_bytes(self) -> Option { - self.bytecode.and_then(|bytecode| bytecode.object.into_bytes()) - } -} - -impl From for DeployedBytecode { - fn from(bcode: Bytecode) -> Self { - Self { bytecode: Some(bcode), immutable_references: Default::default() } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CompactDeployedBytecode { - #[serde(flatten)] - pub bytecode: Option, - #[serde( - default, - rename = "immutableReferences", - skip_serializing_if = "::std::collections::BTreeMap::is_empty" - )] - pub immutable_references: BTreeMap>, -} - -impl CompactDeployedBytecode { - /// Returns a new `CompactDeployedBytecode` object that contains nothing, as it's the case for - /// interfaces and standalone solidity files that don't contain any contract definitions - pub fn empty() -> Self { - Self { bytecode: Some(CompactBytecode::empty()), immutable_references: Default::default() } - } - - /// Returns a reference to the underlying `Bytes` if the object is a valid bytecode. - pub fn bytes(&self) -> Option<&Bytes> { - self.bytecode.as_ref().and_then(|bytecode| bytecode.object.as_bytes()) - } - - /// Returns the underlying `Bytes` if the object is a valid bytecode. 
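A sketch of the prefix-stripping serializer in action (assumes the `From<BytecodeObject> for Bytecode` conversion earlier in this file; the byte values are made up):

    fn to_solc_style_json() -> serde_json::Result<String> {
        let bytecode = Bytecode::from(BytecodeObject::Bytecode(Bytes::from(vec![0x60, 0x80])));
        // The `object` field serializes as "6080" rather than the "0x6080"
        // that `Bytes` would produce on its own.
        serde_json::to_string(&bytecode)
    }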
- pub fn into_bytes(self) -> Option { - self.bytecode.and_then(|bytecode| bytecode.object.into_bytes()) - } - - /// Returns the parsed source map - /// - /// See also - pub fn source_map(&self) -> Option> { - self.bytecode.as_ref().and_then(|bytecode| bytecode.source_map()) - } -} - -impl From for CompactDeployedBytecode { - fn from(bcode: DeployedBytecode) -> Self { - Self { - bytecode: bcode.bytecode.map(|d_bcode| d_bcode.into()), - immutable_references: bcode.immutable_references, - } - } -} - -impl From for DeployedBytecode { - fn from(bcode: CompactDeployedBytecode) -> Self { - Self { - bytecode: bcode.bytecode.map(|d_bcode| d_bcode.into()), - immutable_references: bcode.immutable_references, - } - } -} - -#[cfg(test)] -mod tests { - use crate::{ConfigurableContractArtifact, ContractBytecode}; - - #[test] - fn test_empty_bytecode() { - let empty = r#" - { - "abi": [], - "bytecode": { - "object": "0x", - "linkReferences": {} - }, - "deployedBytecode": { - "object": "0x", - "linkReferences": {} - } - } - "#; - - let artifact: ConfigurableContractArtifact = serde_json::from_str(empty).unwrap(); - let contract = artifact.into_contract_bytecode(); - let bytecode: ContractBytecode = contract.into(); - let bytecode = bytecode.unwrap(); - assert!(!bytecode.bytecode.object.is_unlinked()); - } -} diff --git a/crates/artifacts/solc/src/configurable.rs b/crates/artifacts/solc/src/configurable.rs deleted file mode 100644 index 5a354987..00000000 --- a/crates/artifacts/solc/src/configurable.rs +++ /dev/null @@ -1,115 +0,0 @@ -use crate::{ - Ast, CompactBytecode, CompactContract, CompactContractBytecode, CompactContractBytecodeCow, - CompactDeployedBytecode, DevDoc, Ewasm, FunctionDebugData, GasEstimates, GeneratedSource, - Metadata, Offsets, SourceFile, StorageLayout, UserDoc, -}; -use alloy_json_abi::JsonAbi; -use serde::{Deserialize, Serialize}; -use std::{borrow::Cow, collections::BTreeMap}; - -/// Represents the `Artifact` that `ConfigurableArtifacts` emits. -/// -/// This is essentially a superset of [`CompactContractBytecode`]. -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ConfigurableContractArtifact { - /// The Ethereum Contract ABI. If empty, it is represented as an empty - /// array. 
See - pub abi: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub bytecode: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub deployed_bytecode: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub assembly: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub legacy_assembly: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub opcodes: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub method_identifiers: Option>, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub generated_sources: Vec, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub function_debug_data: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub gas_estimates: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub raw_metadata: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub metadata: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub storage_layout: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub transient_storage_layout: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub userdoc: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub devdoc: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir_optimized: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir_optimized_ast: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ewasm: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ast: Option, - /// The identifier of the source file - #[serde(default, skip_serializing_if = "Option::is_none")] - pub id: Option, -} - -impl ConfigurableContractArtifact { - /// Returns the inner element that contains the core bytecode related information - pub fn into_contract_bytecode(self) -> CompactContractBytecode { - self.into() - } - - /// Looks for all link references in deployment and runtime bytecodes - pub fn all_link_references(&self) -> BTreeMap>> { - let mut links = BTreeMap::new(); - if let Some(bcode) = &self.bytecode { - links.extend(bcode.link_references.clone()); - } - - if let Some(d_bcode) = &self.deployed_bytecode { - if let Some(bcode) = &d_bcode.bytecode { - links.extend(bcode.link_references.clone()); - } - } - links - } - - /// Returns the source file of this artifact's contract - pub fn source_file(&self) -> Option { - self.id.map(|id| SourceFile { id, ast: self.ast.clone() }) - } -} - -impl From for CompactContractBytecode { - fn from(artifact: ConfigurableContractArtifact) -> Self { - Self { - abi: artifact.abi.map(Into::into), - bytecode: artifact.bytecode, - deployed_bytecode: artifact.deployed_bytecode, - } - } -} - -impl From for CompactContract { - fn from(artifact: ConfigurableContractArtifact) -> Self { - CompactContractBytecode::from(artifact).into() - } -} - -impl<'a> From<&'a ConfigurableContractArtifact> for CompactContractBytecodeCow<'a> { - fn from(artifact: &'a ConfigurableContractArtifact) -> Self { - CompactContractBytecodeCow { - abi: artifact.abi.as_ref().map(Cow::Borrowed), - bytecode: artifact.bytecode.as_ref().map(Cow::Borrowed), - deployed_bytecode: artifact.deployed_bytecode.as_ref().map(Cow::Borrowed), - } - } -} diff --git a/crates/artifacts/solc/src/contract.rs 
b/crates/artifacts/solc/src/contract.rs deleted file mode 100644 index 58931952..00000000 --- a/crates/artifacts/solc/src/contract.rs +++ /dev/null @@ -1,539 +0,0 @@ -//! Contract related types. - -use crate::{ - bytecode::{ - Bytecode, BytecodeObject, CompactBytecode, CompactDeployedBytecode, DeployedBytecode, - }, - serde_helpers, DevDoc, Evm, Ewasm, LosslessMetadata, Offsets, StorageLayout, UserDoc, -}; -use alloy_json_abi::JsonAbi; -use alloy_primitives::Bytes; -use serde::{Deserialize, Serialize}; -use std::{borrow::Cow, collections::BTreeMap}; - -/// Represents a compiled solidity contract -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Contract { - /// The Ethereum Contract Metadata. - /// See - pub abi: Option, - #[serde( - default, - skip_serializing_if = "Option::is_none", - with = "serde_helpers::json_string_opt" - )] - pub metadata: Option, - #[serde(default)] - pub userdoc: UserDoc, - #[serde(default)] - pub devdoc: DevDoc, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir: Option, - #[serde(default, skip_serializing_if = "StorageLayout::is_empty")] - pub storage_layout: StorageLayout, - #[serde(default, skip_serializing_if = "StorageLayout::is_empty")] - pub transient_storage_layout: StorageLayout, - /// EVM-related outputs - #[serde(default, skip_serializing_if = "Option::is_none")] - pub evm: Option, - /// Ewasm related outputs - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ewasm: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir_optimized: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir_optimized_ast: Option, -} - -impl<'a> From<&'a Contract> for CompactContractBytecodeCow<'a> { - fn from(artifact: &'a Contract) -> Self { - let (bytecode, deployed_bytecode) = if let Some(ref evm) = artifact.evm { - ( - evm.bytecode.clone().map(Into::into).map(Cow::Owned), - evm.deployed_bytecode.clone().map(Into::into).map(Cow::Owned), - ) - } else { - (None, None) - }; - CompactContractBytecodeCow { - abi: artifact.abi.as_ref().map(Cow::Borrowed), - bytecode, - deployed_bytecode, - } - } -} - -/// Minimal representation of a contract with a present abi and bytecode. -/// -/// Unlike `CompactContractSome` which contains the `BytecodeObject`, this holds the whole -/// `Bytecode` object. -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ContractBytecode { - /// The Ethereum Contract ABI. If empty, it is represented as an empty - /// array. See - pub abi: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub bytecode: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub deployed_bytecode: Option, -} - -impl ContractBytecode { - /// Unwraps `self` into `ContractBytecodeSome`. - /// - /// # Panics - /// - /// Panics if any field is `None`. 
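The panicking `unwrap` below has a fallible counterpart via the `TryFrom` impl later in this file; a sketch of the non-panicking route:

    fn require_complete(contract: ContractBytecode) -> Option<ContractBytecodeSome> {
        // `try_from` hands back the incomplete value instead of panicking.
        ContractBytecodeSome::try_from(contract).ok()
    }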
- #[track_caller] - pub fn unwrap(self) -> ContractBytecodeSome { - ContractBytecodeSome { - abi: self.abi.unwrap(), - bytecode: self.bytecode.unwrap(), - deployed_bytecode: self.deployed_bytecode.unwrap(), - } - } - - /// Looks for all link references in deployment and runtime bytecodes - pub fn all_link_references(&self) -> BTreeMap>> { - let mut links = BTreeMap::new(); - if let Some(bcode) = &self.bytecode { - links.extend(bcode.link_references.clone()); - } - - if let Some(d_bcode) = &self.deployed_bytecode { - if let Some(bcode) = &d_bcode.bytecode { - links.extend(bcode.link_references.clone()); - } - } - links - } -} - -impl From for ContractBytecode { - fn from(c: Contract) -> Self { - let (bytecode, deployed_bytecode) = if let Some(evm) = c.evm { - (evm.bytecode, evm.deployed_bytecode) - } else { - (None, None) - }; - - Self { abi: c.abi.map(Into::into), bytecode, deployed_bytecode } - } -} - -/// Minimal representation of a contract with a present abi and bytecode. -/// -/// Unlike `CompactContractSome` which contains the `BytecodeObject`, this holds the whole -/// `Bytecode` object. -#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CompactContractBytecode { - /// The Ethereum Contract ABI. If empty, it is represented as an empty - /// array. See - pub abi: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub bytecode: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub deployed_bytecode: Option, -} - -impl CompactContractBytecode { - /// Looks for all link references in deployment and runtime bytecodes - pub fn all_link_references(&self) -> BTreeMap>> { - let mut links = BTreeMap::new(); - if let Some(bcode) = &self.bytecode { - links.extend(bcode.link_references.clone()); - } - - if let Some(d_bcode) = &self.deployed_bytecode { - if let Some(bcode) = &d_bcode.bytecode { - links.extend(bcode.link_references.clone()); - } - } - links - } -} - -impl<'a> From<&'a CompactContractBytecode> for CompactContractBytecodeCow<'a> { - fn from(artifact: &'a CompactContractBytecode) -> Self { - CompactContractBytecodeCow { - abi: artifact.abi.as_ref().map(Cow::Borrowed), - bytecode: artifact.bytecode.as_ref().map(Cow::Borrowed), - deployed_bytecode: artifact.deployed_bytecode.as_ref().map(Cow::Borrowed), - } - } -} - -impl From for CompactContractBytecode { - fn from(c: Contract) -> Self { - let (bytecode, deployed_bytecode) = if let Some(evm) = c.evm { - let evm = evm.into_compact(); - (evm.bytecode, evm.deployed_bytecode) - } else { - (None, None) - }; - - Self { abi: c.abi.map(Into::into), bytecode, deployed_bytecode } - } -} - -impl From for CompactContractBytecode { - fn from(c: ContractBytecode) -> Self { - let (maybe_bcode, maybe_runtime) = match (c.bytecode, c.deployed_bytecode) { - (Some(bcode), Some(dbcode)) => (Some(bcode.into()), Some(dbcode.into())), - (None, Some(dbcode)) => (None, Some(dbcode.into())), - (Some(bcode), None) => (Some(bcode.into()), None), - (None, None) => (None, None), - }; - Self { abi: c.abi, bytecode: maybe_bcode, deployed_bytecode: maybe_runtime } - } -} - -impl From for ContractBytecode { - fn from(c: CompactContractBytecode) -> Self { - let (maybe_bcode, maybe_runtime) = match (c.bytecode, c.deployed_bytecode) { - (Some(bcode), Some(dbcode)) => (Some(bcode.into()), Some(dbcode.into())), - (None, Some(dbcode)) => (None, Some(dbcode.into())), - (Some(bcode), None) => (Some(bcode.into()), None), - (None, None) => (None, None), - }; - Self { abi: 
c.abi, bytecode: maybe_bcode, deployed_bytecode: maybe_runtime } - } -} - -/// A [CompactContractBytecode] that is either owns or borrows its content -#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CompactContractBytecodeCow<'a> { - pub abi: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub bytecode: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub deployed_bytecode: Option>, -} - -impl From> for CompactContract { - fn from(value: CompactContractBytecodeCow<'_>) -> Self { - Self { - abi: value.abi.map(Cow::into_owned), - bin: value.bytecode.map(|bytecode| match bytecode { - Cow::Owned(bytecode) => bytecode.object, - Cow::Borrowed(bytecode) => bytecode.object.clone(), - }), - bin_runtime: value - .deployed_bytecode - .and_then(|bytecode| match bytecode { - Cow::Owned(bytecode) => bytecode.bytecode, - Cow::Borrowed(bytecode) => bytecode.bytecode.clone(), - }) - .map(|bytecode| bytecode.object), - } - } -} - -impl From> for CompactContractBytecode { - fn from(value: CompactContractBytecodeCow<'_>) -> Self { - Self { - abi: value.abi.map(Cow::into_owned), - bytecode: value.bytecode.map(Cow::into_owned), - deployed_bytecode: value.deployed_bytecode.map(Cow::into_owned), - } - } -} - -impl<'a> From<&'a CompactContractBytecodeCow<'_>> for CompactContractBytecodeCow<'a> { - fn from(value: &'a CompactContractBytecodeCow<'_>) -> Self { - Self { - abi: value.abi.as_ref().map(|x| Cow::Borrowed(&**x)), - bytecode: value.bytecode.as_ref().map(|x| Cow::Borrowed(&**x)), - deployed_bytecode: value.deployed_bytecode.as_ref().map(|x| Cow::Borrowed(&**x)), - } - } -} - -/// Minimal representation of a contract with a present abi and bytecode. -/// -/// Unlike `CompactContractSome` which contains the `BytecodeObject`, this holds the whole -/// `Bytecode` object. -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct ContractBytecodeSome { - pub abi: JsonAbi, - pub bytecode: Bytecode, - pub deployed_bytecode: DeployedBytecode, -} - -impl TryFrom for ContractBytecodeSome { - type Error = ContractBytecode; - - fn try_from(value: ContractBytecode) -> Result { - if value.abi.is_none() || value.bytecode.is_none() || value.deployed_bytecode.is_none() { - return Err(value); - } - Ok(value.unwrap()) - } -} - -/// Minimal representation of a contract's artifact with a present abi and bytecode. -#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] -pub struct CompactContractSome { - /// The Ethereum Contract ABI. If empty, it is represented as an empty - /// array. See - pub abi: JsonAbi, - pub bin: BytecodeObject, - #[serde(rename = "bin-runtime")] - pub bin_runtime: BytecodeObject, -} - -impl TryFrom for CompactContractSome { - type Error = CompactContract; - - fn try_from(value: CompactContract) -> Result { - if value.abi.is_none() || value.bin.is_none() || value.bin_runtime.is_none() { - return Err(value); - } - Ok(value.unwrap()) - } -} - -/// The general purpose minimal representation of a contract's abi with bytecode -/// Unlike `CompactContractSome` all fields are optional so that every possible compiler output can -/// be represented by it -#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] -pub struct CompactContract { - /// The Ethereum Contract ABI. If empty, it is represented as an empty - /// array. 
See - pub abi: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub bin: Option, - #[serde(default, rename = "bin-runtime", skip_serializing_if = "Option::is_none")] - pub bin_runtime: Option, -} - -impl CompactContract { - /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode - pub fn into_parts(self) -> (Option, Option, Option) { - ( - self.abi, - self.bin.and_then(|bin| bin.into_bytes()), - self.bin_runtime.and_then(|bin| bin.into_bytes()), - ) - } - - /// Returns the individual parts of this contract. - /// - /// If the values are `None`, then `Default` is returned. - pub fn into_parts_or_default(self) -> (JsonAbi, Bytes, Bytes) { - ( - self.abi.unwrap_or_default(), - self.bin.and_then(|bin| bin.into_bytes()).unwrap_or_default(), - self.bin_runtime.and_then(|bin| bin.into_bytes()).unwrap_or_default(), - ) - } - - /// Unwraps `self` into `CompactContractSome`. - /// - /// # Panics - /// - /// Panics if any field is `None`. - #[track_caller] - pub fn unwrap(self) -> CompactContractSome { - CompactContractSome { - abi: self.abi.unwrap(), - bin: self.bin.unwrap(), - bin_runtime: self.bin_runtime.unwrap(), - } - } - - /// Returns the `CompactContractSome` if any if the field equals `None` the `Default` value is - /// returned - /// - /// Unlike `unwrap`, this function does _not_ panic - pub fn unwrap_or_default(self) -> CompactContractSome { - CompactContractSome { - abi: self.abi.unwrap_or_default(), - bin: self.bin.unwrap_or_default(), - bin_runtime: self.bin_runtime.unwrap_or_default(), - } - } -} - -impl From for CompactContract { - fn from(mut val: serde_json::Value) -> Self { - if let Some(map) = val.as_object_mut() { - let abi = map.remove("abi").and_then(|val| serde_json::from_value(val).ok()); - let bin = map.remove("bin").and_then(|val| serde_json::from_value(val).ok()); - let bin_runtime = - map.remove("bin-runtime").and_then(|val| serde_json::from_value(val).ok()); - Self { abi, bin, bin_runtime } - } else { - Self::default() - } - } -} - -impl<'a> From<&'a serde_json::Value> for CompactContractBytecodeCow<'a> { - fn from(artifact: &'a serde_json::Value) -> Self { - let c = CompactContractBytecode::from(artifact.clone()); - CompactContractBytecodeCow { - abi: c.abi.map(Cow::Owned), - bytecode: c.bytecode.map(Cow::Owned), - deployed_bytecode: c.deployed_bytecode.map(Cow::Owned), - } - } -} - -impl From for CompactContractBytecode { - fn from(val: serde_json::Value) -> Self { - serde_json::from_value(val).unwrap_or_default() - } -} - -impl From for CompactContract { - fn from(c: ContractBytecode) -> Self { - let ContractBytecode { abi, bytecode, deployed_bytecode } = c; - Self { - abi, - bin: bytecode.map(|c| c.object), - bin_runtime: deployed_bytecode - .and_then(|deployed| deployed.bytecode.map(|code| code.object)), - } - } -} - -impl From for CompactContract { - fn from(c: CompactContractBytecode) -> Self { - let c: ContractBytecode = c.into(); - c.into() - } -} - -impl From for CompactContract { - fn from(c: ContractBytecodeSome) -> Self { - Self { - abi: Some(c.abi), - bin: Some(c.bytecode.object), - bin_runtime: c.deployed_bytecode.bytecode.map(|code| code.object), - } - } -} - -impl From for CompactContract { - fn from(c: Contract) -> Self { - ContractBytecode::from(c).into() - } -} - -impl From for CompactContract { - fn from(c: CompactContractSome) -> Self { - Self { abi: Some(c.abi), bin: Some(c.bin), bin_runtime: Some(c.bin_runtime) } - } -} - -impl<'a> From> for CompactContract { - fn from(c: 
CompactContractRef<'a>) -> Self { - Self { abi: c.abi.cloned(), bin: c.bin.cloned(), bin_runtime: c.bin_runtime.cloned() } - } -} - -impl<'a> From> for CompactContract { - fn from(c: CompactContractRefSome<'a>) -> Self { - Self { - abi: Some(c.abi.clone()), - bin: Some(c.bin.clone()), - bin_runtime: Some(c.bin_runtime.clone()), - } - } -} - -/// Minimal representation of a contract with a present abi and bytecode that borrows. -#[derive(Clone, Copy, Debug, Serialize)] -pub struct CompactContractRefSome<'a> { - pub abi: &'a JsonAbi, - pub bin: &'a BytecodeObject, - #[serde(rename = "bin-runtime")] - pub bin_runtime: &'a BytecodeObject, -} - -impl<'a> CompactContractRefSome<'a> { - /// Returns the individual parts of this contract. - /// - /// If the values are `None`, then `Default` is returned. - pub fn into_parts(self) -> (JsonAbi, Bytes, Bytes) { - CompactContract::from(self).into_parts_or_default() - } -} - -impl<'a> TryFrom> for CompactContractRefSome<'a> { - type Error = CompactContractRef<'a>; - - fn try_from(value: CompactContractRef<'a>) -> Result { - if value.abi.is_none() || value.bin.is_none() || value.bin_runtime.is_none() { - return Err(value); - } - Ok(value.unwrap()) - } -} - -/// Helper type to serialize while borrowing from `Contract` -#[derive(Clone, Copy, Debug, Serialize)] -pub struct CompactContractRef<'a> { - pub abi: Option<&'a JsonAbi>, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub bin: Option<&'a BytecodeObject>, - #[serde(default, rename = "bin-runtime", skip_serializing_if = "Option::is_none")] - pub bin_runtime: Option<&'a BytecodeObject>, -} - -impl<'a> CompactContractRef<'a> { - /// Clones the referenced values and returns as tuples - pub fn into_parts(self) -> (Option, Option, Option) { - CompactContract::from(self).into_parts() - } - - /// Returns the individual parts of this contract. - /// - /// If the values are `None`, then `Default` is returned. - pub fn into_parts_or_default(self) -> (JsonAbi, Bytes, Bytes) { - CompactContract::from(self).into_parts_or_default() - } - - pub fn bytecode(&self) -> Option<&Bytes> { - self.bin.as_ref().and_then(|bin| bin.as_bytes()) - } - - pub fn runtime_bytecode(&self) -> Option<&Bytes> { - self.bin_runtime.as_ref().and_then(|bin| bin.as_bytes()) - } - - /// Unwraps `self` into `CompactContractRefSome`. - /// - /// # Panics - /// - /// Panics if any field is `None`. 
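A borrowing sketch built on the reference types above (`deployed_code` is a hypothetical helper; it relies on the `From<&Contract>` conversion just below):

    fn deployed_code(contract: &Contract) -> Option<Bytes> {
        // `CompactContractRef` only borrows from the contract, so nothing is
        // copied until the final `cloned()`.
        let view = CompactContractRef::from(contract);
        view.runtime_bytecode().cloned()
    }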
- #[track_caller] - pub fn unwrap(self) -> CompactContractRefSome<'a> { - CompactContractRefSome { - abi: self.abi.unwrap(), - bin: self.bin.unwrap(), - bin_runtime: self.bin_runtime.unwrap(), - } - } -} - -impl<'a> From<&'a Contract> for CompactContractRef<'a> { - fn from(c: &'a Contract) -> Self { - let (bin, bin_runtime) = if let Some(ref evm) = c.evm { - ( - evm.bytecode.as_ref().map(|c| &c.object), - evm.deployed_bytecode - .as_ref() - .and_then(|deployed| deployed.bytecode.as_ref().map(|evm| &evm.object)), - ) - } else { - (None, None) - }; - - Self { abi: c.abi.as_ref(), bin, bin_runtime } - } -} diff --git a/crates/artifacts/solc/src/error.rs b/crates/artifacts/solc/src/error.rs deleted file mode 100644 index bdde31d0..00000000 --- a/crates/artifacts/solc/src/error.rs +++ /dev/null @@ -1,423 +0,0 @@ -use super::serde_helpers; -use serde::{Deserialize, Serialize}; -use std::{fmt, ops::Range, str::FromStr}; -use yansi::{Color, Style}; - -#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub struct SourceLocation { - pub file: String, - pub start: i32, - pub end: i32, -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub struct SecondarySourceLocation { - pub file: Option, - pub start: Option, - pub end: Option, - pub message: Option, -} - -/// The severity of the error. -#[derive( - Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, -)] -#[serde(rename_all = "lowercase")] -pub enum Severity { - /// Solc `Error` - #[default] - Error, - /// Solc `Warning` - Warning, - /// Solc `Info` - Info, -} - -impl fmt::Display for Severity { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -impl FromStr for Severity { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "Error" | "error" => Ok(Self::Error), - "Warning" | "warning" => Ok(Self::Warning), - "Info" | "info" => Ok(Self::Info), - s => Err(format!("Invalid severity: {s}")), - } - } -} - -impl Severity { - /// Returns `true` if the severity is `Error`. - pub const fn is_error(&self) -> bool { - matches!(self, Self::Error) - } - - /// Returns `true` if the severity is `Warning`. - pub const fn is_warning(&self) -> bool { - matches!(self, Self::Warning) - } - - /// Returns `true` if the severity is `Info`. - pub const fn is_info(&self) -> bool { - matches!(self, Self::Info) - } - - /// Returns the string representation of the severity. - pub const fn as_str(&self) -> &'static str { - match self { - Self::Error => "Error", - Self::Warning => "Warning", - Self::Info => "Info", - } - } - - /// Returns the color to format the severity with. - pub const fn color(&self) -> Color { - match self { - Self::Error => Color::Red, - Self::Warning => Color::Yellow, - Self::Info => Color::White, - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Error { - #[serde(default, skip_serializing_if = "Option::is_none")] - pub source_location: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub secondary_source_locations: Vec, - pub r#type: String, - pub component: String, - pub severity: Severity, - #[serde(default, with = "serde_helpers::display_from_str_opt")] - pub error_code: Option, - pub message: String, - pub formatted_message: Option, -} - -impl Error { - /// Returns `true` if the error is an error. 
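A small sketch exercising the `Severity` helpers above (both solc's capitalized and lowercase spellings parse, per the `FromStr` impl):

    fn is_fatal(raw_severity: &str) -> bool {
        raw_severity.parse::<Severity>().map_or(false, |s| s.is_error())
    }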
- pub const fn is_error(&self) -> bool { - self.severity.is_error() - } - - /// Returns `true` if the error is a warning. - pub const fn is_warning(&self) -> bool { - self.severity.is_warning() - } - - /// Returns `true` if the error is an info. - pub const fn is_info(&self) -> bool { - self.severity.is_info() - } -} - -/// Tries to mimic Solidity's own error formatting. -/// -/// -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut short_msg = self.message.trim(); - let fmtd_msg = self.formatted_message.as_deref().unwrap_or(""); - - if short_msg.is_empty() { - // if the message is empty, try to extract the first line from the formatted message - if let Some(first_line) = fmtd_msg.lines().next() { - // this is something like `ParserError: ` - if let Some((_, s)) = first_line.split_once(':') { - short_msg = s.trim_start(); - } else { - short_msg = first_line; - } - } - } - - // Error (XXXX): Error Message - styled(f, self.severity.color().bold(), |f| self.fmt_severity(f))?; - fmt_msg(f, short_msg)?; - - let mut lines = fmtd_msg.lines(); - - // skip the first line if it contains the same message as the one we just formatted, - // unless it also contains a source location, in which case the entire error message is an - // old style error message, like: - // path/to/file:line:column: ErrorType: message - if lines.clone().next().map_or(false, |l| { - l.contains(short_msg) && l.bytes().filter(|b| *b == b':').count() < 3 - }) { - let _ = lines.next(); - } - - // format the main source location - fmt_source_location(f, &mut lines)?; - - // format remaining lines as secondary locations - while let Some(line) = lines.next() { - f.write_str("\n")?; - - if let Some((note, msg)) = line.split_once(':') { - styled(f, Self::secondary_style(), |f| f.write_str(note))?; - fmt_msg(f, msg)?; - } else { - f.write_str(line)?; - } - - fmt_source_location(f, &mut lines)?; - } - - Ok(()) - } -} - -impl Error { - /// The style of the diagnostic severity. - pub fn error_style(&self) -> Style { - self.severity.color().bold() - } - - /// The style of the diagnostic message. - pub fn message_style() -> Style { - Color::White.bold() - } - - /// The style of the secondary source location. - pub fn secondary_style() -> Style { - Color::Cyan.bold() - } - - /// The style of the source location highlight. - pub fn highlight_style() -> Style { - Style::new().fg(Color::Yellow) - } - - /// The style of the diagnostics. - pub fn diag_style() -> Style { - Color::Yellow.bold() - } - - /// The style of the source location frame. - pub fn frame_style() -> Style { - Style::new().fg(Color::Blue) - } - - /// Formats the diagnostic severity: - /// - /// ```text - /// Error (XXXX) - /// ``` - fn fmt_severity(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.severity.as_str())?; - if let Some(code) = self.error_code { - write!(f, " ({code})")?; - } - Ok(()) - } -} - -/// Calls `fun` in between [`Style::fmt_prefix`] and [`Style::fmt_suffix`]. -fn styled(f: &mut fmt::Formatter<'_>, style: Style, fun: F) -> fmt::Result -where - F: FnOnce(&mut fmt::Formatter<'_>) -> fmt::Result, -{ - let enabled = yansi::is_enabled(); - if enabled { - style.fmt_prefix(f)?; - } - fun(f)?; - if enabled { - style.fmt_suffix(f)?; - } - Ok(()) -} - -/// Formats the diagnostic message. 
-fn fmt_msg(f: &mut fmt::Formatter<'_>, msg: &str) -> fmt::Result { - styled(f, Error::message_style(), |f| { - f.write_str(": ")?; - f.write_str(msg.trim_start()) - }) -} - -/// Colors a Solidity source location: -/// -/// ```text -/// --> /home/user/contract.sol:420:69: -/// | -/// 420 | bad_code() -/// | ^ -/// ``` -fn fmt_source_location(f: &mut fmt::Formatter<'_>, lines: &mut std::str::Lines<'_>) -> fmt::Result { - // --> source - if let Some(line) = lines.next() { - f.write_str("\n")?; - - let arrow = "-->"; - if let Some((left, loc)) = line.split_once(arrow) { - f.write_str(left)?; - styled(f, Error::frame_style(), |f| f.write_str(arrow))?; - f.write_str(loc)?; - } else { - f.write_str(line)?; - } - } - - // get the next 3 lines - let Some(line1) = lines.next() else { - return Ok(()); - }; - let Some(line2) = lines.next() else { - f.write_str("\n")?; - f.write_str(line1)?; - return Ok(()); - }; - let Some(line3) = lines.next() else { - f.write_str("\n")?; - f.write_str(line1)?; - f.write_str("\n")?; - f.write_str(line2)?; - return Ok(()); - }; - - // line 1, just a frame - fmt_framed_location(f, line1, None)?; - - // line 2, frame and code; highlight the text based on line 3's carets - let hl_start = line3.find('^'); - let highlight = hl_start.map(|start| { - let end = if line3.contains("^ (") { - // highlight the entire line because of "spans across multiple lines" diagnostic - line2.len() - } else if let Some(carets) = line3[start..].find(|c: char| c != '^') { - // highlight the text that the carets point to - start + carets - } else { - // the carets span the entire third line - line3.len() - } - // bound in case carets span longer than the code they point to - .min(line2.len()); - (start.min(end)..end, Error::highlight_style()) - }); - fmt_framed_location(f, line2, highlight)?; - - // line 3, frame and maybe highlight, this time till the end unconditionally - let highlight = hl_start.map(|i| (i..line3.len(), Error::diag_style())); - fmt_framed_location(f, line3, highlight) -} - -/// Colors a single Solidity framed source location line. Part of [`fmt_source_location`]. -fn fmt_framed_location( - f: &mut fmt::Formatter<'_>, - line: &str, - highlight: Option<(Range, Style)>, -) -> fmt::Result { - f.write_str("\n")?; - - if let Some((space_or_line_number, rest)) = line.split_once('|') { - // if the potential frame is not just whitespace or numbers, don't color it - if !space_or_line_number.chars().all(|c| c.is_whitespace() || c.is_numeric()) { - return f.write_str(line); - } - - styled(f, Error::frame_style(), |f| { - f.write_str(space_or_line_number)?; - f.write_str("|") - })?; - - if let Some((range, style)) = highlight { - let Range { start, end } = range; - // Skip highlighting if the range is not valid unicode. - if !line.is_char_boundary(start) || !line.is_char_boundary(end) { - f.write_str(rest) - } else { - let rest_start = line.len() - rest.len(); - f.write_str(&line[rest_start..start])?; - styled(f, style, |f| f.write_str(&line[range]))?; - f.write_str(&line[end..]) - } - } else { - f.write_str(rest) - } - } else { - f.write_str(line) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn fmt_unicode() { - let msg = "Invalid character in string. 
If you are trying to use Unicode characters, use a unicode\"...\" string literal."; - let e = Error { - source_location: Some(SourceLocation { file: "test/Counter.t.sol".into(), start: 418, end: 462 }), - secondary_source_locations: vec![], - r#type: "ParserError".into(), - component: "general".into(), - severity: Severity::Error, - error_code: Some(8936), - message: msg.into(), - formatted_message: Some("ParserError: Invalid character in string. If you are trying to use Unicode characters, use a unicode\"...\" string literal.\n --> test/Counter.t.sol:17:21:\n |\n17 | console.log(\"1. ownership set correctly as governance: ✓\");\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n".into()), - }; - let s = e.to_string(); - eprintln!("{s}"); - assert!(s.contains(msg), "\n{s}"); - } - - #[test] - fn only_formatted() { - let e = Error { - source_location: Some(SourceLocation { file: "test/Counter.t.sol".into(), start: 418, end: 462 }), - secondary_source_locations: vec![], - r#type: "ParserError".into(), - component: "general".into(), - severity: Severity::Error, - error_code: Some(8936), - message: String::new(), - formatted_message: Some("ParserError: Invalid character in string. If you are trying to use Unicode characters, use a unicode\"...\" string literal.\n --> test/Counter.t.sol:17:21:\n |\n17 | console.log(\"1. ownership set correctly as governance: ✓\");\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n".into()), - }; - let s = e.to_string(); - eprintln!("{s}"); - assert!(s.contains("Invalid character in string"), "\n{s}"); - } - - #[test] - fn solc_0_7() { - let output = r#"{"errors":[{"component":"general","errorCode":"6594","formattedMessage":"test/Counter.t.sol:7:1: TypeError: Contract \"CounterTest\" does not use ABI coder v2 but wants to inherit from a contract which uses types that require it. Use \"pragma abicoder v2;\" for the inheriting contract as well to enable the feature.\ncontract CounterTest is Test {\n^ (Relevant source part starts here and spans across multiple lines).\nlib/forge-std/src/StdInvariant.sol:72:5: Type only supported by ABIEncoderV2\n function excludeArtifacts() public view returns (string[] memory excludedArtifacts_) {\n ^ (Relevant source part starts here and spans across multiple lines).\nlib/forge-std/src/StdInvariant.sol:84:5: Type only supported by ABIEncoderV2\n function targetArtifacts() public view returns (string[] memory targetedArtifacts_) {\n ^ (Relevant source part starts here and spans across multiple lines).\nlib/forge-std/src/StdInvariant.sol:88:5: Type only supported by ABIEncoderV2\n function targetArtifactSelectors() public view returns (FuzzSelector[] memory targetedArtifactSelectors_) {\n ^ (Relevant source part starts here and spans across multiple lines).\nlib/forge-std/src/StdInvariant.sol:96:5: Type only supported by ABIEncoderV2\n function targetSelectors() public view returns (FuzzSelector[] memory targetedSelectors_) {\n ^ (Relevant source part starts here and spans across multiple lines).\nlib/forge-std/src/StdInvariant.sol:104:5: Type only supported by ABIEncoderV2\n function targetInterfaces() public view returns (FuzzInterface[] memory targetedInterfaces_) {\n ^ (Relevant source part starts here and spans across multiple lines).\n","message":"Contract \"CounterTest\" does not use ABI coder v2 but wants to inherit from a contract which uses types that require it. 
Use \"pragma abicoder v2;\" for the inheriting contract as well to enable the feature.","secondarySourceLocations":[{"end":2298,"file":"lib/forge-std/src/StdInvariant.sol","message":"Type only supported by ABIEncoderV2","start":2157},{"end":2732,"file":"lib/forge-std/src/StdInvariant.sol","message":"Type only supported by ABIEncoderV2","start":2592},{"end":2916,"file":"lib/forge-std/src/StdInvariant.sol","message":"Type only supported by ABIEncoderV2","start":2738},{"end":3215,"file":"lib/forge-std/src/StdInvariant.sol","message":"Type only supported by ABIEncoderV2","start":3069},{"end":3511,"file":"lib/forge-std/src/StdInvariant.sol","message":"Type only supported by ABIEncoderV2","start":3360}],"severity":"error","sourceLocation":{"end":558,"file":"test/Counter.t.sol","start":157},"type":"TypeError"}],"sources":{}}"#; - let crate::CompilerOutput { errors, .. } = serde_json::from_str(output).unwrap(); - assert_eq!(errors.len(), 1); - let s = errors[0].to_string(); - eprintln!("{s}"); - assert!(s.contains("test/Counter.t.sol:7:1"), "\n{s}"); - assert!(s.contains("ABI coder v2"), "\n{s}"); - } - - #[test] - fn solc_not_formatting_the_message1() { - let error = r#"{"component":"general","errorCode":"6553","formattedMessage":"SyntaxError: The msize instruction cannot be used when the Yul optimizer is activated because it can change its semantics. Either disable the Yul optimizer or do not use the instruction.\n\n","message":"The msize instruction cannot be used when the Yul optimizer is activated because it can change its semantics. Either disable the Yul optimizer or do not use the instruction.","severity":"error","sourceLocation":{"end":173,"file":"","start":114},"type":"SyntaxError"}"#; - let error = serde_json::from_str::(error).unwrap(); - let s = error.to_string(); - eprintln!("{s}"); - assert!(s.contains("Error (6553)"), "\n{s}"); - assert!(s.contains("The msize instruction cannot be used"), "\n{s}"); - } - - #[test] - fn solc_not_formatting_the_message2() { - let error = r#"{"component":"general","errorCode":"5667","formattedMessage":"Warning: Unused function parameter. Remove or comment out the variable name to silence this warning.\n\n","message":"Unused function parameter. Remove or comment out the variable name to silence this warning.","severity":"warning","sourceLocation":{"end":104,"file":"","start":95},"type":"Warning"}"#; - let error = serde_json::from_str::(error).unwrap(); - let s = error.to_string(); - eprintln!("{s}"); - assert!(s.contains("Warning (5667)"), "\n{s}"); - assert!(s.contains("Unused function parameter. Remove or comment out the variable name to silence this warning."), "\n{s}"); - } -} diff --git a/crates/artifacts/solc/src/hh.rs b/crates/artifacts/solc/src/hh.rs deleted file mode 100644 index 248ebcbf..00000000 --- a/crates/artifacts/solc/src/hh.rs +++ /dev/null @@ -1,86 +0,0 @@ -//! Hardhat support - -use crate::{ - Bytecode, BytecodeObject, CompactContract, CompactContractBytecode, CompactContractBytecodeCow, - ContractBytecode, DeployedBytecode, Offsets, -}; -use alloy_json_abi::JsonAbi; -use serde::{Deserialize, Serialize}; -use std::{borrow::Cow, collections::btree_map::BTreeMap}; - -pub const HH_ARTIFACT_VERSION: &str = "hh-sol-artifact-1"; - -/// A hardhat artifact -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct HardhatArtifact { - #[serde(rename = "_format")] - pub format: String, - /// A string with the contract's name. 
- pub contract_name: String, - /// The source name of this contract in the workspace like `contracts/Greeter.sol` - pub source_name: String, - /// The contract's ABI - pub abi: JsonAbi, - /// A "0x"-prefixed hex string of the unlinked deployment bytecode. If the contract is not - /// deployable, this has the string "0x" - pub bytecode: Option, - /// A "0x"-prefixed hex string of the unlinked runtime/deployed bytecode. If the contract is - /// not deployable, this has the string "0x" - pub deployed_bytecode: Option, - /// The bytecode's link references object as returned by solc. If the contract doesn't need to - /// be linked, this value contains an empty object. - #[serde(default)] - pub link_references: BTreeMap>>, - /// The deployed bytecode's link references object as returned by solc. If the contract doesn't - /// need to be linked, this value contains an empty object. - #[serde(default)] - pub deployed_link_references: BTreeMap>>, -} - -impl<'a> From<&'a HardhatArtifact> for CompactContractBytecodeCow<'a> { - fn from(artifact: &'a HardhatArtifact) -> Self { - let c: ContractBytecode = artifact.clone().into(); - CompactContractBytecodeCow { - abi: Some(Cow::Borrowed(&artifact.abi)), - bytecode: c.bytecode.map(|b| Cow::Owned(b.into())), - deployed_bytecode: c.deployed_bytecode.map(|b| Cow::Owned(b.into())), - } - } -} - -impl From for CompactContract { - fn from(artifact: HardhatArtifact) -> Self { - Self { - abi: Some(artifact.abi), - bin: artifact.bytecode, - bin_runtime: artifact.deployed_bytecode, - } - } -} - -impl From for ContractBytecode { - fn from(artifact: HardhatArtifact) -> Self { - let bytecode: Option = artifact.bytecode.as_ref().map(|t| { - let mut bcode: Bytecode = t.clone().into(); - bcode.link_references = artifact.link_references.clone(); - bcode - }); - - let deployed_bytecode: Option = artifact.bytecode.as_ref().map(|t| { - let mut bcode: Bytecode = t.clone().into(); - bcode.link_references = artifact.deployed_link_references.clone(); - bcode.into() - }); - - Self { abi: Some(artifact.abi), bytecode, deployed_bytecode } - } -} - -impl From for CompactContractBytecode { - fn from(artifact: HardhatArtifact) -> Self { - let c: ContractBytecode = artifact.into(); - - c.into() - } -} diff --git a/crates/artifacts/solc/src/lib.rs b/crates/artifacts/solc/src/lib.rs deleted file mode 100644 index 2146613e..00000000 --- a/crates/artifacts/solc/src/lib.rs +++ /dev/null @@ -1,2224 +0,0 @@ -//! Solc artifact types. 
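-//!
-//! A minimal sketch of assembling a compiler input from these types (the file name and
-//! source content are illustrative):
-//!
-//! ```no_run
-//! use foundry_compilers_artifacts_solc::{Settings, SolcInput, SolcLanguage, Source, Sources};
-//!
-//! let mut sources = Sources::new();
-//! sources.insert("src/Counter.sol".into(), Source::new("contract Counter {}"));
-//! let input = SolcInput::new(SolcLanguage::Solidity, sources, Settings::default());
-//! ```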
-
-#![cfg_attr(not(test), warn(unused_crate_dependencies))]
-#![allow(ambiguous_glob_reexports)]
-#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
-
-#[macro_use]
-extern crate tracing;
-
-use semver::Version;
-use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
-use serde_repr::{Deserialize_repr, Serialize_repr};
-use std::{
-    collections::{BTreeMap, HashSet},
-    fmt,
-    path::{Path, PathBuf},
-    str::FromStr,
-};
-
-pub mod error;
-pub use error::*;
-pub mod ast;
-pub use ast::*;
-pub mod remappings;
-pub use remappings::*;
-pub mod bytecode;
-pub use bytecode::*;
-pub mod contract;
-pub use contract::*;
-pub mod configurable;
-pub mod hh;
-pub use configurable::*;
-pub mod output_selection;
-pub mod serde_helpers;
-pub mod sourcemap;
-pub mod sources;
-use crate::output_selection::{ContractOutputSelection, OutputSelection};
-use foundry_compilers_core::{
-    error::SolcError,
-    utils::{
-        strip_prefix_owned, BERLIN_SOLC, BYZANTIUM_SOLC, CANCUN_SOLC, CONSTANTINOPLE_SOLC,
-        ISTANBUL_SOLC, LONDON_SOLC, PARIS_SOLC, PETERSBURG_SOLC, PRAGUE_SOLC, SHANGHAI_SOLC,
-    },
-};
-pub use serde_helpers::{deserialize_bytes, deserialize_opt_bytes};
-pub use sources::*;
-
-/// Solidity files are made up of multiple `source units`, a solidity contract is such a `source
-/// unit`, therefore a solidity file can contain multiple contracts: (1-N*) relationship.
-///
-/// This type represents this mapping as `file name -> (contract name -> T)`, where the generic is
-/// intended to represent contract-specific information, like [`Contract`] itself, see [`Contracts`].
-pub type FileToContractsMap<T> = BTreeMap<PathBuf, BTreeMap<String, T>>;
-
-/// file -> (contract name -> Contract)
-pub type Contracts = FileToContractsMap<Contract>;
-
-pub const SOLIDITY: &str = "Solidity";
-pub const YUL: &str = "Yul";
-
-/// Languages supported by the Solc compiler.
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
-#[non_exhaustive]
-pub enum SolcLanguage {
-    Solidity,
-    Yul,
-}
-
-impl fmt::Display for SolcLanguage {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Self::Solidity => write!(f, "Solidity"),
-            Self::Yul => write!(f, "Yul"),
-        }
-    }
-}
-
-/// Input type `solc` expects.
-#[derive(Clone, Debug, Serialize, Deserialize)]
-pub struct SolcInput {
-    pub language: SolcLanguage,
-    pub sources: Sources,
-    pub settings: Settings,
-}
-
-/// Default `language` field is set to `"Solidity"`.
-impl Default for SolcInput {
-    fn default() -> Self {
-        Self {
-            language: SolcLanguage::Solidity,
-            sources: Sources::default(),
-            settings: Settings::default(),
-        }
-    }
-}
-
-impl SolcInput {
-    pub fn new(language: SolcLanguage, sources: Sources, mut settings: Settings) -> Self {
-        if language == SolcLanguage::Yul && !settings.remappings.is_empty() {
-            warn!("omitting remappings supplied for the yul sources");
-            settings.remappings = vec![];
-        }
-        Self { language, sources, settings }
-    }
-
-    /// Builds one or two inputs from the given set of sources. Returns two inputs when there are
-    /// both Solidity and Yul sources.
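-    ///
-    /// A sketch of the expected split (paths and contents are illustrative):
-    ///
-    /// ```no_run
-    /// use foundry_compilers_artifacts_solc::{Settings, SolcInput, Source, Sources};
-    ///
-    /// let mut sources = Sources::new();
-    /// sources.insert("src/A.sol".into(), Source::new("contract A {}"));
-    /// sources.insert("src/B.yul".into(), Source::new("{}"));
-    /// // One input per language present: first Solidity, then Yul.
-    /// let inputs = SolcInput::resolve_and_build(sources, Settings::default());
-    /// assert_eq!(inputs.len(), 2);
-    /// ```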
- pub fn resolve_and_build(sources: Sources, settings: Settings) -> Vec { - let mut solidity_sources = Sources::new(); - let mut yul_sources = Sources::new(); - - for (file, source) in sources { - if file.extension().map_or(false, |e| e == "yul") { - yul_sources.insert(file, source); - } else if file.extension().map_or(false, |e| e == "sol") { - solidity_sources.insert(file, source); - } - } - - let mut res = Vec::new(); - - if !solidity_sources.is_empty() { - res.push(Self::new(SolcLanguage::Solidity, solidity_sources, settings.clone())) - } - - if !yul_sources.is_empty() { - res.push(Self::new(SolcLanguage::Yul, yul_sources, settings)) - } - - res - } - - /// This will remove/adjust values in the [`SolcInput`] that are not compatible with this - /// version - pub fn sanitize(&mut self, version: &Version) { - self.settings.sanitize(version, self.language); - } - - /// Consumes the type and returns a [SolcInput::sanitized] version - pub fn sanitized(mut self, version: &Version) -> Self { - self.settings.sanitize(version, self.language); - self - } - - /// Sets the EVM version for compilation - #[must_use] - pub fn evm_version(mut self, version: EvmVersion) -> Self { - self.settings.evm_version = Some(version); - self - } - - /// Sets the optimizer runs (default = 200) - #[must_use] - pub fn optimizer(mut self, runs: usize) -> Self { - self.settings.optimizer.runs(runs); - self - } - - /// Sets the path of the source files to `root` adjoined to the existing path - #[must_use] - pub fn join_path(mut self, root: &Path) -> Self { - self.sources = self.sources.into_iter().map(|(path, s)| (root.join(path), s)).collect(); - self - } - - /// Removes the `base` path from all source files - pub fn strip_prefix(&mut self, base: &Path) { - self.sources = std::mem::take(&mut self.sources) - .into_iter() - .map(|(path, s)| (strip_prefix_owned(path, base), s)) - .collect(); - - self.settings.strip_prefix(base); - } - - /// The flag indicating whether the current [SolcInput] is - /// constructed for the yul sources - pub fn is_yul(&self) -> bool { - self.language == SolcLanguage::Yul - } -} - -/// A `CompilerInput` representation used for verify -/// -/// This type is an alternative `CompilerInput` but uses non-alphabetic ordering of the `sources` -/// and instead emits the (Path -> Source) path in the same order as the pairs in the `sources` -/// `Vec`. This is used over a map, so we can determine the order in which etherscan will display -/// the verified contracts -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct StandardJsonCompilerInput { - pub language: SolcLanguage, - #[serde(with = "serde_helpers::tuple_vec_map")] - pub sources: Vec<(PathBuf, Source)>, - pub settings: Settings, -} - -// === impl StandardJsonCompilerInput === - -impl StandardJsonCompilerInput { - pub fn new(sources: Vec<(PathBuf, Source)>, settings: Settings) -> Self { - Self { language: SolcLanguage::Solidity, sources, settings } - } - - /// Normalizes the EVM version used in the settings to be up to the latest one - /// supported by the provided compiler version. 
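-    ///
-    /// A sketch (the versions are illustrative): `cancun` is not supported by solc 0.8.19,
-    /// so it is lowered to the highest fork that version knows, `paris`:
-    ///
-    /// ```
-    /// use foundry_compilers_artifacts_solc::{EvmVersion, Settings, StandardJsonCompilerInput};
-    /// use semver::Version;
-    ///
-    /// let mut settings = Settings::default();
-    /// settings.evm_version = Some(EvmVersion::Cancun);
-    /// let input = StandardJsonCompilerInput::new(vec![], settings)
-    ///     .normalize_evm_version(&Version::new(0, 8, 19));
-    /// assert_eq!(input.settings.evm_version, Some(EvmVersion::Paris));
-    /// ```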
- #[must_use] - pub fn normalize_evm_version(mut self, version: &Version) -> Self { - if let Some(evm_version) = &mut self.settings.evm_version { - self.settings.evm_version = evm_version.normalize_version_solc(version); - } - self - } -} - -impl From for SolcInput { - fn from(input: StandardJsonCompilerInput) -> Self { - let StandardJsonCompilerInput { language, sources, settings } = input; - Self { language, sources: sources.into_iter().collect(), settings } - } -} - -impl From for StandardJsonCompilerInput { - fn from(input: SolcInput) -> Self { - let SolcInput { language, sources, settings, .. } = input; - Self { language, sources: sources.into_iter().collect(), settings } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Settings { - /// Stop compilation after the given stage. - /// since 0.8.11: only "parsing" is valid here - #[serde(default, skip_serializing_if = "Option::is_none")] - pub stop_after: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub remappings: Vec, - /// Custom Optimizer settings - #[serde(default)] - pub optimizer: Optimizer, - /// Model Checker options. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub model_checker: Option, - /// Metadata settings - #[serde(default, skip_serializing_if = "Option::is_none")] - pub metadata: Option, - /// This field can be used to select desired outputs based - /// on file and contract names. - /// If this field is omitted, then the compiler loads and does type - /// checking, but will not generate any outputs apart from errors. - #[serde(default)] - pub output_selection: OutputSelection, - #[serde( - default, - with = "serde_helpers::display_from_str_opt", - skip_serializing_if = "Option::is_none" - )] - pub evm_version: Option, - /// Change compilation pipeline to go through the Yul intermediate representation. This is - /// false by default. - #[serde(rename = "viaIR", default, skip_serializing_if = "Option::is_none")] - pub via_ir: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub debug: Option, - /// Addresses of the libraries. If not all libraries are given here, - /// it can result in unlinked objects whose output data is different. - /// - /// The top level key is the name of the source file where the library is used. - /// If remappings are used, this source file should match the global path - /// after remappings were applied. - /// If this key is an empty string, that refers to a global level. - #[serde(default)] - pub libraries: Libraries, - /// Specify EOF version to produce. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub eof_version: Option, -} - -/// Available EOF versions. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize_repr, Deserialize_repr)] -#[repr(u8)] -pub enum EofVersion { - V1 = 1, -} - -impl Settings { - /// Creates a new `Settings` instance with the given `output_selection` - pub fn new(output_selection: impl Into) -> Self { - Self { output_selection: output_selection.into(), ..Default::default() } - } - - /// Consumes the type and returns a [Settings::sanitize] version - pub fn sanitized(mut self, version: &Version, language: SolcLanguage) -> Self { - self.sanitize(version, language); - self - } - - /// This will remove/adjust values in the settings that are not compatible with this version. 
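-    ///
-    /// For example (a sketch; the version is illustrative), `viaIR` was only introduced in
-    /// solc 0.7.5, so sanitizing for an older compiler drops it:
-    ///
-    /// ```
-    /// use foundry_compilers_artifacts_solc::{Settings, SolcLanguage};
-    /// use semver::Version;
-    ///
-    /// let mut settings = Settings::default().with_via_ir();
-    /// settings.sanitize(&Version::new(0, 7, 0), SolcLanguage::Solidity);
-    /// assert_eq!(settings.via_ir, None);
-    /// ```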
-    pub fn sanitize(&mut self, version: &Version, language: SolcLanguage) {
-        const V0_6_0: Version = Version::new(0, 6, 0);
-        if *version < V0_6_0 {
-            if let Some(meta) = &mut self.metadata {
-                // introduced in
-                // missing in
-                meta.bytecode_hash = None;
-            }
-            // introduced in
-            self.debug = None;
-        }
-
-        const V0_7_5: Version = Version::new(0, 7, 5);
-        if *version < V0_7_5 {
-            // introduced in 0.7.5
-            self.via_ir = None;
-        }
-
-        const V0_8_7: Version = Version::new(0, 8, 7);
-        if *version < V0_8_7 {
-            // lower the disable version from 0.8.10 to 0.8.7, because `divModNoSlacks`,
-            // `showUnproved` and `solvers` are implemented
-            // introduced in
-            self.model_checker = None;
-        }
-
-        const V0_8_10: Version = Version::new(0, 8, 10);
-        if *version < V0_8_10 {
-            if let Some(debug) = &mut self.debug {
-                // introduced in
-                //
-                debug.debug_info.clear();
-            }
-
-            if let Some(model_checker) = &mut self.model_checker {
-                // introduced in
-                model_checker.invariants = None;
-            }
-        }
-
-        const V0_8_18: Version = Version::new(0, 8, 18);
-        if *version < V0_8_18 {
-            // introduced in 0.8.18
-            if let Some(meta) = &mut self.metadata {
-                meta.cbor_metadata = None;
-            }
-
-            if let Some(model_checker) = &mut self.model_checker {
-                if let Some(solvers) = &mut model_checker.solvers {
-                    // eld solver introduced in 0.8.18
-                    solvers.retain(|solver| *solver != ModelCheckerSolver::Eld);
-                }
-            }
-        }
-
-        if *version < SHANGHAI_SOLC {
-            // introduced in 0.8.20
-            if let Some(model_checker) = &mut self.model_checker {
-                model_checker.show_proved_safe = None;
-                model_checker.show_unsupported = None;
-            }
-        }
-
-        if let Some(ref mut evm_version) = self.evm_version {
-            self.evm_version = evm_version.normalize_version_solc(version);
-        }
-
-        if language == SolcLanguage::Yul {
-            if !self.remappings.is_empty() {
-                warn!("omitting remappings supplied for the yul sources");
-            }
-            self.remappings = Vec::new();
-        }
-    }
-
-    /// Inserts a set of `ContractOutputSelection`
-    pub fn push_all(&mut self, settings: impl IntoIterator<Item = ContractOutputSelection>) {
-        for value in settings {
-            self.push_output_selection(value)
-        }
-    }
-
-    /// Inserts a set of `ContractOutputSelection`
-    #[must_use]
-    pub fn with_extra_output(
-        mut self,
-        settings: impl IntoIterator<Item = ContractOutputSelection>,
-    ) -> Self {
-        for value in settings {
-            self.push_output_selection(value)
-        }
-        self
-    }
-
-    /// Inserts the value for all files and contracts
-    ///
-    /// ```
-    /// use foundry_compilers_artifacts_solc::{output_selection::ContractOutputSelection, Settings};
-    /// let mut selection = Settings::default();
-    /// selection.push_output_selection(ContractOutputSelection::Metadata);
-    /// ```
-    pub fn push_output_selection(&mut self, value: impl ToString) {
-        self.push_contract_output_selection("*", value)
-    }
-
-    /// Inserts the `key` `value` pair to the `output_selection` for all files
-    ///
-    /// If the `key` already exists, then the value is added to the existing list
-    pub fn push_contract_output_selection(
-        &mut self,
-        contracts: impl Into<String>,
-        value: impl ToString,
-    ) {
-        let value = value.to_string();
-        let values = self
-            .output_selection
-            .as_mut()
-            .entry("*".to_string())
-            .or_default()
-            .entry(contracts.into())
-            .or_default();
-        if !values.contains(&value) {
-            values.push(value)
-        }
-    }
-
-    /// Sets the value for all files and contracts
-    pub fn set_output_selection(&mut self, values: impl IntoIterator<Item = impl ToString>) {
-        self.set_contract_output_selection("*", values)
-    }
-
-    /// Sets the `key` to the `values` pair in the `output_selection` for all files
-    ///
-    /// This will replace the existing values for `key` if they're present.
-    pub fn set_contract_output_selection(
-        &mut self,
-        key: impl Into<String>,
-        values: impl IntoIterator<Item = impl ToString>,
-    ) {
-        self.output_selection
-            .as_mut()
-            .entry("*".to_string())
-            .or_default()
-            .insert(key.into(), values.into_iter().map(|s| s.to_string()).collect());
-    }
-
-    /// Sets the `viaIR` value.
-    #[must_use]
-    pub fn set_via_ir(mut self, via_ir: bool) -> Self {
-        self.via_ir = Some(via_ir);
-        self
-    }
-
-    /// Enables `viaIR`.
-    #[must_use]
-    pub fn with_via_ir(self) -> Self {
-        self.set_via_ir(true)
-    }
-
-    /// Enable `viaIR` and use the minimum optimization settings.
-    ///
-    /// This is useful in the following scenarios:
-    /// - When compiling for test coverage, this can resolve the "stack too deep" error while still
-    ///   giving a relatively accurate source mapping
-    /// - When compiling for test, this can reduce the compilation time
-    pub fn with_via_ir_minimum_optimization(mut self) -> Self {
-        // https://github.com/foundry-rs/foundry/pull/5349
-        // https://github.com/ethereum/solidity/issues/12533#issuecomment-1013073350
-        self.via_ir = Some(true);
-        self.optimizer.details = Some(OptimizerDetails {
-            peephole: Some(false),
-            inliner: Some(false),
-            jumpdest_remover: Some(false),
-            order_literals: Some(false),
-            deduplicate: Some(false),
-            cse: Some(false),
-            constant_optimizer: Some(false),
-            yul: Some(true), // enable yul optimizer
-            yul_details: Some(YulDetails {
-                stack_allocation: Some(true),
-                // with only the unused pruner step
-                optimizer_steps: Some("u".to_string()),
-            }),
-            // Set to None as it is only supported for solc starting from 0.8.22.
-            simple_counter_for_loop_unchecked_increment: None,
-        });
-        self
-    }
-
-    /// Adds `ast` to output
-    #[must_use]
-    pub fn with_ast(mut self) -> Self {
-        let output = self.output_selection.as_mut().entry("*".to_string()).or_default();
-        output.insert(String::new(), vec!["ast".to_string()]);
-        self
-    }
-
-    pub fn strip_prefix(&mut self, base: &Path) {
-        self.remappings.iter_mut().for_each(|r| {
-            r.strip_prefix(base);
-        });
-
-        self.libraries.libs = std::mem::take(&mut self.libraries.libs)
-            .into_iter()
-            .map(|(file, libs)| (file.strip_prefix(base).map(Into::into).unwrap_or(file), libs))
-            .collect();
-
-        self.output_selection = OutputSelection(
-            std::mem::take(&mut self.output_selection.0)
-                .into_iter()
-                .map(|(file, selection)| {
-                    (
-                        Path::new(&file)
-                            .strip_prefix(base)
-                            .map(|p| p.display().to_string())
-                            .unwrap_or(file),
-                        selection,
-                    )
-                })
-                .collect(),
-        );
-
-        if let Some(mut model_checker) = self.model_checker.take() {
-            model_checker.contracts = model_checker
-                .contracts
-                .into_iter()
-                .map(|(path, contracts)| {
-                    (
-                        Path::new(&path)
-                            .strip_prefix(base)
-                            .map(|p| p.display().to_string())
-                            .unwrap_or(path),
-                        contracts,
-                    )
-                })
-                .collect();
-            self.model_checker = Some(model_checker);
-        }
-    }
-
-    /// Strips `base` from all paths
-    pub fn with_base_path(mut self, base: &Path) -> Self {
-        self.strip_prefix(base);
-        self
-    }
-}
-
-impl Default for Settings {
-    fn default() -> Self {
-        Self {
-            stop_after: None,
-            optimizer: Default::default(),
-            metadata: None,
-            output_selection: OutputSelection::default_output_selection(),
-            evm_version: Some(EvmVersion::default()),
-            via_ir: None,
-            debug: None,
-            libraries: Default::default(),
-            remappings: Default::default(),
-            model_checker: None,
-            eof_version: None,
-        }
-        .with_ast()
-    }
-}
-
-/// A wrapper type for all libraries in the form of `<file>:<lib>:<addr>`
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(transparent)]
-pub struct
Libraries {
-    /// All libraries, `(file path -> (Lib name -> Address))`.
-    pub libs: BTreeMap<PathBuf, BTreeMap<String, String>>,
-}
-
-// === impl Libraries ===
-
-impl Libraries {
-    /// Parses all libraries in the form of
-    /// `<file>:<lib>:<addr>`
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// use foundry_compilers_artifacts_solc::Libraries;
-    ///
-    /// let libs = Libraries::parse(&[
-    ///     "src/DssSpell.sol:DssExecLib:0xfD88CeE74f7D78697775aBDAE53f9Da1559728E4".to_string(),
-    /// ])?;
-    /// # Ok::<(), Box<dyn std::error::Error>>(())
-    /// ```
-    pub fn parse(libs: &[String]) -> Result<Self, SolcError> {
-        let mut libraries = BTreeMap::default();
-        for lib in libs {
-            let mut items = lib.split(':');
-            let file = items.next().ok_or_else(|| {
-                SolcError::msg(format!("failed to parse path to library file: {lib}"))
-            })?;
-            let lib = items
-                .next()
-                .ok_or_else(|| SolcError::msg(format!("failed to parse library name: {lib}")))?;
-            let addr = items
-                .next()
-                .ok_or_else(|| SolcError::msg(format!("failed to parse library address: {lib}")))?;
-            if items.next().is_some() {
-                return Err(SolcError::msg(format!(
-                    "failed to parse, too many arguments passed: {lib}"
-                )));
-            }
-            libraries
-                .entry(file.into())
-                .or_insert_with(BTreeMap::default)
-                .insert(lib.to_string(), addr.to_string());
-        }
-        Ok(Self { libs: libraries })
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.libs.is_empty()
-    }
-
-    pub fn len(&self) -> usize {
-        self.libs.len()
-    }
-
-    /// Applies the given function to [Self] and returns the result.
-    pub fn apply<F: FnOnce(Self) -> Self>(self, f: F) -> Self {
-        f(self)
-    }
-
-    /// Strips the given prefix from all library file paths to make them relative to the given
-    /// `base` argument
-    pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self {
-        self.libs = self
-            .libs
-            .into_iter()
-            .map(|(f, l)| (f.strip_prefix(base).unwrap_or(&f).to_path_buf(), l))
-            .collect();
-        self
-    }
-
-    /// Converts all `\\` separators in _all_ paths to `/`
-    pub fn slash_paths(&mut self) {
-        #[cfg(windows)]
-        {
-            use path_slash::PathBufExt;
-
-            self.libs = std::mem::take(&mut self.libs)
-                .into_iter()
-                .map(|(path, libs)| (PathBuf::from(path.to_slash_lossy().as_ref()), libs))
-                .collect()
-        }
-    }
-}
-
-impl From<BTreeMap<PathBuf, BTreeMap<String, String>>> for Libraries {
-    fn from(libs: BTreeMap<PathBuf, BTreeMap<String, String>>) -> Self {
-        Self { libs }
-    }
-}
-
-impl AsRef<BTreeMap<PathBuf, BTreeMap<String, String>>> for Libraries {
-    fn as_ref(&self) -> &BTreeMap<PathBuf, BTreeMap<String, String>> {
-        &self.libs
-    }
-}
-
-impl AsMut<BTreeMap<PathBuf, BTreeMap<String, String>>> for Libraries {
-    fn as_mut(&mut self) -> &mut BTreeMap<PathBuf, BTreeMap<String, String>> {
-        &mut self.libs
-    }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Optimizer {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub enabled: Option<bool>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub runs: Option<usize>,
-    /// Switch optimizer components on or off in detail.
-    /// The "enabled" switch above provides two defaults which can be
-    /// tweaked here. If "details" is given, "enabled" can be omitted.
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub details: Option<OptimizerDetails>,
-}
-
-impl Optimizer {
-    pub fn runs(&mut self, runs: usize) {
-        self.runs = Some(runs);
-    }
-
-    pub fn disable(&mut self) {
-        self.enabled.take();
-    }
-
-    pub fn enable(&mut self) {
-        self.enabled = Some(true)
-    }
-}
-
-impl Default for Optimizer {
-    fn default() -> Self {
-        Self { enabled: Some(false), runs: Some(200), details: None }
-    }
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct OptimizerDetails {
-    /// The peephole optimizer is always on if no details are given,
-    /// use details to switch it off.
- #[serde(default, skip_serializing_if = "Option::is_none")] - pub peephole: Option, - /// The inliner is always on if no details are given, - /// use details to switch it off. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub inliner: Option, - /// The unused jumpdest remover is always on if no details are given, - /// use details to switch it off. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub jumpdest_remover: Option, - /// Sometimes re-orders literals in commutative operations. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub order_literals: Option, - /// Removes duplicate code blocks - #[serde(default, skip_serializing_if = "Option::is_none")] - pub deduplicate: Option, - /// Common subexpression elimination, this is the most complicated step but - /// can also provide the largest gain. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub cse: Option, - /// Optimize representation of literal numbers and strings in code. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub constant_optimizer: Option, - /// The new Yul optimizer. Mostly operates on the code of ABI coder v2 - /// and inline assembly. - /// It is activated together with the global optimizer setting - /// and can be deactivated here. - /// Before Solidity 0.6.0 it had to be activated through this switch. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub yul: Option, - /// Tuning options for the Yul optimizer. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub yul_details: Option, - /// Use unchecked arithmetic when incrementing the counter of for loops - /// under certain circumstances. It is always on if no details are given. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub simple_counter_for_loop_unchecked_increment: Option, -} - -// === impl OptimizerDetails === - -impl OptimizerDetails { - /// Returns true if no settings are set. - pub fn is_empty(&self) -> bool { - self.peephole.is_none() - && self.inliner.is_none() - && self.jumpdest_remover.is_none() - && self.order_literals.is_none() - && self.deduplicate.is_none() - && self.cse.is_none() - && self.constant_optimizer.is_none() - && self.yul.is_none() - && self.yul_details.as_ref().map(|yul| yul.is_empty()).unwrap_or(true) - && self.simple_counter_for_loop_unchecked_increment.is_none() - } -} - -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct YulDetails { - /// Improve allocation of stack slots for variables, can free up stack slots early. - /// Activated by default if the Yul optimizer is activated. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub stack_allocation: Option, - /// Select optimization steps to be applied. - /// Optional, the optimizer will use the default sequence if omitted. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub optimizer_steps: Option, -} - -// === impl YulDetails === - -impl YulDetails { - /// Returns true if no settings are set. - pub fn is_empty(&self) -> bool { - self.stack_allocation.is_none() && self.optimizer_steps.is_none() - } -} - -/// EVM versions. 
-/// -/// Default is `Cancun`, since 0.8.25 -/// -/// Kept in sync with: -// When adding new EVM versions (see a previous attempt at https://github.com/foundry-rs/compilers/pull/51): -// - add the version to the end of the enum -// - update the default variant to `m_version` default: https://github.com/ethereum/solidity/blob/develop/liblangutil/EVMVersion.h#L122 -// - create a constant for the Solc version that introduced it in `../compile/mod.rs` -// - add the version to the top of `normalize_version` and wherever else the compiler complains -// - update `FromStr` impl -// - write a test case in `test_evm_version_normalization` at the bottom of this file. -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -pub enum EvmVersion { - Homestead, - TangerineWhistle, - SpuriousDragon, - Byzantium, - Constantinople, - Petersburg, - Istanbul, - Berlin, - London, - Paris, - Shanghai, - #[default] - Cancun, - Prague, -} - -impl EvmVersion { - /// Find the default EVM version for the given compiler version. - pub fn default_version_solc(version: &Version) -> Option { - // In most cases, Solc compilers use the highest EVM version available at the time. - let default = Self::default().normalize_version_solc(version)?; - - // However, there are some exceptions where the default is lower than the highest available. - match default { - Self::Constantinople => { - // Actually, Constantinople is never used as the default EVM version by Solidity - // compilers. - Some(Self::Byzantium) - } - Self::Cancun if *version == Version::new(0, 8, 24) => { - // While Cancun is introduced at the time of releasing 0.8.24, it has not been - // supported by the mainnet. So, the default EVM version of Solc 0.8.24 remains as - // Shanghai. - // - // - Some(Self::Shanghai) - } - Self::Prague if *version == Version::new(0, 8, 27) => { - // Prague was not set as default EVM version in 0.8.27. - Some(Self::Cancun) - } - _ => Some(default), - } - } - - /// Normalizes this EVM version by checking against the given Solc [`Version`]. - pub fn normalize_version_solc(self, version: &Version) -> Option { - // The EVM version flag was only added in 0.4.21; we work our way backwards - if *version >= BYZANTIUM_SOLC { - // If the Solc version is the latest, it supports all EVM versions. - // For all other cases, cap at the at-the-time highest possible fork. - let normalized = if *version >= PRAGUE_SOLC { - self - } else if self >= Self::Cancun && *version >= CANCUN_SOLC { - Self::Cancun - } else if self >= Self::Shanghai && *version >= SHANGHAI_SOLC { - Self::Shanghai - } else if self >= Self::Paris && *version >= PARIS_SOLC { - Self::Paris - } else if self >= Self::London && *version >= LONDON_SOLC { - Self::London - } else if self >= Self::Berlin && *version >= BERLIN_SOLC { - Self::Berlin - } else if self >= Self::Istanbul && *version >= ISTANBUL_SOLC { - Self::Istanbul - } else if self >= Self::Petersburg && *version >= PETERSBURG_SOLC { - Self::Petersburg - } else if self >= Self::Constantinople && *version >= CONSTANTINOPLE_SOLC { - Self::Constantinople - } else if self >= Self::Byzantium { - Self::Byzantium - } else { - self - }; - Some(normalized) - } else { - None - } - } - - /// Returns the EVM version as a string. 
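-    ///
-    /// A small sanity check (sketch):
-    ///
-    /// ```
-    /// use foundry_compilers_artifacts_solc::EvmVersion;
-    ///
-    /// assert_eq!(EvmVersion::Cancun.as_str(), "cancun");
-    /// assert_eq!(EvmVersion::Cancun.to_string(), "cancun");
-    /// ```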
- pub const fn as_str(&self) -> &'static str { - match self { - Self::Homestead => "homestead", - Self::TangerineWhistle => "tangerineWhistle", - Self::SpuriousDragon => "spuriousDragon", - Self::Byzantium => "byzantium", - Self::Constantinople => "constantinople", - Self::Petersburg => "petersburg", - Self::Istanbul => "istanbul", - Self::Berlin => "berlin", - Self::London => "london", - Self::Paris => "paris", - Self::Shanghai => "shanghai", - Self::Cancun => "cancun", - Self::Prague => "prague", - } - } - - /// Has the `RETURNDATACOPY` and `RETURNDATASIZE` opcodes. - pub fn supports_returndata(&self) -> bool { - *self >= Self::Byzantium - } - - pub fn has_static_call(&self) -> bool { - *self >= Self::Byzantium - } - - pub fn has_bitwise_shifting(&self) -> bool { - *self >= Self::Constantinople - } - - pub fn has_create2(&self) -> bool { - *self >= Self::Constantinople - } - - pub fn has_ext_code_hash(&self) -> bool { - *self >= Self::Constantinople - } - - pub fn has_chain_id(&self) -> bool { - *self >= Self::Istanbul - } - - pub fn has_self_balance(&self) -> bool { - *self >= Self::Istanbul - } - - pub fn has_base_fee(&self) -> bool { - *self >= Self::London - } - - pub fn has_prevrandao(&self) -> bool { - *self >= Self::Paris - } - - pub fn has_push0(&self) -> bool { - *self >= Self::Shanghai - } -} - -impl fmt::Display for EvmVersion { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -impl FromStr for EvmVersion { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "homestead" => Ok(Self::Homestead), - "tangerineWhistle" => Ok(Self::TangerineWhistle), - "spuriousDragon" => Ok(Self::SpuriousDragon), - "byzantium" => Ok(Self::Byzantium), - "constantinople" => Ok(Self::Constantinople), - "petersburg" => Ok(Self::Petersburg), - "istanbul" => Ok(Self::Istanbul), - "berlin" => Ok(Self::Berlin), - "london" => Ok(Self::London), - "paris" => Ok(Self::Paris), - "shanghai" => Ok(Self::Shanghai), - "cancun" => Ok(Self::Cancun), - "prague" => Ok(Self::Prague), - s => Err(format!("Unknown evm version: {s}")), - } - } -} - -/// Debugging settings for solc -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct DebuggingSettings { - #[serde( - default, - with = "serde_helpers::display_from_str_opt", - skip_serializing_if = "Option::is_none" - )] - pub revert_strings: Option, - /// How much extra debug information to include in comments in the produced EVM assembly and - /// Yul code. - /// Available components are: - // - `location`: Annotations of the form `@src ::` indicating the location of - // the corresponding element in the original Solidity file, where: - // - `` is the file index matching the `@use-src` annotation, - // - `` is the index of the first byte at that location, - // - `` is the index of the first byte after that location. - // - `snippet`: A single-line code snippet from the location indicated by `@src`. The snippet is - // quoted and follows the corresponding `@src` annotation. - // - `*`: Wildcard value that can be used to request everything. - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub debug_info: Vec, -} - -/// How to treat revert (and require) reason strings. -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -pub enum RevertStrings { - /// "default" does not inject compiler-generated revert strings and keeps user-supplied ones. 
- #[default] - Default, - /// "strip" removes all revert strings (if possible, i.e. if literals are used) keeping - /// side-effects - Strip, - /// "debug" injects strings for compiler-generated internal reverts, implemented for ABI - /// encoders V1 and V2 for now. - Debug, - /// "verboseDebug" even appends further information to user-supplied revert strings (not yet - /// implemented) - VerboseDebug, -} - -impl fmt::Display for RevertStrings { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let string = match self { - Self::Default => "default", - Self::Strip => "strip", - Self::Debug => "debug", - Self::VerboseDebug => "verboseDebug", - }; - write!(f, "{string}") - } -} - -impl FromStr for RevertStrings { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "default" => Ok(Self::Default), - "strip" => Ok(Self::Strip), - "debug" => Ok(Self::Debug), - "verboseDebug" | "verbosedebug" => Ok(Self::VerboseDebug), - s => Err(format!("Unknown revert string mode: {s}")), - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct SettingsMetadata { - /// Use only literal content and not URLs (false by default) - #[serde(default, rename = "useLiteralContent", skip_serializing_if = "Option::is_none")] - pub use_literal_content: Option, - /// Use the given hash method for the metadata hash that is appended to the bytecode. - /// The metadata hash can be removed from the bytecode via option "none". - /// The other options are "ipfs" and "bzzr1". - /// If the option is omitted, "ipfs" is used by default. - #[serde( - default, - rename = "bytecodeHash", - skip_serializing_if = "Option::is_none", - with = "serde_helpers::display_from_str_opt" - )] - pub bytecode_hash: Option, - #[serde(default, rename = "appendCBOR", skip_serializing_if = "Option::is_none")] - pub cbor_metadata: Option, -} - -impl SettingsMetadata { - pub fn new(hash: BytecodeHash, cbor: bool) -> Self { - Self { use_literal_content: None, bytecode_hash: Some(hash), cbor_metadata: Some(cbor) } - } -} - -impl From for SettingsMetadata { - fn from(hash: BytecodeHash) -> Self { - Self { use_literal_content: None, bytecode_hash: Some(hash), cbor_metadata: None } - } -} - -/// Determines the hash method for the metadata hash that is appended to the bytecode. -/// -/// Solc's default is `Ipfs`, see . 
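-///
-/// A sketch of the expected string round-trip:
-///
-/// ```
-/// use foundry_compilers_artifacts_solc::BytecodeHash;
-///
-/// assert_eq!("none".parse::<BytecodeHash>(), Ok(BytecodeHash::None));
-/// assert_eq!(BytecodeHash::Ipfs.to_string(), "ipfs");
-/// ```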
-#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -pub enum BytecodeHash { - #[default] - Ipfs, - None, - Bzzr1, -} - -impl FromStr for BytecodeHash { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "none" => Ok(Self::None), - "ipfs" => Ok(Self::Ipfs), - "bzzr1" => Ok(Self::Bzzr1), - s => Err(format!("Unknown bytecode hash: {s}")), - } - } -} - -impl fmt::Display for BytecodeHash { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let s = match self { - Self::Ipfs => "ipfs", - Self::None => "none", - Self::Bzzr1 => "bzzr1", - }; - f.write_str(s) - } -} - -/// Bindings for [`solc` contract metadata](https://docs.soliditylang.org/en/latest/metadata.html) -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct Metadata { - pub compiler: Compiler, - pub language: String, - pub output: Output, - pub settings: MetadataSettings, - pub sources: MetadataSources, - pub version: i64, -} - -/// A helper type that ensures lossless (de)serialisation so we can preserve the exact String -/// metadata value that's being hashed by solc -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct LosslessMetadata { - /// The complete abi as json value - pub raw_metadata: String, - /// The deserialised metadata of `raw_metadata` - pub metadata: Metadata, -} - -// === impl LosslessMetadata === - -impl LosslessMetadata { - /// Returns the whole string raw metadata as `serde_json::Value` - pub fn raw_json(&self) -> serde_json::Result { - serde_json::from_str(&self.raw_metadata) - } -} - -impl Serialize for LosslessMetadata { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&self.raw_metadata) - } -} - -impl<'de> Deserialize<'de> for LosslessMetadata { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct LosslessMetadataVisitor; - - impl<'de> Visitor<'de> for LosslessMetadataVisitor { - type Value = LosslessMetadata; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(formatter, "metadata string") - } - - fn visit_str(self, value: &str) -> Result - where - E: serde::de::Error, - { - let metadata = serde_json::from_str(value).map_err(serde::de::Error::custom)?; - let raw_metadata = value.to_string(); - Ok(LosslessMetadata { raw_metadata, metadata }) - } - } - deserializer.deserialize_str(LosslessMetadataVisitor) - } -} - -/// Compiler settings -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct MetadataSettings { - #[serde(default)] - pub remappings: Vec, - pub optimizer: Optimizer, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub metadata: Option, - /// Required for Solidity: File and name of the contract or library this metadata is created - /// for. - #[serde(default, rename = "compilationTarget")] - pub compilation_target: BTreeMap, - // Introduced in 0.8.20 - #[serde( - default, - rename = "evmVersion", - with = "serde_helpers::display_from_str_opt", - skip_serializing_if = "Option::is_none" - )] - pub evm_version: Option, - /// Metadata settings - /// - /// Note: this differs from `Libraries` and does not require another mapping for file name - /// since metadata is per file - #[serde(default)] - pub libraries: BTreeMap, - /// Change compilation pipeline to go through the Yul intermediate representation. This is - /// false by default. 
- #[serde(rename = "viaIR", default, skip_serializing_if = "Option::is_none")] - pub via_ir: Option, -} - -/// Compilation source files/source units, keys are file names -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct MetadataSources { - #[serde(flatten)] - pub inner: BTreeMap, -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct MetadataSource { - /// Required: keccak256 hash of the source file - pub keccak256: String, - /// Required (unless "content" is used, see below): Sorted URL(s) - /// to the source file, protocol is more or less arbitrary, but a - /// Swarm URL is recommended - #[serde(default)] - pub urls: Vec, - /// Required (unless "url" is used): literal contents of the source file - #[serde(default, skip_serializing_if = "Option::is_none")] - pub content: Option, - /// Optional: SPDX license identifier as given in the source file - pub license: Option, -} - -/// Model checker settings for solc -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ModelCheckerSettings { - #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] - pub contracts: BTreeMap>, - #[serde( - default, - with = "serde_helpers::display_from_str_opt", - skip_serializing_if = "Option::is_none" - )] - pub engine: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub timeout: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub targets: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub invariants: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub show_unproved: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub div_mod_with_slacks: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub solvers: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub show_unsupported: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub show_proved_safe: Option, -} - -/// Which model checker engine to run. -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -pub enum ModelCheckerEngine { - #[default] - Default, - All, - BMC, - CHC, -} - -impl fmt::Display for ModelCheckerEngine { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let string = match self { - Self::Default => "none", - Self::All => "all", - Self::BMC => "bmc", - Self::CHC => "chc", - }; - write!(f, "{string}") - } -} - -impl FromStr for ModelCheckerEngine { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "none" => Ok(Self::Default), - "all" => Ok(Self::All), - "bmc" => Ok(Self::BMC), - "chc" => Ok(Self::CHC), - s => Err(format!("Unknown model checker engine: {s}")), - } - } -} - -/// Which model checker targets to check. 
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum ModelCheckerTarget { - Assert, - Underflow, - Overflow, - DivByZero, - ConstantCondition, - PopEmptyArray, - OutOfBounds, - Balance, -} - -impl fmt::Display for ModelCheckerTarget { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let string = match self { - Self::Assert => "assert", - Self::Underflow => "underflow", - Self::Overflow => "overflow", - Self::DivByZero => "divByZero", - Self::ConstantCondition => "constantCondition", - Self::PopEmptyArray => "popEmptyArray", - Self::OutOfBounds => "outOfBounds", - Self::Balance => "balance", - }; - write!(f, "{string}") - } -} - -impl FromStr for ModelCheckerTarget { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "assert" => Ok(Self::Assert), - "underflow" => Ok(Self::Underflow), - "overflow" => Ok(Self::Overflow), - "divByZero" => Ok(Self::DivByZero), - "constantCondition" => Ok(Self::ConstantCondition), - "popEmptyArray" => Ok(Self::PopEmptyArray), - "outOfBounds" => Ok(Self::OutOfBounds), - "balance" => Ok(Self::Balance), - s => Err(format!("Unknown model checker target: {s}")), - } - } -} - -/// Which model checker invariants to check. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub enum ModelCheckerInvariant { - Contract, - Reentrancy, -} - -impl fmt::Display for ModelCheckerInvariant { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let string = match self { - Self::Contract => "contract", - Self::Reentrancy => "reentrancy", - }; - write!(f, "{string}") - } -} - -impl FromStr for ModelCheckerInvariant { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "contract" => Ok(Self::Contract), - "reentrancy" => Ok(Self::Reentrancy), - s => Err(format!("Unknown model checker invariant: {s}")), - } - } -} - -/// Which model checker solvers to check. 
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub enum ModelCheckerSolver {
-    Cvc4,
-    Eld,
-    Smtlib2,
-    Z3,
-}
-
-impl fmt::Display for ModelCheckerSolver {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let string = match self {
-            Self::Cvc4 => "cvc4",
-            Self::Eld => "eld",
-            Self::Smtlib2 => "smtlib2",
-            Self::Z3 => "z3",
-        };
-        write!(f, "{string}")
-    }
-}
-
-impl FromStr for ModelCheckerSolver {
-    type Err = String;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "cvc4" => Ok(Self::Cvc4),
-            "eld" => Ok(Self::Eld),
-            "smtlib2" => Ok(Self::Smtlib2),
-            "z3" => Ok(Self::Z3),
-            s => Err(format!("Unknown model checker solver: {s}")),
-        }
-    }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Compiler {
-    pub version: String,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Output {
-    pub abi: Vec<SolcAbi>,
-    pub devdoc: Option<Doc>,
-    pub userdoc: Option<Doc>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct SolcAbi {
-    #[serde(default)]
-    pub inputs: Vec<Item>,
-    #[serde(rename = "stateMutability", skip_serializing_if = "Option::is_none")]
-    pub state_mutability: Option<String>,
-    #[serde(rename = "type")]
-    pub abi_type: String,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub name: Option<String>,
-    #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub outputs: Vec<Item>,
-    // required to satisfy solidity events
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub anonymous: Option<bool>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Item {
-    #[serde(rename = "internalType")]
-    pub internal_type: Option<String>,
-    pub name: String,
-    #[serde(rename = "type")]
-    pub put_type: String,
-    #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub components: Vec<Item>,
-    /// Indexed flag, for Solidity events.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Item {
-    #[serde(rename = "internalType")]
-    pub internal_type: Option<String>,
-    pub name: String,
-    #[serde(rename = "type")]
-    pub put_type: String,
-    #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub components: Vec<Item>,
-    /// Indexed flag, for Solidity events.
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub indexed: Option<bool>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Doc {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub kind: Option<String>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub methods: Option<DocLibraries>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub version: Option<u32>,
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct DocLibraries {
-    #[serde(flatten)]
-    pub libs: BTreeMap<String, serde_json::Value>,
-}
-
-/// Output type `solc` produces
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct CompilerOutput {
-    #[serde(default, skip_serializing_if = "Vec::is_empty")]
-    pub errors: Vec<Error>,
-    #[serde(default)]
-    pub sources: BTreeMap<PathBuf, SourceFile>,
-    #[serde(default)]
-    pub contracts: Contracts,
-}
-
-impl CompilerOutput {
-    /// Whether the output contains a compiler error
-    pub fn has_error(&self) -> bool {
-        self.errors.iter().any(|err| err.severity.is_error())
-    }
-
-    /// Finds the _first_ contract with the given name
-    pub fn find(&self, contract_name: &str) -> Option<CompactContractRef<'_>> {
-        self.contracts_iter().find_map(|(name, contract)| {
-            (name == contract_name).then(|| CompactContractRef::from(contract))
-        })
-    }
-
-    /// Finds the first contract with the given name and removes it from the set
-    pub fn remove(&mut self, contract_name: &str) -> Option<Contract> {
-        self.contracts.values_mut().find_map(|c| c.remove(contract_name))
-    }
-
-    /// Iterate over all contracts and their names
-    pub fn contracts_iter(&self) -> impl Iterator<Item = (&String, &Contract)> {
-        self.contracts.values().flatten()
-    }
-
-    /// Iterate over all contracts and their names
-    pub fn contracts_into_iter(self) -> impl Iterator<Item = (String, Contract)> {
-        self.contracts.into_values().flatten()
-    }
-
-    /// Given the contract file's path and the contract's name, tries to return the contract's
-    /// bytecode, runtime bytecode, and abi
-    pub fn get(&self, path: &Path, contract: &str) -> Option<CompactContractRef<'_>> {
-        self.contracts
-            .get(path)
-            .and_then(|contracts| contracts.get(contract))
-            .map(CompactContractRef::from)
-    }
-
-    /// Returns the output's source files and contracts separately, wrapped in helper types that
-    /// provide several helper methods
-    pub fn split(self) -> (SourceFiles, OutputContracts) {
-        (SourceFiles(self.sources), OutputContracts(self.contracts))
-    }
-
-    /// Retains only those files the given iterator yields
-    ///
-    /// In other words, removes all contracts for files not included in the iterator
-    pub fn retain_files<'a, I>(&mut self, files: I)
-    where
-        I: IntoIterator<Item = &'a Path>,
-    {
-        // Note: use `to_lowercase` here because solc does not necessarily emit the exact file
-        // name, e.g. `src/utils/upgradeProxy.sol` is emitted as `src/utils/UpgradeProxy.sol`
-        let files: HashSet<_> =
-            files.into_iter().map(|s| s.to_string_lossy().to_lowercase()).collect();
-        self.contracts.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase()));
-        self.sources.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase()));
-    }
-
-    pub fn merge(&mut self, other: Self) {
-        self.errors.extend(other.errors);
-        self.contracts.extend(other.contracts);
-        self.sources.extend(other.sources);
-    }
-}
-
-/// A wrapper helper type for the `Contracts` type alias
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
-pub struct OutputContracts(pub Contracts);
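A short usage sketch for the `CompilerOutput` accessors above; the contract name `"Counter"` is a made-up example, and the import path assumes the pre-refactor re-exports:

```rust
use foundry_compilers::artifacts::CompilerOutput;

fn inspect(raw_standard_json: &str) -> serde_json::Result<()> {
    let output: CompilerOutput = serde_json::from_str(raw_standard_json)?;
    // `has_error` only reports error-severity diagnostics, not warnings.
    if output.has_error() {
        eprintln!("compilation produced errors");
    }
    // `find` returns a cheap borrowed view of the first matching contract.
    if let Some(contract) = output.find("Counter") {
        println!("abi present: {}", contract.abi.is_some());
    }
    Ok(())
}
```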
-impl OutputContracts {
-    /// Returns an iterator over all contracts and their source names.
-    pub fn into_contracts(self) -> impl Iterator<Item = (String, Contract)> {
-        self.0.into_values().flatten()
-    }
-
-    /// Iterate over all contracts and their names
-    pub fn contracts_iter(&self) -> impl Iterator<Item = (&String, &Contract)> {
-        self.0.values().flatten()
-    }
-
-    /// Finds the _first_ contract with the given name
-    pub fn find(&self, contract_name: &str) -> Option<CompactContractRef<'_>> {
-        self.contracts_iter().find_map(|(name, contract)| {
-            (name == contract_name).then(|| CompactContractRef::from(contract))
-        })
-    }
-
-    /// Finds the first contract with the given name and removes it from the set
-    pub fn remove(&mut self, contract_name: &str) -> Option<Contract> {
-        self.0.values_mut().find_map(|c| c.remove(contract_name))
-    }
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct UserDoc {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub version: Option<u32>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub kind: Option<String>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub methods: BTreeMap<String, UserDocNotice>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub events: BTreeMap<String, UserDocNotice>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub errors: BTreeMap<String, Vec<UserDocNotice>>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub notice: Option<String>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(untagged)]
-pub enum UserDocNotice {
-    // NOTE: this is a variant used for constructors on older solc versions
-    Constructor(String),
-    Notice { notice: String },
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct DevDoc {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub version: Option<u32>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub kind: Option<String>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub author: Option<String>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub details: Option<String>,
-    #[serde(default, rename = "custom:experimental", skip_serializing_if = "Option::is_none")]
-    pub custom_experimental: Option<String>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub methods: BTreeMap<String, MethodDoc>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub events: BTreeMap<String, EventDoc>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub errors: BTreeMap<String, Vec<ErrorDoc>>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub title: Option<String>,
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct MethodDoc {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub details: Option<String>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub params: BTreeMap<String, String>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub returns: BTreeMap<String, String>,
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct EventDoc {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub details: Option<String>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub params: BTreeMap<String, String>,
-}
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct ErrorDoc {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub details: Option<String>,
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub params: BTreeMap<String, String>,
-}
-
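Because `UserDocNotice` is `#[serde(untagged)]`, both shapes that solc has emitted over the years deserialize into it. A small sketch (import path assumed):

```rust
use foundry_compilers::artifacts::UserDocNotice;

fn main() {
    // Older solc emits constructor notices as a bare string...
    let old: UserDocNotice = serde_json::from_str(r#""Creates a token""#).unwrap();
    assert!(matches!(old, UserDocNotice::Constructor(_)));

    // ...while newer output wraps the text in an object.
    let new: UserDocNotice = serde_json::from_str(r#"{"notice":"Creates a token"}"#).unwrap();
    assert!(matches!(new, UserDocNotice::Notice { .. }));
}
```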
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Evm {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub assembly: Option<String>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub legacy_assembly: Option<serde_json::Value>,
-    pub bytecode: Option<Bytecode>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub deployed_bytecode: Option<DeployedBytecode>,
-    /// The list of function hashes
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub method_identifiers: BTreeMap<String, String>,
-    /// Function gas estimates
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub gas_estimates: Option<GasEstimates>,
-}
-
-impl Evm {
-    /// Crate internal helper to transform the underlying bytecode artifacts into a more
-    /// convenient structure
-    pub(crate) fn into_compact(self) -> CompactEvm {
-        let Self {
-            assembly,
-            legacy_assembly,
-            bytecode,
-            deployed_bytecode,
-            method_identifiers,
-            gas_estimates,
-        } = self;
-
-        let (bytecode, deployed_bytecode) = match (bytecode, deployed_bytecode) {
-            (Some(bcode), Some(dbcode)) => (Some(bcode.into()), Some(dbcode.into())),
-            (None, Some(dbcode)) => (None, Some(dbcode.into())),
-            (Some(bcode), None) => (Some(bcode.into()), None),
-            (None, None) => (None, None),
-        };
-
-        CompactEvm {
-            assembly,
-            legacy_assembly,
-            bytecode,
-            deployed_bytecode,
-            method_identifiers,
-            gas_estimates,
-        }
-    }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct CompactEvm {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub assembly: Option<String>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub legacy_assembly: Option<serde_json::Value>,
-    pub bytecode: Option<CompactBytecode>,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub deployed_bytecode: Option<CompactDeployedBytecode>,
-    /// The list of function hashes
-    #[serde(default, skip_serializing_if = "::std::collections::BTreeMap::is_empty")]
-    pub method_identifiers: BTreeMap<String, String>,
-    /// Function gas estimates
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub gas_estimates: Option<GasEstimates>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct FunctionDebugData {
-    pub entry_point: Option<u32>,
-    pub id: Option<u32>,
-    pub parameter_slots: Option<u32>,
-    pub return_slots: Option<u32>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct GeneratedSource {
-    pub ast: serde_json::Value,
-    pub contents: String,
-    pub id: u32,
-    pub language: String,
-    pub name: String,
-}
-
-/// Byte offsets into the bytecode.
-/// Linking replaces the 20 bytes located there.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Offsets {
-    pub start: u32,
-    pub length: u32,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct GasEstimates {
-    pub creation: Creation,
-    #[serde(default)]
-    pub external: BTreeMap<String, String>,
-    #[serde(default)]
-    pub internal: BTreeMap<String, String>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Creation {
-    pub code_deposit_cost: String,
-    pub execution_cost: String,
-    pub total_cost: String,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Ewasm {
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub wast: Option<String>,
-    pub wasm: String,
-}
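To make the `Offsets` doc concrete: linking overwrites the placeholder bytes at each recorded offset with a 20-byte library address. A hedged sketch (assuming alloy's `Address`; `apply_link` is illustrative, not part of this crate):

```rust
use alloy_primitives::Address;
use foundry_compilers::artifacts::Offsets; // assumed re-export path

/// Splice a library address into unlinked bytecode at one recorded offset.
fn apply_link(code: &mut [u8], offsets: &Offsets, lib: Address) {
    let start = offsets.start as usize;
    // Link placeholders are 20 bytes wide, the size of an address.
    assert_eq!(offsets.length, 20);
    code[start..start + 20].copy_from_slice(lib.as_slice());
}
```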
-/// Represents the `storage-layout` section of the `CompilerOutput` if selected.
-#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
-pub struct StorageLayout {
-    pub storage: Vec<Storage>,
-    #[serde(default, deserialize_with = "serde_helpers::default_for_null")]
-    pub types: BTreeMap<String, StorageType>,
-}
-
-impl StorageLayout {
-    pub fn is_empty(&self) -> bool {
-        self.storage.is_empty() && self.types.is_empty()
-    }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct Storage {
-    #[serde(rename = "astId")]
-    pub ast_id: u64,
-    pub contract: String,
-    pub label: String,
-    pub offset: i64,
-    pub slot: String,
-    #[serde(rename = "type")]
-    pub storage_type: String,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct StorageType {
-    pub encoding: String,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub key: Option<String>,
-    pub label: String,
-    #[serde(rename = "numberOfBytes")]
-    pub number_of_bytes: String,
-    #[serde(default, skip_serializing_if = "Option::is_none")]
-    pub value: Option<String>,
-    /// additional fields
-    #[serde(flatten)]
-    pub other: BTreeMap<String, serde_json::Value>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct SourceFile {
-    pub id: u32,
-    #[serde(default, with = "serde_helpers::empty_json_object_opt")]
-    pub ast: Option<Ast>,
-}
-
-impl SourceFile {
-    /// Returns `true` if the source file contains at least 1 `ContractDefinition` such as
-    /// `contract`, `abstract contract`, `interface` or `library`.
-    pub fn contains_contract_definition(&self) -> bool {
-        self.ast.as_ref().is_some_and(|ast| {
-            ast.nodes.iter().any(|node| matches!(node.node_type, NodeType::ContractDefinition))
-        })
-    }
-}
-
-/// A wrapper type for a list of source files: `path -> SourceFile`.
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct SourceFiles(pub BTreeMap<PathBuf, SourceFile>);
-
-impl SourceFiles {
-    /// Returns an iterator over the source files' IDs and path.
-    pub fn into_ids(self) -> impl Iterator<Item = (u32, PathBuf)> {
-        self.0.into_iter().map(|(k, v)| (v.id, k))
-    }
-
-    /// Returns an iterator over the source files' paths and IDs.
- pub fn into_paths(self) -> impl Iterator { - self.0.into_iter().map(|(k, v)| (k, v.id)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::Address; - use similar_asserts::assert_eq; - use std::fs; - - #[test] - fn can_link_bytecode() { - // test cases taken from - - #[derive(Serialize, Deserialize)] - struct Mockject { - object: BytecodeObject, - } - fn parse_bytecode(bytecode: &str) -> BytecodeObject { - let object: Mockject = - serde_json::from_value(serde_json::json!({ "object": bytecode })).unwrap(); - object.object - } - - let bytecode = "6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a723058207979b30bd4a07c77b02774a511f2a1dd04d7e5d65b5c2735b5fc96ad61d43ae40029"; - - let mut object = parse_bytecode(bytecode); - assert!(object.is_unlinked()); - assert!(object.contains_placeholder("lib2.sol", "L")); - assert!(object.contains_fully_qualified_placeholder("lib2.sol:L")); - assert!(object.link("lib2.sol", "L", Address::random()).resolve().is_some()); - assert!(!object.is_unlinked()); - - let mut code = Bytecode { - function_debug_data: Default::default(), - object: parse_bytecode(bytecode), - opcodes: None, - source_map: None, - generated_sources: vec![], - link_references: BTreeMap::from([( - "lib2.sol".to_string(), - BTreeMap::from([("L".to_string(), vec![])]), - )]), - }; - - assert!(!code.link("lib2.sol", "Y", Address::random())); - assert!(code.link("lib2.sol", "L", Address::random())); - assert!(code.link("lib2.sol", "L", Address::random())); - - let hashed_placeholder = "6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__$cb901161e812ceb78cfe30ca65050c4337$__6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a723058207979b30bd4a07c77b02774a511f2a1dd04d7e5d65b5c2735b5fc96ad61d43ae40029"; - let mut object = parse_bytecode(hashed_placeholder); - assert!(object.is_unlinked()); - assert!(object.contains_placeholder("lib2.sol", "L")); - assert!(object.contains_fully_qualified_placeholder("lib2.sol:L")); - assert!(object.link("lib2.sol", "L", Address::default()).resolve().is_some()); - assert!(!object.is_unlinked()); - } - - #[test] - fn can_parse_compiler_output() { - let dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../test-data/out"); - - for path in fs::read_dir(dir).unwrap() { - let path = path.unwrap().path(); - let compiler_output = fs::read_to_string(&path).unwrap(); - serde_json::from_str::(&compiler_output).unwrap_or_else(|err| { - panic!("Failed to read compiler output of {} {}", path.display(), err) - }); - } - } - - #[test] - fn can_parse_compiler_input() { - let dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../test-data/in"); - - for path in fs::read_dir(dir).unwrap() { - let path = path.unwrap().path(); - let compiler_input = fs::read_to_string(&path).unwrap(); - 
serde_json::from_str::(&compiler_input).unwrap_or_else(|err| { - panic!("Failed to read compiler input of {} {}", path.display(), err) - }); - } - } - - #[test] - fn can_parse_standard_json_compiler_input() { - let dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../test-data/in"); - - for path in fs::read_dir(dir).unwrap() { - let path = path.unwrap().path(); - let compiler_input = fs::read_to_string(&path).unwrap(); - let val = serde_json::from_str::(&compiler_input) - .unwrap_or_else(|err| { - panic!("Failed to read compiler output of {} {}", path.display(), err) - }); - - let pretty = serde_json::to_string_pretty(&val).unwrap(); - serde_json::from_str::(&pretty).unwrap_or_else(|err| { - panic!("Failed to read converted compiler input of {} {}", path.display(), err) - }); - } - } - - #[test] - fn test_evm_version_default() { - for &(solc_version, expected) in &[ - // Everything before 0.4.21 should always return None - ("0.4.20", None), - // Byzantium clipping - ("0.4.21", Some(EvmVersion::Byzantium)), - // Constantinople bug fix - ("0.4.22", Some(EvmVersion::Byzantium)), - // Petersburg - ("0.5.5", Some(EvmVersion::Petersburg)), - // Istanbul - ("0.5.14", Some(EvmVersion::Istanbul)), - // Berlin - ("0.8.5", Some(EvmVersion::Berlin)), - // London - ("0.8.7", Some(EvmVersion::London)), - // Paris - ("0.8.18", Some(EvmVersion::Paris)), - // Shanghai - ("0.8.20", Some(EvmVersion::Shanghai)), - // Cancun - ("0.8.24", Some(EvmVersion::Shanghai)), - ("0.8.25", Some(EvmVersion::Cancun)), - ] { - let version = Version::from_str(solc_version).unwrap(); - assert_eq!( - EvmVersion::default_version_solc(&version), - expected, - "({version}, {expected:?})" - ) - } - } - - #[test] - fn test_evm_version_normalization() { - for &(solc_version, evm_version, expected) in &[ - // Everything before 0.4.21 should always return None - ("0.4.20", EvmVersion::Homestead, None), - // Byzantium clipping - ("0.4.21", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.4.21", EvmVersion::Constantinople, Some(EvmVersion::Byzantium)), - ("0.4.21", EvmVersion::London, Some(EvmVersion::Byzantium)), - // Constantinople bug fix - ("0.4.22", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.4.22", EvmVersion::Constantinople, Some(EvmVersion::Constantinople)), - ("0.4.22", EvmVersion::London, Some(EvmVersion::Constantinople)), - // Petersburg - ("0.5.5", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.5.5", EvmVersion::Petersburg, Some(EvmVersion::Petersburg)), - ("0.5.5", EvmVersion::London, Some(EvmVersion::Petersburg)), - // Istanbul - ("0.5.14", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.5.14", EvmVersion::Istanbul, Some(EvmVersion::Istanbul)), - ("0.5.14", EvmVersion::London, Some(EvmVersion::Istanbul)), - // Berlin - ("0.8.5", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.8.5", EvmVersion::Berlin, Some(EvmVersion::Berlin)), - ("0.8.5", EvmVersion::London, Some(EvmVersion::Berlin)), - // London - ("0.8.7", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.8.7", EvmVersion::London, Some(EvmVersion::London)), - ("0.8.7", EvmVersion::Paris, Some(EvmVersion::London)), - // Paris - ("0.8.18", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.8.18", EvmVersion::Paris, Some(EvmVersion::Paris)), - ("0.8.18", EvmVersion::Shanghai, Some(EvmVersion::Paris)), - // Shanghai - ("0.8.20", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.8.20", EvmVersion::Paris, Some(EvmVersion::Paris)), - ("0.8.20", EvmVersion::Shanghai, 
Some(EvmVersion::Shanghai)), - ("0.8.20", EvmVersion::Cancun, Some(EvmVersion::Shanghai)), - // Cancun - ("0.8.24", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.8.24", EvmVersion::Shanghai, Some(EvmVersion::Shanghai)), - ("0.8.24", EvmVersion::Cancun, Some(EvmVersion::Cancun)), - // Prague - ("0.8.26", EvmVersion::Homestead, Some(EvmVersion::Homestead)), - ("0.8.26", EvmVersion::Shanghai, Some(EvmVersion::Shanghai)), - ("0.8.26", EvmVersion::Cancun, Some(EvmVersion::Cancun)), - ("0.8.26", EvmVersion::Prague, Some(EvmVersion::Cancun)), - ("0.8.27", EvmVersion::Prague, Some(EvmVersion::Prague)), - ] { - let version = Version::from_str(solc_version).unwrap(); - assert_eq!( - evm_version.normalize_version_solc(&version), - expected, - "({version}, {evm_version:?})" - ) - } - } - - #[test] - fn can_sanitize_byte_code_hash() { - let version: Version = "0.6.0".parse().unwrap(); - - let settings = Settings { metadata: Some(BytecodeHash::Ipfs.into()), ..Default::default() }; - - let input = - SolcInput { language: SolcLanguage::Solidity, sources: Default::default(), settings }; - - let i = input.clone().sanitized(&version); - assert_eq!(i.settings.metadata.unwrap().bytecode_hash, Some(BytecodeHash::Ipfs)); - - let version: Version = "0.5.17".parse().unwrap(); - let i = input.sanitized(&version); - assert!(i.settings.metadata.unwrap().bytecode_hash.is_none()); - } - - #[test] - fn can_sanitize_cbor_metadata() { - let version: Version = "0.8.18".parse().unwrap(); - - let settings = Settings { - metadata: Some(SettingsMetadata::new(BytecodeHash::Ipfs, true)), - ..Default::default() - }; - - let input = - SolcInput { language: SolcLanguage::Solidity, sources: Default::default(), settings }; - - let i = input.clone().sanitized(&version); - assert_eq!(i.settings.metadata.unwrap().cbor_metadata, Some(true)); - - let i = input.sanitized(&Version::new(0, 8, 0)); - assert!(i.settings.metadata.unwrap().cbor_metadata.is_none()); - } - - #[test] - fn can_parse_libraries() { - let libraries = ["./src/lib/LibraryContract.sol:Library:0xaddress".to_string()]; - - let libs = Libraries::parse(&libraries[..]).unwrap().libs; - - assert_eq!( - libs, - BTreeMap::from([( - PathBuf::from("./src/lib/LibraryContract.sol"), - BTreeMap::from([("Library".to_string(), "0xaddress".to_string())]) - )]) - ); - } - - #[test] - fn can_strip_libraries_path_prefixes() { - let libraries= [ - "/global/root/src/FileInSrc.sol:Chainlink:0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string(), - "src/deep/DeepFileInSrc.sol:ChainlinkTWAP:0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string(), - "/global/GlobalFile.sol:Math:0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string(), - "/global/root/test/ChainlinkTWAP.t.sol:ChainlinkTWAP:0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string(), - "test/SizeAuctionDiscount.sol:Math:0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string(), - ]; - - let libs = Libraries::parse(&libraries[..]) - .unwrap() - .with_stripped_file_prefixes("/global/root".as_ref()) - .libs; - - assert_eq!( - libs, - BTreeMap::from([ - ( - PathBuf::from("/global/GlobalFile.sol"), - BTreeMap::from([( - "Math".to_string(), - "0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string() - )]) - ), - ( - PathBuf::from("src/FileInSrc.sol"), - BTreeMap::from([( - "Chainlink".to_string(), - "0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string() - )]) - ), - ( - PathBuf::from("src/deep/DeepFileInSrc.sol"), - BTreeMap::from([( - "ChainlinkTWAP".to_string(), - "0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string() - )]) - 
), - ( - PathBuf::from("test/SizeAuctionDiscount.sol"), - BTreeMap::from([( - "Math".to_string(), - "0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string() - )]) - ), - ( - PathBuf::from("test/ChainlinkTWAP.t.sol"), - BTreeMap::from([( - "ChainlinkTWAP".to_string(), - "0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string() - )]) - ), - ]) - ); - } - - #[test] - fn can_parse_many_libraries() { - let libraries= [ - "./src/SizeAuctionDiscount.sol:Chainlink:0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string(), - "./src/SizeAuction.sol:ChainlinkTWAP:0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string(), - "./src/SizeAuction.sol:Math:0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string(), - "./src/test/ChainlinkTWAP.t.sol:ChainlinkTWAP:0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string(), - "./src/SizeAuctionDiscount.sol:Math:0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string(), - ]; - - let libs = Libraries::parse(&libraries[..]).unwrap().libs; - - assert_eq!( - libs, - BTreeMap::from([ - ( - PathBuf::from("./src/SizeAuctionDiscount.sol"), - BTreeMap::from([ - ( - "Chainlink".to_string(), - "0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string() - ), - ( - "Math".to_string(), - "0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string() - ) - ]) - ), - ( - PathBuf::from("./src/SizeAuction.sol"), - BTreeMap::from([ - ( - "ChainlinkTWAP".to_string(), - "0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string() - ), - ( - "Math".to_string(), - "0x902f6cf364b8d9470d5793a9b2b2e86bddd21e0c".to_string() - ) - ]) - ), - ( - PathBuf::from("./src/test/ChainlinkTWAP.t.sol"), - BTreeMap::from([( - "ChainlinkTWAP".to_string(), - "0xffedba5e171c4f15abaaabc86e8bd01f9b54dae5".to_string() - )]) - ), - ]) - ); - } - - #[test] - fn test_lossless_metadata() { - #[derive(Debug, Serialize, Deserialize)] - #[serde(rename_all = "camelCase")] - pub struct Contract { - #[serde( - default, - skip_serializing_if = "Option::is_none", - with = "serde_helpers::json_string_opt" - )] - pub metadata: Option, - } - - let s = r#"{"metadata":"{\"compiler\":{\"version\":\"0.4.18+commit.9cf6e910\"},\"language\":\"Solidity\",\"output\":{\"abi\":[{\"constant\":true,\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"name\":\"\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"newOwner\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"}],\"devdoc\":{\"methods\":{\"transferOwnership(address)\":{\"details\":\"Allows the current owner to transfer control of the contract to a newOwner.\",\"params\":{\"newOwner\":\"The address to transfer ownership to.\"}}},\"title\":\"Ownable\"},\"userdoc\":{\"methods\":{}}},\"settings\":{\"compilationTarget\":{\"src/Contract.sol\":\"Ownable\"},\"libraries\":{},\"optimizer\":{\"enabled\":true,\"runs\":1000000},\"remappings\":[\":src/=src/\"]},\"sources\":{\"src/Contract.sol\":{\"keccak256\":\"0x3e0d611f53491f313ae035797ed7ecfd1dfd8db8fef8f82737e6f0cd86d71de7\",\"urls\":[\"bzzr://9c33025fa9d1b8389e4c7c9534a1d70fad91c6c2ad70eb5e4b7dc3a701a5f892\"]}},\"version\":1}"}"#; - - let value: serde_json::Value = serde_json::from_str(s).unwrap(); - let c: Contract = serde_json::from_value(value).unwrap(); - assert_eq!(c.metadata.as_ref().unwrap().raw_metadata, 
"{\"compiler\":{\"version\":\"0.4.18+commit.9cf6e910\"},\"language\":\"Solidity\",\"output\":{\"abi\":[{\"constant\":true,\"inputs\":[],\"name\":\"owner\",\"outputs\":[{\"name\":\"\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"newOwner\",\"type\":\"address\"}],\"name\":\"transferOwnership\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"constructor\"}],\"devdoc\":{\"methods\":{\"transferOwnership(address)\":{\"details\":\"Allows the current owner to transfer control of the contract to a newOwner.\",\"params\":{\"newOwner\":\"The address to transfer ownership to.\"}}},\"title\":\"Ownable\"},\"userdoc\":{\"methods\":{}}},\"settings\":{\"compilationTarget\":{\"src/Contract.sol\":\"Ownable\"},\"libraries\":{},\"optimizer\":{\"enabled\":true,\"runs\":1000000},\"remappings\":[\":src/=src/\"]},\"sources\":{\"src/Contract.sol\":{\"keccak256\":\"0x3e0d611f53491f313ae035797ed7ecfd1dfd8db8fef8f82737e6f0cd86d71de7\",\"urls\":[\"bzzr://9c33025fa9d1b8389e4c7c9534a1d70fad91c6c2ad70eb5e4b7dc3a701a5f892\"]}},\"version\":1}"); - - let value = serde_json::to_string(&c).unwrap(); - assert_eq!(s, value); - } - - #[test] - fn test_lossless_storage_layout() { - let input = include_str!("../../../../test-data/foundryissue2462.json").trim(); - let layout: StorageLayout = serde_json::from_str(input).unwrap(); - assert_eq!(input, &serde_json::to_string(&layout).unwrap()); - } - - // - #[test] - fn can_parse_compiler_output_spells_0_6_12() { - let path = - Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../test-data/0.6.12-with-libs.json"); - let content = fs::read_to_string(path).unwrap(); - let _output: CompilerOutput = serde_json::from_str(&content).unwrap(); - } -} diff --git a/crates/artifacts/solc/src/output_selection.rs b/crates/artifacts/solc/src/output_selection.rs deleted file mode 100644 index e39e28d0..00000000 --- a/crates/artifacts/solc/src/output_selection.rs +++ /dev/null @@ -1,659 +0,0 @@ -//! Bindings for standard json output selection. - -use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer}; -use std::{collections::BTreeMap, fmt, str::FromStr}; - -/// Represents the desired outputs based on a File `(file -> (contract -> [outputs]))` -pub type FileOutputSelection = BTreeMap>; - -/// Represents the selected output of files and contracts. -/// -/// The first level key is the file name and the second level key is the -/// contract name. An empty contract name is used for outputs that are -/// not tied to a contract but to the whole source file like the AST. -/// A star as contract name refers to all contracts in the file. -/// Similarly, a star as a file name matches all files. -/// To select all outputs the compiler can possibly generate, use -/// "outputSelection: { "*": { "*": [ "*" ], "": [ "*" ] } }" -/// but note that this might slow down the compilation process needlessly. 
-///
-/// The available output types are as follows:
-///
-/// File level (needs empty string as contract name):
-///   ast - AST of all source files
-///
-/// Contract level (needs the contract name or "*"):
-///   abi - ABI
-///   devdoc - Developer documentation (natspec)
-///   userdoc - User documentation (natspec)
-///   metadata - Metadata
-///   ir - Yul intermediate representation of the code before optimization
-///   irOptimized - Intermediate representation after optimization
-///   storageLayout - Slots, offsets and types of the contract's state
-///     variables.
-///   evm.assembly - New assembly format
-///   evm.legacyAssembly - Old-style assembly format in JSON
-///   evm.bytecode.functionDebugData - Debugging information at function level
-///   evm.bytecode.object - Bytecode object
-///   evm.bytecode.opcodes - Opcodes list
-///   evm.bytecode.sourceMap - Source mapping (useful for debugging)
-///   evm.bytecode.linkReferences - Link references (if unlinked object)
-///   evm.bytecode.generatedSources - Sources generated by the compiler
-///   evm.deployedBytecode* - Deployed bytecode (has all the options that
-///     evm.bytecode has)
-///   evm.deployedBytecode.immutableReferences - Map from AST ids to
-///     bytecode ranges that reference immutables
-///   evm.methodIdentifiers - The list of function hashes
-///   evm.gasEstimates - Function gas estimates
-///   ewasm.wast - Ewasm in WebAssembly S-expressions format
-///   ewasm.wasm - Ewasm in WebAssembly binary format
-///
-/// Note that using `evm`, `evm.bytecode`, `ewasm`, etc. will select
-/// every target part of that output. Additionally, `*` can be used as a
-/// wildcard to request everything.
-///
-/// The default output selection is
-///
-/// ```json
-/// {
-///   "*": {
-///     "*": [
-///       "abi",
-///       "evm.bytecode",
-///       "evm.deployedBytecode",
-///       "evm.methodIdentifiers"
-///     ],
-///     "": [
-///       "ast"
-///     ]
-///   }
-/// }
-/// ```
-#[derive(Clone, Debug, Default, PartialEq, Eq, Deserialize)]
-#[serde(transparent)]
-pub struct OutputSelection(pub BTreeMap<String, FileOutputSelection>);
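A quick sketch of how the wildcard selection is built and serialized (import path assumed):

```rust
use foundry_compilers::artifacts::output_selection::OutputSelection;

fn main() {
    let selection = OutputSelection::default_output_selection();
    // Prints the `abi`/`evm.bytecode`/... wildcard selection built by
    // `default_file_output_selection` below.
    println!("{}", serde_json::to_string_pretty(&selection).unwrap());
}
```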
-impl OutputSelection {
-    /// Selects all outputs the compiler can possibly generate, i.e.
-    /// `{ "*": { "*": [ "*" ], "": [ "*" ] } }`,
-    /// but note that this might slow down the compilation process needlessly.
-    pub fn complete_output_selection() -> Self {
-        BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([
-                ("*".to_string(), vec!["*".to_string()]),
-                (String::new(), vec!["*".to_string()]),
-            ]),
-        )])
-        .into()
-    }
-
-    /// Default output selection for compiler output:
-    ///
-    /// `{ "*": { "*": [
-    /// "abi","evm.bytecode","evm.deployedBytecode","evm.methodIdentifiers"] } }`
-    ///
-    /// Which enables it for all files and all their contracts ("*" wildcard)
-    pub fn default_output_selection() -> Self {
-        BTreeMap::from([("*".to_string(), Self::default_file_output_selection())]).into()
-    }
-
-    /// Default output selection for a single file:
-    ///
-    /// `{ "*": [
-    /// "abi","evm.bytecode","evm.deployedBytecode","evm.methodIdentifiers"] }`
-    ///
-    /// Which enables it for all the contracts in the file ("*" wildcard)
-    pub fn default_file_output_selection() -> FileOutputSelection {
-        BTreeMap::from([(
-            "*".to_string(),
-            vec![
-                "abi".to_string(),
-                "evm.bytecode".to_string(),
-                "evm.deployedBytecode".to_string(),
-                "evm.methodIdentifiers".to_string(),
-            ],
-        )])
-    }
-
-    /// Returns output selection configuration which enables the same provided outputs for each
-    /// contract of each source.
-    pub fn common_output_selection(outputs: impl IntoIterator<Item = String>) -> Self {
-        BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([("*".to_string(), outputs.into_iter().collect())]),
-        )])
-        .into()
-    }
-
-    /// Returns an empty output selection which corresponds to an empty map `{}`
-    pub fn empty_file_output_select() -> FileOutputSelection {
-        Default::default()
-    }
-
-    /// Returns output selection which requests only AST for all sources.
-    pub fn ast_output_selection() -> Self {
-        BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([
-                // Do not request any output for separate contracts
-                ("*".to_string(), vec![]),
-                // Request AST for all sources.
-                (String::new(), vec!["ast".to_string()]),
-            ]),
-        )])
-        .into()
-    }
-
-    /// Returns true if this output selection is a subset of the other output selection.
-    /// TODO: correctly process wildcard keys to reduce false negatives
-    pub fn is_subset_of(&self, other: &Self) -> bool {
-        self.0.iter().all(|(file, selection)| {
-            other.0.get(file).map_or(false, |other_selection| {
-                selection.iter().all(|(contract, outputs)| {
-                    other_selection.get(contract).map_or(false, |other_outputs| {
-                        outputs.iter().all(|output| other_outputs.contains(output))
-                    })
-                })
-            })
-        })
-    }
-}
-
-// this will make sure that if the `FileOutputSelection` for a certain file is empty it will be
-// serialized as `"*" : []` because
-// > Contract level (needs the contract name or "*")
-impl Serialize for OutputSelection {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        struct EmptyFileOutput;
-
-        impl Serialize for EmptyFileOutput {
-            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-            where
-                S: Serializer,
-            {
-                let mut map = serializer.serialize_map(Some(1))?;
-                map.serialize_entry("*", &[] as &[String])?;
-                map.end()
-            }
-        }
-
-        let mut map = serializer.serialize_map(Some(self.0.len()))?;
-        for (file, selection) in self.0.iter() {
-            if selection.is_empty() {
-                map.serialize_entry(file, &EmptyFileOutput {})?;
-            } else {
-                map.serialize_entry(file, selection)?;
-            }
-        }
-        map.end()
-    }
-}
-
-impl AsRef<BTreeMap<String, FileOutputSelection>> for OutputSelection {
-    fn as_ref(&self) -> &BTreeMap<String, FileOutputSelection> {
-        &self.0
-    }
-}
-
-impl AsMut<BTreeMap<String, FileOutputSelection>> for OutputSelection {
-    fn as_mut(&mut self) -> &mut BTreeMap<String, FileOutputSelection> {
-        &mut self.0
-    }
-}
-
-impl From<BTreeMap<String, FileOutputSelection>> for OutputSelection {
-    fn from(s: BTreeMap<String, FileOutputSelection>) -> Self {
-        Self(s)
-    }
-}
-
-/// Contract level output selection
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum ContractOutputSelection {
-    Abi,
-    DevDoc,
-    UserDoc,
-    Metadata,
-    Ir,
-    IrOptimized,
-    IrOptimizedAst,
-    StorageLayout,
-    TransientStorageLayout,
-    Evm(EvmOutputSelection),
-    Ewasm(EwasmOutputSelection),
-}
-
-impl ContractOutputSelection {
-    /// Returns the basic set of contract level settings that should be included in the `Contract`
-    /// that solc emits:
-    /// - "abi"
-    /// - "evm.bytecode"
-    /// - "evm.deployedBytecode"
-    /// - "evm.methodIdentifiers"
-    pub fn basic() -> Vec<Self> {
-        vec![
-            Self::Abi,
-            BytecodeOutputSelection::All.into(),
-            DeployedBytecodeOutputSelection::All.into(),
-            EvmOutputSelection::MethodIdentifiers.into(),
-        ]
-    }
-}
-
-impl Serialize for ContractOutputSelection {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.collect_str(self)
-    }
-}
-
-impl<'de> Deserialize<'de> for ContractOutputSelection {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
-    }
-}
-impl fmt::Display for ContractOutputSelection {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Self::Abi => f.write_str("abi"),
-            Self::DevDoc => f.write_str("devdoc"),
-            Self::UserDoc => f.write_str("userdoc"),
-            Self::Metadata => f.write_str("metadata"),
-            Self::Ir => f.write_str("ir"),
-            Self::IrOptimized => f.write_str("irOptimized"),
-            Self::IrOptimizedAst => f.write_str("irOptimizedAst"),
-            Self::StorageLayout => f.write_str("storageLayout"),
-            Self::TransientStorageLayout => f.write_str("transientStorageLayout"),
-            Self::Evm(e) => e.fmt(f),
-            Self::Ewasm(e) => e.fmt(f),
-        }
-    }
-}
-
-impl FromStr for ContractOutputSelection {
-    type Err = String;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "abi" => Ok(Self::Abi),
-            "devdoc" => Ok(Self::DevDoc),
-            "userdoc" => Ok(Self::UserDoc),
-            "metadata" => Ok(Self::Metadata),
-            "ir" => Ok(Self::Ir),
-            "ir-optimized" | "irOptimized" | "iroptimized" => Ok(Self::IrOptimized),
-            "irOptimizedAst" | "ir-optimized-ast" | "iroptimizedast" => Ok(Self::IrOptimizedAst),
-            "storage-layout" | "storagelayout" | "storageLayout" => Ok(Self::StorageLayout),
-            "transient-storage-layout" | "transientstoragelayout" | "transientStorageLayout" => {
-                Ok(Self::TransientStorageLayout)
-            }
-            s => EvmOutputSelection::from_str(s)
-                .map(ContractOutputSelection::Evm)
-                .or_else(|_| EwasmOutputSelection::from_str(s).map(ContractOutputSelection::Ewasm))
-                .map_err(|_| format!("Invalid contract output selection: {s}")),
-        }
-    }
-}
-
-impl<T: Into<EvmOutputSelection>> From<T> for ContractOutputSelection {
-    fn from(evm: T) -> Self {
-        Self::Evm(evm.into())
-    }
-}
-
-impl From<EwasmOutputSelection> for ContractOutputSelection {
-    fn from(ewasm: EwasmOutputSelection) -> Self {
-        Self::Ewasm(ewasm)
-    }
-}
-
-/// Contract level output selection for `evm`
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum EvmOutputSelection {
-    All,
-    Assembly,
-    LegacyAssembly,
-    MethodIdentifiers,
-    GasEstimates,
-    ByteCode(BytecodeOutputSelection),
-    DeployedByteCode(DeployedBytecodeOutputSelection),
-}
-
-impl From<BytecodeOutputSelection> for EvmOutputSelection {
-    fn from(b: BytecodeOutputSelection) -> Self {
-        Self::ByteCode(b)
-    }
-}
-
-impl From<DeployedBytecodeOutputSelection> for EvmOutputSelection {
-    fn from(b: DeployedBytecodeOutputSelection) -> Self {
-        Self::DeployedByteCode(b)
-    }
-}
-
-impl Serialize for EvmOutputSelection {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.collect_str(self)
-    }
-}
-
-impl<'de> Deserialize<'de> for EvmOutputSelection {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
-    }
-}
-
-impl fmt::Display for EvmOutputSelection {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Self::All => f.write_str("evm"),
-            Self::Assembly => f.write_str("evm.assembly"),
-            Self::LegacyAssembly => f.write_str("evm.legacyAssembly"),
-            Self::MethodIdentifiers => f.write_str("evm.methodIdentifiers"),
-            Self::GasEstimates => f.write_str("evm.gasEstimates"),
-            Self::ByteCode(b) => b.fmt(f),
-            Self::DeployedByteCode(b) => b.fmt(f),
-        }
-    }
-}
-
-impl FromStr for EvmOutputSelection {
-    type Err = String;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "evm" => Ok(Self::All),
-            "asm" | "evm.assembly" => Ok(Self::Assembly),
-            "legacyAssembly" | "evm.legacyAssembly" => Ok(Self::LegacyAssembly),
-            "methodidentifiers" | "evm.methodIdentifiers" | "evm.methodidentifiers" => {
-                Ok(Self::MethodIdentifiers)
-            }
-            "gas" | "evm.gasEstimates" | "evm.gasestimates" => Ok(Self::GasEstimates),
-            s => BytecodeOutputSelection::from_str(s)
-                .map(EvmOutputSelection::ByteCode)
-                .or_else(|_| {
-                    DeployedBytecodeOutputSelection::from_str(s)
-                        .map(EvmOutputSelection::DeployedByteCode)
-                })
-                .map_err(|_| format!("Invalid evm selection: {s}")),
-        }
-    }
-}
-/// Contract level output selection for `evm.bytecode`
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum BytecodeOutputSelection {
-    All,
-    FunctionDebugData,
-    Object,
-    Opcodes,
-    SourceMap,
-    LinkReferences,
-    GeneratedSources,
-}
-
-impl Serialize for BytecodeOutputSelection {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.collect_str(self)
-    }
-}
-
-impl<'de> Deserialize<'de> for BytecodeOutputSelection {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
-    }
-}
-
-impl fmt::Display for BytecodeOutputSelection {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Self::All => f.write_str("evm.bytecode"),
-            Self::FunctionDebugData => f.write_str("evm.bytecode.functionDebugData"),
-            Self::Object => f.write_str("evm.bytecode.object"),
-            Self::Opcodes => f.write_str("evm.bytecode.opcodes"),
-            Self::SourceMap => f.write_str("evm.bytecode.sourceMap"),
-            Self::LinkReferences => f.write_str("evm.bytecode.linkReferences"),
-            Self::GeneratedSources => f.write_str("evm.bytecode.generatedSources"),
-        }
-    }
-}
-
-impl FromStr for BytecodeOutputSelection {
-    type Err = String;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "evm.bytecode" => Ok(Self::All),
-            "evm.bytecode.functionDebugData" => Ok(Self::FunctionDebugData),
-            "code" | "bin" | "evm.bytecode.object" => Ok(Self::Object),
-            "evm.bytecode.opcodes" => Ok(Self::Opcodes),
-            "evm.bytecode.sourceMap" => Ok(Self::SourceMap),
-            "evm.bytecode.linkReferences" => Ok(Self::LinkReferences),
-            "evm.bytecode.generatedSources" => Ok(Self::GeneratedSources),
-            s => Err(format!("Invalid bytecode selection: {s}")),
-        }
-    }
-}
-
-/// Contract level output selection for `evm.deployedBytecode`
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum DeployedBytecodeOutputSelection {
-    All,
-    FunctionDebugData,
-    Object,
-    Opcodes,
-    SourceMap,
-    LinkReferences,
-    GeneratedSources,
-    ImmutableReferences,
-}
-
-impl Serialize for DeployedBytecodeOutputSelection {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.collect_str(self)
-    }
-}
-
-impl<'de> Deserialize<'de> for DeployedBytecodeOutputSelection {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
-    }
-}
-
-impl fmt::Display for DeployedBytecodeOutputSelection {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Self::All => f.write_str("evm.deployedBytecode"),
-            Self::FunctionDebugData => f.write_str("evm.deployedBytecode.functionDebugData"),
-            Self::Object => f.write_str("evm.deployedBytecode.object"),
-            Self::Opcodes => f.write_str("evm.deployedBytecode.opcodes"),
-            Self::SourceMap => f.write_str("evm.deployedBytecode.sourceMap"),
-            Self::LinkReferences => f.write_str("evm.deployedBytecode.linkReferences"),
-            Self::GeneratedSources => f.write_str("evm.deployedBytecode.generatedSources"),
-            Self::ImmutableReferences => f.write_str("evm.deployedBytecode.immutableReferences"),
-        }
-    }
-}
-
-impl FromStr for DeployedBytecodeOutputSelection {
-    type Err = String;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "evm.deployedBytecode" => Ok(Self::All),
-            "evm.deployedBytecode.functionDebugData" => Ok(Self::FunctionDebugData),
-            "deployed-code"
-            | "deployed-bin"
-            | "runtime-code"
-            | "runtime-bin"
-            | "evm.deployedBytecode.object" => Ok(Self::Object),
-            "evm.deployedBytecode.opcodes" => Ok(Self::Opcodes),
-            "evm.deployedBytecode.sourceMap" => Ok(Self::SourceMap),
-            "evm.deployedBytecode.linkReferences" => Ok(Self::LinkReferences),
-            "evm.deployedBytecode.generatedSources" => Ok(Self::GeneratedSources),
-            "evm.deployedBytecode.immutableReferences" => Ok(Self::ImmutableReferences),
-            s => Err(format!("Invalid deployedBytecode selection: {s}")),
-        }
-    }
-}
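The parsers accept both canonical solc names and CLI-style shorthands, with the `evm` parser falling through to the bytecode parsers. A minimal sketch (import path assumed):

```rust
use std::str::FromStr;

use foundry_compilers::artifacts::output_selection::{
    BytecodeOutputSelection, ContractOutputSelection, EvmOutputSelection,
};

fn main() {
    // Kebab-case shorthand for the canonical `storageLayout`.
    let layout = ContractOutputSelection::from_str("storage-layout").unwrap();
    assert_eq!(layout, ContractOutputSelection::StorageLayout);

    // "bin" is a shorthand handled by the `BytecodeOutputSelection` parser.
    let bin = EvmOutputSelection::from_str("bin").unwrap();
    assert_eq!(bin, EvmOutputSelection::ByteCode(BytecodeOutputSelection::Object));
}
```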
-/// Contract level output selection for `evm.ewasm`
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum EwasmOutputSelection {
-    All,
-    Wast,
-    Wasm,
-}
-
-impl Serialize for EwasmOutputSelection {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.collect_str(self)
-    }
-}
-
-impl<'de> Deserialize<'de> for EwasmOutputSelection {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
-    }
-}
-
-impl fmt::Display for EwasmOutputSelection {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            Self::All => f.write_str("ewasm"),
-            Self::Wast => f.write_str("ewasm.wast"),
-            Self::Wasm => f.write_str("ewasm.wasm"),
-        }
-    }
-}
-
-impl FromStr for EwasmOutputSelection {
-    type Err = String;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s {
-            "ewasm" => Ok(Self::All),
-            "ewasm.wast" => Ok(Self::Wast),
-            "ewasm.wasm" => Ok(Self::Wasm),
-            s => Err(format!("Invalid ewasm selection: {s}")),
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn outputselection_serde_works() {
-        let mut output = BTreeMap::default();
-        output.insert(
-            "*".to_string(),
-            vec![
-                "abi".to_string(),
-                "evm.bytecode".to_string(),
-                "evm.deployedBytecode".to_string(),
-                "evm.methodIdentifiers".to_string(),
-            ],
-        );
-
-        let json = serde_json::to_string(&output).unwrap();
-        let deserde_selection: BTreeMap<String, Vec<String>> =
-            serde_json::from_str(&json).unwrap();
-
-        assert_eq!(json, serde_json::to_string(&deserde_selection).unwrap());
-    }
-
-    #[test]
-    fn empty_outputselection_serde_works() {
-        let mut empty = OutputSelection::default();
-        empty.0.insert("contract.sol".to_string(), OutputSelection::empty_file_output_select());
-        let s = serde_json::to_string(&empty).unwrap();
-        assert_eq!(s, r#"{"contract.sol":{"*":[]}}"#);
-    }
-
-    #[test]
-    fn outputselection_subset_of() {
-        let output_selection = OutputSelection::from(BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([(
-                "*".to_string(),
-                vec!["abi".to_string(), "evm.bytecode".to_string()],
-            )]),
-        )]));
-
-        let output_selection_abi = OutputSelection::from(BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([("*".to_string(), vec!["abi".to_string()])]),
-        )]));
-
-        assert!(output_selection_abi.is_subset_of(&output_selection));
-        assert!(!output_selection.is_subset_of(&output_selection_abi));
-
-        let output_selection_empty = OutputSelection::from(BTreeMap::from([(
-            "*".to_string(),
-            BTreeMap::from([("*".to_string(), vec![])]),
-        )]));
-
-        assert!(output_selection_empty.is_subset_of(&output_selection));
-        assert!(output_selection_empty.is_subset_of(&output_selection_abi));
-        assert!(!output_selection.is_subset_of(&output_selection_empty));
-        assert!(!output_selection_abi.is_subset_of(&output_selection_empty));
-
-        let output_selection_specific = OutputSelection::from(BTreeMap::from([(
-            "Contract.sol".to_string(),
-            BTreeMap::from([(
-                "Contract".to_string(),
-                vec![
-                    "abi".to_string(),
-                    "evm.bytecode".to_string(),
-                    "evm.deployedBytecode".to_string(),
-                ],
-            )]),
-        )]));
-
-        assert!(!output_selection_specific.is_subset_of(&output_selection));
-    }
-
-    #[test]
-    fn deployed_bytecode_from_str() {
-        assert_eq!(
-            DeployedBytecodeOutputSelection::from_str("evm.deployedBytecode.immutableReferences")
-                .unwrap(),
-            DeployedBytecodeOutputSelection::ImmutableReferences
-        )
-    }
-}
diff --git a/crates/artifacts/solc/src/remappings.rs b/crates/artifacts/solc/src/remappings.rs
deleted file mode 100644
index 8b107a0d..00000000
--- a/crates/artifacts/solc/src/remappings.rs
+++ /dev/null
@@ -1,1377 +0,0 @@
-use foundry_compilers_core::utils;
-use serde::{Deserialize, Serialize};
-use std::{
-    collections::{btree_map::Entry, BTreeMap, HashSet},
-    fmt,
-    path::{Path, PathBuf},
-    str::FromStr,
-};
-
-const DAPPTOOLS_CONTRACTS_DIR: &str = "src";
-const DAPPTOOLS_LIB_DIR: &str = "lib";
-const JS_CONTRACTS_DIR: &str = "contracts";
-const JS_LIB_DIR: &str = "node_modules";
-
-/// The solidity compiler can only reference files that exist locally on your computer.
-/// So importing directly from GitHub (as an example) is not possible.
-///
-/// Let's imagine you want to use OpenZeppelin's amazing library of smart contracts,
-/// `@openzeppelin/contracts-ethereum-package`:
-///
-/// ```ignore
-/// pragma solidity 0.5.11;
-///
-/// import "@openzeppelin/contracts-ethereum-package/contracts/math/SafeMath.sol";
-///
-/// contract MyContract {
-///     using SafeMath for uint256;
-///     ...
-/// }
-/// ```
-///
-/// When using `solc`, you have to specify the following:
-///
-/// - A `prefix`: the path that's used in your smart contract, i.e.
-///   `@openzeppelin/contracts-ethereum-package`
-/// - A `target`: the absolute path of the downloaded contracts on your computer
-///
-/// The format looks like this: `solc prefix=target ./MyContract.sol`
-///
-/// For example:
-///
-/// ```text
-/// solc --bin \
-///     @openzeppelin/contracts-ethereum-package=/Your/Absolute/Path/To/@openzeppelin/contracts-ethereum-package \
-///     ./MyContract.sol
-/// ```
-///
-/// You can also specify a `context` which limits the scope of the remapping to a subset of your
-/// project. This allows you to apply the remapping only to imports located in a specific library
-/// or a specific file. Without a context a remapping is applied to every matching import in all
-/// files.
-///
-/// The format is: `solc context:prefix=target ./MyContract.sol`
-///
-/// [Source](https://ethereum.stackexchange.com/questions/74448/what-are-remappings-and-how-do-they-work-in-solidity)
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Remapping {
-    pub context: Option<String>,
-    pub name: String,
-    pub path: String,
-}
-
-impl Remapping {
-    /// Convenience function for [`RelativeRemapping::new`]
-    pub fn into_relative(self, root: &Path) -> RelativeRemapping {
-        RelativeRemapping::new(self, root)
-    }
-
-    /// Removes the `base` path from the remapping
-    pub fn strip_prefix(&mut self, base: &Path) -> &mut Self {
-        if let Ok(stripped) = Path::new(&self.path).strip_prefix(base) {
-            self.path = stripped.display().to_string();
-        }
-        self
-    }
-}
-
-#[derive(Debug, PartialEq, Eq, PartialOrd, thiserror::Error)]
-pub enum RemappingError {
-    #[error("invalid remapping format, found `{0}`, expected `<key>=<value>`")]
-    InvalidRemapping(String),
-    #[error("remapping key can't be empty, found `{0}`, expected `<key>=<value>`")]
-    EmptyRemappingKey(String),
-    #[error("remapping value must be a path, found `{0}`, expected `<key>=<value>`")]
-    EmptyRemappingValue(String),
-}
-
-impl FromStr for Remapping {
-    type Err = RemappingError;
-
-    fn from_str(remapping: &str) -> Result<Self, Self::Err> {
-        let (name, path) = remapping
-            .split_once('=')
-            .ok_or_else(|| RemappingError::InvalidRemapping(remapping.to_string()))?;
-        let (context, name) = name
-            .split_once(':')
-            .map_or((None, name), |(context, name)| (Some(context.to_string()), name));
-        if name.trim().is_empty() {
-            return Err(RemappingError::EmptyRemappingKey(remapping.to_string()));
-        }
-        if path.trim().is_empty() {
-            return Err(RemappingError::EmptyRemappingValue(remapping.to_string()));
-        }
-        // if the remapping just starts with : (no context name), treat it as global
-        let context =
-            context.and_then(|c| if c.trim().is_empty() { None } else { Some(c.to_string()) });
-        Ok(Self { context, name: name.to_string(), path: path.to_string() })
-    }
-}
-
-impl Serialize for Remapping {
-    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
-    where
-        S: serde::ser::Serializer,
-    {
-        serializer.serialize_str(&self.to_string())
-    }
-}
-
-impl<'de> Deserialize<'de> for Remapping {
-    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
-    where
-        D: serde::de::Deserializer<'de>,
-    {
-        let remapping = String::deserialize(deserializer)?;
-        Self::from_str(&remapping).map_err(serde::de::Error::custom)
-    }
-}
-
-// Remappings are printed as `prefix=target`
-impl fmt::Display for Remapping {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let mut s = String::new();
-        if let Some(context) = self.context.as_ref() {
-            #[cfg(target_os = "windows")]
-            {
-                // ensure we have `/` slashes on windows
-                use path_slash::PathExt;
-                s.push_str(&std::path::Path::new(context).to_slash_lossy());
-            }
-            #[cfg(not(target_os = "windows"))]
-            {
-                s.push_str(context);
-            }
-            s.push(':');
-        }
-        let name =
-            if !self.name.ends_with('/') { format!("{}/", self.name) } else { self.name.clone() };
-        s.push_str(&{
-            #[cfg(target_os = "windows")]
-            {
-                // ensure we have `/` slashes on windows
-                use path_slash::PathExt;
-                format!("{}={}", name, std::path::Path::new(&self.path).to_slash_lossy())
-            }
-            #[cfg(not(target_os = "windows"))]
-            {
-                format!("{}={}", name, self.path)
-            }
-        });
-
-        if !s.ends_with('/') {
-            s.push('/');
-        }
-        f.write_str(&s)
-    }
-}
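A round-trip sketch for the parser and printer above; the `@oz/` remapping is a made-up example and the import path assumes the pre-refactor re-exports:

```rust
use std::str::FromStr;

use foundry_compilers::artifacts::remappings::Remapping;

fn main() {
    // A context-scoped remapping only applies to imports under `src/`.
    let r = Remapping::from_str("src/:@oz/=lib/openzeppelin-contracts/").unwrap();
    assert_eq!(r.context.as_deref(), Some("src/"));
    assert_eq!(r.name, "@oz/");
    assert_eq!(r.path, "lib/openzeppelin-contracts/");
    // `Display` prints it back in `context:prefix=target` form.
    assert_eq!(r.to_string(), "src/:@oz/=lib/openzeppelin-contracts/");
}
```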
-impl Remapping {
-    /// Attempts to autodetect all remappings given a certain root path.
-    ///
-    /// See [`Self::find_many`] for more information.
-    pub fn find_many_str(path: &Path) -> Vec<String> {
-        Self::find_many(path).into_iter().map(|r| r.to_string()).collect()
-    }
-
-    /// Attempts to autodetect all remappings given a certain root path.
-    ///
-    /// This will recursively scan all subdirectories of the root path; if a subdirectory contains
-    /// a solidity file then it is a candidate for a remapping. The name of the remapping will be
-    /// the folder name.
-    ///
-    /// However, there are additional rules/assumptions when it comes to determining if a candidate
-    /// should in fact be a remapping:
-    ///
-    /// All names and paths end with a trailing "/"
-    ///
-    /// The name of the remapping will be the parent folder of a solidity file, unless the folder
-    /// is named `src`, `lib` or `contracts`, in which case the name of the remapping will be the
-    /// parent folder's name of `src`, `lib`, `contracts`: The remapping of
-    /// `repo1/src/contract.sol` is `name: "repo1/", path: "repo1/src/"`
-    ///
-    /// Nested remappings need to be separated by `src`, `lib` or `contracts`. The remapping of
-    /// `repo1/lib/ds-math/src/contract.sol` is `name: "ds-math/", path: "repo1/lib/ds-math/src/"`
-    ///
-    /// Remapping detection is primarily designed for dapptools' rules for lib folders; however,
-    /// we attempt to detect and optimize various folder structures commonly used in
-    /// `node_modules` dependencies. For those the same rules apply. In addition, we try to unify
-    /// all remappings discovered according to the rules mentioned above, so that layouts like,
-    // @aave/
-    // ├─ governance/
-    // │  ├─ contracts/
-    // ├─ protocol-v2/
-    // │  ├─ contracts/
-    ///
-    /// which would be multiple remappings according to our rules ("governance", "protocol-v2"),
-    /// are unified into `@aave` by looking at their common ancestor, the root of this
-    /// subdirectory (`@aave`)
-    pub fn find_many(dir: &Path) -> Vec<Self> {
-        /// prioritize
-        ///   - ("a", "1/2") over ("a", "1/2/3")
-        ///   - if a path ends with `src`
-        fn insert_prioritized(
-            mappings: &mut BTreeMap<String, PathBuf>,
-            key: String,
-            path: PathBuf,
-        ) {
-            match mappings.entry(key) {
-                Entry::Occupied(mut e) => {
-                    if e.get().components().count() > path.components().count()
-                        || (path.ends_with(DAPPTOOLS_CONTRACTS_DIR)
-                            && !e.get().ends_with(DAPPTOOLS_CONTRACTS_DIR))
-                    {
-                        e.insert(path);
-                    }
-                }
-                Entry::Vacant(e) => {
-                    e.insert(path);
-                }
-            }
-        }
-
-        // all combined remappings from all subdirs
-        let mut all_remappings = BTreeMap::new();
-
-        let is_inside_node_modules = dir.ends_with("node_modules");
-
-        let mut visited_symlink_dirs = HashSet::new();
-        // iterate over all dirs that are children of the root
-        for dir in walkdir::WalkDir::new(dir)
-            .sort_by_file_name()
-            .follow_links(true)
-            .min_depth(1)
-            .max_depth(1)
-            .into_iter()
-            .filter_entry(|e| !is_hidden(e))
-            .filter_map(Result::ok)
-            .filter(|e| e.file_type().is_dir())
-        {
-            let depth1_dir = dir.path();
-            // check all remappings in this depth 1 folder
-            let candidates = find_remapping_candidates(
-                depth1_dir,
-                depth1_dir,
-                0,
-                is_inside_node_modules,
-                &mut visited_symlink_dirs,
-            );
-
-            for candidate in candidates {
-                if let Some(name) = candidate.window_start.file_name().and_then(|s| s.to_str()) {
-                    insert_prioritized(
-                        &mut all_remappings,
-                        format!("{name}/"),
-                        candidate.source_dir,
-                    );
-                }
-            }
-        }
-
-        all_remappings
-            .into_iter()
-            .map(|(name, path)| Self { context: None, name, path: format!("{}/", path.display()) })
-            .collect()
-    }
-            use path_slash::PathExt;
-            self.path = Path::new(&self.path).to_slash_lossy().to_string();
-            if let Some(context) = self.context.as_mut() {
-                *context = Path::new(&context).to_slash_lossy().to_string();
-            }
-        }
-    }
-}
-
-/// A relative [`Remapping`] that's aware of the current location
-///
-/// See [`RelativeRemappingPathBuf`]
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-pub struct RelativeRemapping {
-    pub context: Option<String>,
-    pub name: String,
-    pub path: RelativeRemappingPathBuf,
-}
-
-impl RelativeRemapping {
-    /// Creates a new `RelativeRemapping`, rebasing the remapping onto `root`.
-    pub fn new(remapping: Remapping, root: &Path) -> Self {
-        Self {
-            context: remapping.context.map(|c| {
-                RelativeRemappingPathBuf::with_root(root, c).path.to_string_lossy().to_string()
-            }),
-            name: remapping.name,
-            path: RelativeRemappingPathBuf::with_root(root, remapping.path),
-        }
-    }
-
-    /// Converts this relative remapping into an absolute remapping
-    ///
-    /// This sets the root of the remapping to the given `root` path
-    pub fn to_remapping(mut self, root: PathBuf) -> Remapping {
-        self.path.parent = Some(root);
-        self.into()
-    }
-
-    /// Converts this relative remapping into a [`Remapping`] without the root path
-    pub fn to_relative_remapping(mut self) -> Remapping {
-        self.path.parent.take();
-        self.into()
-    }
-}
-
-// Remappings are printed as `prefix=target`
-impl fmt::Display for RelativeRemapping {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let mut s = String::new();
-        if let Some(context) = self.context.as_ref() {
-            #[cfg(target_os = "windows")]
-            {
-                // ensure we have `/` slashes on windows
-                use path_slash::PathExt;
-                s.push_str(&std::path::Path::new(context).to_slash_lossy());
-            }
-            #[cfg(not(target_os = "windows"))]
-            {
-                s.push_str(context);
-            }
-            s.push(':');
-        }
-        s.push_str(&{
-            #[cfg(target_os = "windows")]
-            {
-                // ensure we have `/` slashes on windows
-                use path_slash::PathExt;
-                format!("{}={}", self.name, self.path.original().to_slash_lossy())
-            }
-            #[cfg(not(target_os = "windows"))]
-            {
-                format!("{}={}", self.name, self.path.original().display())
-            }
-        });
-
-        if !s.ends_with('/') {
-            s.push('/');
-        }
-        f.write_str(&s)
-    }
-}
-
-impl From<RelativeRemapping> for Remapping {
-    fn from(r: RelativeRemapping) -> Self {
-        let RelativeRemapping { context, mut name, path } = r;
-        let mut path = path.relative().display().to_string();
-        if !path.ends_with('/') {
-            path.push('/');
-        }
-        if !name.ends_with('/') {
-            name.push('/');
-        }
-        Self { context, name, path }
-    }
-}
-
-impl From<Remapping> for RelativeRemapping {
-    fn from(r: Remapping) -> Self {
-        Self { context: r.context, name: r.name, path: r.path.into() }
-    }
-}
-
-/// The path part of the [`Remapping`] that knows the path of the file it was configured in, if any.
-///
-/// A [`Remapping`] is intended to be absolute, but paths in configuration files are often desired
-/// to be relative to the configuration file itself. For example, a path of
-/// `weird-erc20/=lib/weird-erc20/src/` configured in a file `/var/foundry.toml` might be desired to
-/// resolve as a `weird-erc20/=/var/lib/weird-erc20/src/` remapping.
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-pub struct RelativeRemappingPathBuf {
-    pub parent: Option<PathBuf>,
-    pub path: PathBuf,
-}
-
-impl RelativeRemappingPathBuf {
-    /// Creates a new `RelativeRemappingPathBuf` that checks if the `path` is a child path of
-    /// `parent`.
-    pub fn with_root(
-        parent: impl AsRef<Path> + Into<PathBuf>,
-        path: impl AsRef<Path> + Into<PathBuf>,
-    ) -> Self {
-        if let Ok(path) = path.as_ref().strip_prefix(parent.as_ref()) {
-            Self { parent: Some(parent.into()), path: path.to_path_buf() }
-        } else if path.as_ref().has_root() {
-            Self { parent: None, path: path.into() }
-        } else {
-            Self { parent: Some(parent.into()), path: path.into() }
-        }
-    }
-
-    /// Returns the path as it was declared, without modification.
-    pub fn original(&self) -> &Path {
-        &self.path
-    }
-
-    /// Returns this path relative to the file it was declared in, if any.
-    /// Returns the original if this path was not declared in a file or if the
-    /// path has a root.
-    pub fn relative(&self) -> PathBuf {
-        if self.original().has_root() {
-            return self.original().into();
-        }
-        self.parent
-            .as_ref()
-            .map(|p| p.join(self.original()))
-            .unwrap_or_else(|| self.original().into())
-    }
-}
-
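The `/var/foundry.toml` scenario from the doc comment above, as a sketch (assuming the `Remapping`/`RelativeRemapping` types from this removed module; it mirrors the `relative_remapping` test further down):

```rust
use std::{
    path::{Path, PathBuf},
    str::FromStr,
};

fn relative_roundtrip_example() {
    let r = Remapping::from_str("weird-erc20/=/var/lib/weird-erc20/src/").unwrap();
    // `with_root` strips the `/var` prefix and remembers it as `parent`.
    let rel = RelativeRemapping::new(r, Path::new("/var"));
    assert_eq!(rel.path.original(), Path::new("lib/weird-erc20/src"));
    // Re-anchoring on `/var` restores the absolute remapping.
    let abs = rel.to_remapping(PathBuf::from("/var"));
    assert_eq!(abs.path, "/var/lib/weird-erc20/src/");
}
```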
-impl<P: Into<PathBuf>> From<P> for RelativeRemappingPathBuf {
-    fn from(path: P) -> Self {
-        Self { parent: None, path: path.into() }
-    }
-}
-
-impl Serialize for RelativeRemapping {
-    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
-    where
-        S: serde::ser::Serializer,
-    {
-        serializer.serialize_str(&self.to_string())
-    }
-}
-
-impl<'de> Deserialize<'de> for RelativeRemapping {
-    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
-    where
-        D: serde::de::Deserializer<'de>,
-    {
-        let remapping = String::deserialize(deserializer)?;
-        let remapping = Remapping::from_str(&remapping).map_err(serde::de::Error::custom)?;
-        Ok(Self { context: remapping.context, name: remapping.name, path: remapping.path.into() })
-    }
-}
-
-#[derive(Clone, Debug)]
-struct Candidate {
-    /// dir that opened the window
-    window_start: PathBuf,
-    /// dir that contains the solidity file
-    source_dir: PathBuf,
-    /// number of the current nested dependency
-    window_level: usize,
-}
-
-impl Candidate {
-    /// There are several cases where multiple candidates are detected for the same level
-    ///
-    /// # Example - Dapptools style
-    ///
-    /// Another directory next to a `src` dir:
-    /// ```text
-    /// ds-test/
-    /// ├── aux/demo.sol
-    /// └── src/test.sol
-    /// ```
-    /// which effectively ignores the `aux` dir by prioritizing source dirs and keeps
-    /// `ds-test/=ds-test/src/`
-    ///
-    /// # Example - node_modules / commonly openzeppelin related
-    ///
-    /// The `@openzeppelin` domain can contain several nested dirs in `node_modules/@openzeppelin`.
-    /// Such as
-    /// - `node_modules/@openzeppelin/contracts`
-    /// - `node_modules/@openzeppelin/contracts-upgradeable`
-    ///
-    /// Which should be resolved to the top level dir `@openzeppelin`
-    ///
-    /// We also treat candidates with a `node_modules` parent directory differently and consider
-    /// them to be `hardhat` style, in which case the trailing library barrier `contracts` will be
-    /// stripped from the remapping path. This differs from dapptools style, which does not include
-    /// the library barrier path `src` in the solidity import statements. For example, for
-    /// dapptools you could have
-    ///
-    /// ```text
-    /// /lib/
-    /// ├── src
-    ///     ├── A.sol
-    ///     ├── B.sol
-    /// ```
-    ///
-    /// with the remapping `library/=library/src/`,
-    ///
-    /// whereas with hardhat's import resolver the layout
-    ///
-    /// ```text
-    /// /node_modules/
-    /// ├── contracts
-    ///     ├── A.sol
-    ///     ├── B.sol
-    /// ```
-    /// gets the simple remapping `library/=library/`, because hardhat's lib resolver essentially
-    /// joins the import path inside a solidity file with the `node_modules` folder when it tries
-    /// to find an imported solidity file. For example
-    ///
-    /// ```solidity
-    /// import "hardhat/console.sol";
-    /// ```
-    /// expects the file to be at: `/node_modules/hardhat/console.sol`.
-    ///
-    /// In order to support these cases, we treat the Dapptools case as the outlier, in which case
-    /// we only keep the candidate that ends with `src`
-    ///
-    /// - `candidates`: list of viable remapping candidates
-    /// - `current_dir`: the directory that's currently processed, like `@openzeppelin/contracts`
-    /// - `current_level`: the number of nested library dirs encountered
-    /// - `window_start`: This contains the root directory of the current window.
-    ///   In other words, this will be the parent directory of the most recent library barrier,
-    ///   which will be `@openzeppelin` if the `current_dir` is `@openzeppelin/contracts`. See
-    ///   also [`next_nested_window()`].
-    /// - `is_inside_node_modules`: whether we're inside a `node_modules` lib
-    fn merge_on_same_level(
-        candidates: &mut Vec<Self>,
-        current_dir: &Path,
-        current_level: usize,
-        window_start: PathBuf,
-        is_inside_node_modules: bool,
-    ) {
-        // if there's only a single source dir candidate then we use this
-        if let Some(pos) = candidates
-            .iter()
-            .enumerate()
-            .fold((0, None), |(mut contracts_dir_count, mut pos), (idx, c)| {
-                if c.source_dir.ends_with(DAPPTOOLS_CONTRACTS_DIR) {
-                    contracts_dir_count += 1;
-                    if contracts_dir_count == 1 {
-                        pos = Some(idx)
-                    } else {
-                        pos = None;
-                    }
-                }
-
-                (contracts_dir_count, pos)
-            })
-            .1
-        {
-            let c = candidates.remove(pos);
-            *candidates = vec![c];
-        } else {
-            // merge all candidates on the current level if the current dir is itself a candidate or
-            // there are multiple nested candidates on the current level like `current/{auth,
-            // tokens}/contracts/c.sol`
-            candidates.retain(|c| c.window_level != current_level);
-
-            let source_dir = if is_inside_node_modules {
-                window_start.clone()
-            } else {
-                current_dir.to_path_buf()
-            };
-
-            // if the window start and the source dir are the same directory we can end early if
-            // we wrongfully detect something like: `/src/lib/`
-            if current_level > 0
-                && source_dir == window_start
-                && (is_source_dir(&source_dir) || is_lib_dir(&source_dir))
-            {
-                return;
-            }
-            candidates.push(Self { window_start, source_dir, window_level: current_level });
-        }
-    }
-
-    /// Returns `true` if the `source_dir` ends with `contracts` or `contracts/src`
-    ///
-    /// This is used to detect an edge case in `"@chainlink/contracts"`, whose layout is
-    ///
-    /// ```text
-    /// contracts/src
-    /// ├── v0.4
-    /// │   ├── Pointer.sol
-    /// │   ├── interfaces
-    /// │   │   ├── AggregatorInterface.sol
-    /// │   ├── tests
-    /// │   │   ├── BasicConsumer.sol
-    /// ├── v0.5
-    /// │   ├── Chainlink.sol
-    /// ├── v0.6
-    /// │   ├── AccessControlledAggregator.sol
-    /// ```
-    ///
-    /// And an import commonly used is
-    ///
-    /// ```solidity
-    /// import '@chainlink/contracts/src/v0.6/interfaces/AggregatorV3Interface.sol';
-    /// ```
-    fn source_dir_ends_with_js_source(&self) -> bool {
-        self.source_dir.ends_with(JS_CONTRACTS_DIR) || self.source_dir.ends_with("contracts/src/")
-    }
-}
-
-fn is_source_dir(dir: &Path) -> bool {
-    dir.file_name()
-        .and_then(|p| p.to_str())
-        .map(|name| [DAPPTOOLS_CONTRACTS_DIR, JS_CONTRACTS_DIR].contains(&name))
-        .unwrap_or_default()
-}
-
-fn is_lib_dir(dir: &Path) -> bool {
-    dir.file_name()
-        .and_then(|p| p.to_str())
-        .map(|name| [DAPPTOOLS_LIB_DIR, JS_LIB_DIR].contains(&name))
-        .unwrap_or_default()
-}
-
-/// Returns true if the file is _hidden_
-fn is_hidden(entry: &walkdir::DirEntry) -> bool {
-    entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
-}
-
-/// Finds all remappings in the directory recursively
-///
-/// Note: this supports symlinks and will short-circuit if a symlink dir has already been visited;
-/// this can occur in pnpm setups.
-fn find_remapping_candidates(
-    current_dir: &Path,
-    open: &Path,
-    current_level: usize,
-    is_inside_node_modules: bool,
-    visited_symlink_dirs: &mut HashSet<PathBuf>,
-) -> Vec<Candidate> {
-    // this is a marker if the current root is a candidate for a remapping
-    let mut is_candidate = false;
-
-    // all found candidates
-    let mut candidates = Vec::new();
-
-    // scan all entries in the current dir
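    // Illustration: a layout like `oz/{tokens,auth}/contracts/Contract.sol` yields
    // two candidates at the same window level; `merge_on_same_level` above
    // collapses them into a single `oz/` remapping.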
-    for entry in walkdir::WalkDir::new(current_dir)
-        .sort_by_file_name()
-        .follow_links(true)
-        .min_depth(1)
-        .max_depth(1)
-        .into_iter()
-        .filter_entry(|e| !is_hidden(e))
-        .filter_map(Result::ok)
-    {
-        let entry: walkdir::DirEntry = entry;
-
-        // found a solidity file directly in the current dir
-        if !is_candidate
-            && entry.file_type().is_file()
-            && entry.path().extension() == Some("sol".as_ref())
-        {
-            is_candidate = true;
-        } else if entry.file_type().is_dir() {
-            // if the dir is a symlink to a parent dir we short circuit here
-            // `walkdir` will catch symlink loops, but this check prevents us from ending up
-            // scanning a workspace like
-            // ```text
-            // my-package/node_modules
-            // ├── dep/node_modules
-            // ├── symlink to `my-package`
-            // ```
-            if entry.path_is_symlink() {
-                if let Ok(target) = utils::canonicalize(entry.path()) {
-                    if !visited_symlink_dirs.insert(target.clone()) {
-                        // short-circuiting if we've already visited the symlink
-                        return Vec::new();
-                    }
-                    // the symlink points to a parent dir of the current window
-                    if open.components().count() > target.components().count()
-                        && utils::common_ancestor(open, &target).is_some()
-                    {
-                        // short-circuiting
-                        return Vec::new();
-                    }
-                }
-            }
-
-            let subdir = entry.path();
-            // we skip commonly used subdirs that should not be searched for recursively
-            if !(subdir.ends_with("tests") || subdir.ends_with("test") || subdir.ends_with("demo"))
-            {
-                // scan the subdirectory for remappings, but we need a way to identify nested
-                // dependencies like `ds-token/lib/ds-stop/lib/ds-note/src/contract.sol`, or
-                // `oz/{tokens,auth}/{contracts, interfaces}/contract.sol`. To assign the
-                // remappings to their root, we use a window that lies between two barriers. If
-                // we find a solidity file within a window, it belongs to the dir that opened the
-                // window.
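                // Illustration of the window rule: in
                // `ds-token/lib/ds-stop/lib/ds-note/src/contract.sol`, each `lib`
                // opens a new window, so the file is attributed to `ds-note` and
                // the detected remapping is
                // `ds-note/=ds-token/lib/ds-stop/lib/ds-note/src/`.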
- - // check if the subdir is a lib barrier, in which case we open a new window - if is_lib_dir(subdir) { - candidates.extend(find_remapping_candidates( - subdir, - subdir, - current_level + 1, - is_inside_node_modules, - visited_symlink_dirs, - )); - } else { - // continue scanning with the current window - candidates.extend(find_remapping_candidates( - subdir, - open, - current_level, - is_inside_node_modules, - visited_symlink_dirs, - )); - } - } - } - } - - // need to find the actual next window in the event `open` is a lib dir - let window_start = next_nested_window(open, current_dir); - // finally, we need to merge, adjust candidates from the same level and open window - if is_candidate - || candidates - .iter() - .filter(|c| c.window_level == current_level && c.window_start == window_start) - .count() - > 1 - { - Candidate::merge_on_same_level( - &mut candidates, - current_dir, - current_level, - window_start, - is_inside_node_modules, - ); - } else { - // this handles the case if there is a single nested candidate - if let Some(candidate) = candidates.iter_mut().find(|c| c.window_level == current_level) { - // we need to determine the distance from the starting point of the window to the - // contracts dir for cases like `current/nested/contracts/c.sol` which should point to - // `current` - let distance = dir_distance(&candidate.window_start, &candidate.source_dir); - if distance > 1 && candidate.source_dir_ends_with_js_source() { - candidate.source_dir = window_start; - } else if !is_source_dir(&candidate.source_dir) - && candidate.source_dir != candidate.window_start - { - candidate.source_dir = last_nested_source_dir(open, &candidate.source_dir); - } - } - } - candidates -} - -/// Counts the number of components between `root` and `current` -/// `dir_distance("root/a", "root/a/b/c") == 2` -fn dir_distance(root: &Path, current: &Path) -> usize { - if root == current { - return 0; - } - if let Ok(rem) = current.strip_prefix(root) { - rem.components().count() - } else { - 0 - } -} - -/// This finds the next window between `root` and `current` -/// If `root` ends with a `lib` component then start joining components from `current` until no -/// valid window opener is found -fn next_nested_window(root: &Path, current: &Path) -> PathBuf { - if !is_lib_dir(root) || root == current { - return root.to_path_buf(); - } - if let Ok(rem) = current.strip_prefix(root) { - let mut p = root.to_path_buf(); - for c in rem.components() { - let next = p.join(c); - if !is_lib_dir(&next) || !next.ends_with(JS_CONTRACTS_DIR) { - return next; - } - p = next - } - } - root.to_path_buf() -} - -/// Finds the last valid source directory in the window (root -> dir) -fn last_nested_source_dir(root: &Path, dir: &Path) -> PathBuf { - if is_source_dir(dir) { - return dir.to_path_buf(); - } - let mut p = dir; - while let Some(parent) = p.parent() { - if parent == root { - return root.to_path_buf(); - } - if is_source_dir(parent) { - return parent.to_path_buf(); - } - p = parent; - } - root.to_path_buf() -} - -#[cfg(test)] -mod tests { - use super::*; - use foundry_compilers_core::utils::{mkdir_or_touch, tempdir, touch}; - use similar_asserts::assert_eq; - - #[test] - fn relative_remapping() { - let remapping = "oz=a/b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - - let relative = RelativeRemapping::new(remapping.clone(), Path::new("a/b/c")); - assert_eq!(relative.path.relative(), Path::new(&remapping.path)); - assert_eq!(relative.path.original(), Path::new("d")); - - let relative = 
RelativeRemapping::new(remapping.clone(), Path::new("x/y")); - assert_eq!(relative.path.relative(), Path::new("x/y/a/b/c/d")); - assert_eq!(relative.path.original(), Path::new(&remapping.path)); - - let remapping = "oz=/a/b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - let relative = RelativeRemapping::new(remapping.clone(), Path::new("a/b")); - assert_eq!(relative.path.relative(), Path::new(&remapping.path)); - assert_eq!(relative.path.original(), Path::new(&remapping.path)); - assert!(relative.path.parent.is_none()); - - let relative = RelativeRemapping::new(remapping, Path::new("/a/b")); - assert_eq!(relative.to_relative_remapping(), Remapping::from_str("oz/=c/d/").unwrap()); - } - - #[test] - fn remapping_errors() { - let remapping = "oz=../b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - assert_eq!(remapping.name, "oz".to_string()); - assert_eq!(remapping.path, "../b/c/d".to_string()); - - let err = Remapping::from_str("").unwrap_err(); - matches!(err, RemappingError::InvalidRemapping(_)); - - let err = Remapping::from_str("oz=").unwrap_err(); - matches!(err, RemappingError::EmptyRemappingValue(_)); - } - - // helper function for converting path bufs to remapping strings - fn to_str(p: std::path::PathBuf) -> String { - format!("{}/", p.display()) - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn find_remapping_dapptools() { - let tmp_dir = tempdir("lib").unwrap(); - let tmp_dir_path = tmp_dir.path(); - let paths = ["repo1/src/", "repo1/src/contract.sol"]; - mkdir_or_touch(tmp_dir_path, &paths[..]); - - let path = tmp_dir_path.join("repo1").display().to_string(); - let remappings = Remapping::find_many(tmp_dir_path); - // repo1/=lib/repo1/src - assert_eq!(remappings.len(), 1); - - assert_eq!(remappings[0].name, "repo1/"); - assert_eq!(remappings[0].path, format!("{path}/src/")); - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn can_resolve_contract_dir_combinations() { - let tmp_dir = tempdir("demo").unwrap(); - let paths = - ["lib/timeless/src/lib/A.sol", "lib/timeless/src/B.sol", "lib/timeless/src/test/C.sol"]; - mkdir_or_touch(tmp_dir.path(), &paths[..]); - - let tmp_dir_path = tmp_dir.path().join("lib"); - let remappings = Remapping::find_many(&tmp_dir_path); - let expected = vec![Remapping { - context: None, - name: "timeless/".to_string(), - path: to_str(tmp_dir_path.join("timeless/src")), - }]; - assert_eq!(remappings, expected); - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn can_resolve_geb_remappings() { - let tmp_dir = tempdir("geb").unwrap(); - let paths = [ - "lib/ds-token/src/test/Contract.sol", - "lib/ds-token/lib/ds-test/src/Contract.sol", - "lib/ds-token/lib/ds-test/aux/Contract.sol", - "lib/ds-token/lib/ds-stop/lib/ds-test/src/Contract.sol", - "lib/ds-token/lib/ds-stop/lib/ds-note/src/Contract.sol", - "lib/ds-token/lib/ds-math/lib/ds-test/aux/Contract.sol", - "lib/ds-token/lib/ds-math/src/Contract.sol", - "lib/ds-token/lib/ds-stop/lib/ds-test/aux/Contract.sol", - "lib/ds-token/lib/ds-stop/lib/ds-note/lib/ds-test/src/Contract.sol", - "lib/ds-token/lib/ds-math/lib/ds-test/src/Contract.sol", - "lib/ds-token/lib/ds-stop/lib/ds-auth/lib/ds-test/src/Contract.sol", - "lib/ds-token/lib/ds-stop/src/Contract.sol", - "lib/ds-token/src/Contract.sol", - "lib/ds-token/lib/erc20/src/Contract.sol", - "lib/ds-token/lib/ds-stop/lib/ds-auth/lib/ds-test/aux/Contract.sol", - "lib/ds-token/lib/ds-stop/lib/ds-auth/src/Contract.sol", - 
"lib/ds-token/lib/ds-stop/lib/ds-note/lib/ds-test/aux/Contract.sol", - ]; - mkdir_or_touch(tmp_dir.path(), &paths[..]); - - let tmp_dir_path = tmp_dir.path().join("lib"); - let mut remappings = Remapping::find_many(&tmp_dir_path); - remappings.sort_unstable(); - let mut expected = vec![ - Remapping { - context: None, - name: "ds-auth/".to_string(), - path: to_str(tmp_dir_path.join("ds-token/lib/ds-stop/lib/ds-auth/src")), - }, - Remapping { - context: None, - name: "ds-math/".to_string(), - path: to_str(tmp_dir_path.join("ds-token/lib/ds-math/src")), - }, - Remapping { - context: None, - name: "ds-note/".to_string(), - path: to_str(tmp_dir_path.join("ds-token/lib/ds-stop/lib/ds-note/src")), - }, - Remapping { - context: None, - name: "ds-stop/".to_string(), - path: to_str(tmp_dir_path.join("ds-token/lib/ds-stop/src")), - }, - Remapping { - context: None, - name: "ds-test/".to_string(), - path: to_str(tmp_dir_path.join("ds-token/lib/ds-test/src")), - }, - Remapping { - context: None, - name: "ds-token/".to_string(), - path: to_str(tmp_dir_path.join("ds-token/src")), - }, - Remapping { - context: None, - name: "erc20/".to_string(), - path: to_str(tmp_dir_path.join("ds-token/lib/erc20/src")), - }, - ]; - expected.sort_unstable(); - assert_eq!(remappings, expected); - } - - #[test] - fn can_resolve_nested_chainlink_remappings() { - let tmp_dir = tempdir("root").unwrap(); - let paths = [ - "@chainlink/contracts/src/v0.6/vendor/Contract.sol", - "@chainlink/contracts/src/v0.8/tests/Contract.sol", - "@chainlink/contracts/src/v0.7/Contract.sol", - "@chainlink/contracts/src/v0.6/Contract.sol", - "@chainlink/contracts/src/v0.5/Contract.sol", - "@chainlink/contracts/src/v0.7/tests/Contract.sol", - "@chainlink/contracts/src/v0.7/interfaces/Contract.sol", - "@chainlink/contracts/src/v0.4/tests/Contract.sol", - "@chainlink/contracts/src/v0.6/tests/Contract.sol", - "@chainlink/contracts/src/v0.5/tests/Contract.sol", - "@chainlink/contracts/src/v0.8/vendor/Contract.sol", - "@chainlink/contracts/src/v0.5/dev/Contract.sol", - "@chainlink/contracts/src/v0.6/examples/Contract.sol", - "@chainlink/contracts/src/v0.5/interfaces/Contract.sol", - "@chainlink/contracts/src/v0.4/interfaces/Contract.sol", - "@chainlink/contracts/src/v0.4/vendor/Contract.sol", - "@chainlink/contracts/src/v0.6/interfaces/Contract.sol", - "@chainlink/contracts/src/v0.7/dev/Contract.sol", - "@chainlink/contracts/src/v0.8/dev/Contract.sol", - "@chainlink/contracts/src/v0.5/vendor/Contract.sol", - "@chainlink/contracts/src/v0.7/vendor/Contract.sol", - "@chainlink/contracts/src/v0.4/Contract.sol", - "@chainlink/contracts/src/v0.8/interfaces/Contract.sol", - "@chainlink/contracts/src/v0.6/dev/Contract.sol", - ]; - mkdir_or_touch(tmp_dir.path(), &paths[..]); - let remappings = Remapping::find_many(tmp_dir.path()); - - let expected = vec![Remapping { - context: None, - name: "@chainlink/".to_string(), - path: to_str(tmp_dir.path().join("@chainlink")), - }]; - assert_eq!(remappings, expected); - } - - #[test] - fn can_resolve_oz_upgradeable_remappings() { - let tmp_dir = tempdir("root").unwrap(); - let paths = [ - "@openzeppelin/contracts-upgradeable/proxy/ERC1967/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC1155/Contract.sol", - "@openzeppelin/contracts/token/ERC777/Contract.sol", - "@openzeppelin/contracts/token/ERC721/presets/Contract.sol", - "@openzeppelin/contracts/interfaces/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC777/presets/Contract.sol", - 
"@openzeppelin/contracts/token/ERC1155/extensions/Contract.sol", - "@openzeppelin/contracts/proxy/Contract.sol", - "@openzeppelin/contracts/proxy/utils/Contract.sol", - "@openzeppelin/contracts-upgradeable/security/Contract.sol", - "@openzeppelin/contracts-upgradeable/utils/Contract.sol", - "@openzeppelin/contracts/token/ERC20/Contract.sol", - "@openzeppelin/contracts-upgradeable/utils/introspection/Contract.sol", - "@openzeppelin/contracts/metatx/Contract.sol", - "@openzeppelin/contracts/utils/cryptography/Contract.sol", - "@openzeppelin/contracts/token/ERC20/utils/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC20/utils/Contract.sol", - "@openzeppelin/contracts-upgradeable/proxy/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC20/presets/Contract.sol", - "@openzeppelin/contracts-upgradeable/utils/math/Contract.sol", - "@openzeppelin/contracts-upgradeable/utils/escrow/Contract.sol", - "@openzeppelin/contracts/governance/extensions/Contract.sol", - "@openzeppelin/contracts-upgradeable/interfaces/Contract.sol", - "@openzeppelin/contracts/proxy/transparent/Contract.sol", - "@openzeppelin/contracts/utils/structs/Contract.sol", - "@openzeppelin/contracts-upgradeable/access/Contract.sol", - "@openzeppelin/contracts/governance/compatibility/Contract.sol", - "@openzeppelin/contracts/governance/Contract.sol", - "@openzeppelin/contracts-upgradeable/governance/extensions/Contract.sol", - "@openzeppelin/contracts/security/Contract.sol", - "@openzeppelin/contracts-upgradeable/metatx/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC721/utils/Contract.sol", - "@openzeppelin/contracts/token/ERC721/utils/Contract.sol", - "@openzeppelin/contracts-upgradeable/governance/compatibility/Contract.sol", - "@openzeppelin/contracts/token/common/Contract.sol", - "@openzeppelin/contracts/proxy/beacon/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC721/Contract.sol", - "@openzeppelin/contracts-upgradeable/proxy/beacon/Contract.sol", - "@openzeppelin/contracts/token/ERC1155/utils/Contract.sol", - "@openzeppelin/contracts/token/ERC777/presets/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC20/Contract.sol", - "@openzeppelin/contracts-upgradeable/utils/structs/Contract.sol", - "@openzeppelin/contracts/utils/escrow/Contract.sol", - "@openzeppelin/contracts/utils/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC721/extensions/Contract.sol", - "@openzeppelin/contracts/token/ERC721/extensions/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC777/Contract.sol", - "@openzeppelin/contracts/token/ERC1155/presets/Contract.sol", - "@openzeppelin/contracts/token/ERC721/Contract.sol", - "@openzeppelin/contracts/token/ERC1155/Contract.sol", - "@openzeppelin/contracts-upgradeable/governance/Contract.sol", - "@openzeppelin/contracts/token/ERC20/extensions/Contract.sol", - "@openzeppelin/contracts-upgradeable/utils/cryptography/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC1155/presets/Contract.sol", - "@openzeppelin/contracts/access/Contract.sol", - "@openzeppelin/contracts/governance/utils/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC20/extensions/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/common/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC1155/utils/Contract.sol", - "@openzeppelin/contracts/proxy/ERC1967/Contract.sol", - "@openzeppelin/contracts/finance/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC1155/extensions/Contract.sol", - 
"@openzeppelin/contracts-upgradeable/governance/utils/Contract.sol", - "@openzeppelin/contracts-upgradeable/proxy/utils/Contract.sol", - "@openzeppelin/contracts/token/ERC20/presets/Contract.sol", - "@openzeppelin/contracts/utils/math/Contract.sol", - "@openzeppelin/contracts-upgradeable/token/ERC721/presets/Contract.sol", - "@openzeppelin/contracts-upgradeable/finance/Contract.sol", - "@openzeppelin/contracts/utils/introspection/Contract.sol", - ]; - mkdir_or_touch(tmp_dir.path(), &paths[..]); - let remappings = Remapping::find_many(tmp_dir.path()); - - let expected = vec![Remapping { - context: None, - name: "@openzeppelin/".to_string(), - path: to_str(tmp_dir.path().join("@openzeppelin")), - }]; - assert_eq!(remappings, expected); - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn recursive_remappings() { - let tmp_dir = tempdir("lib").unwrap(); - let tmp_dir_path = tmp_dir.path(); - let paths = [ - "repo1/src/contract.sol", - "repo1/lib/ds-test/src/test.sol", - "repo1/lib/ds-math/src/contract.sol", - "repo1/lib/ds-math/lib/ds-test/src/test.sol", - "repo1/lib/guni-lev/src/contract.sol", - "repo1/lib/solmate/src/auth/contract.sol", - "repo1/lib/solmate/src/tokens/contract.sol", - "repo1/lib/solmate/lib/ds-test/src/test.sol", - "repo1/lib/solmate/lib/ds-test/demo/demo.sol", - "repo1/lib/openzeppelin-contracts/contracts/access/AccessControl.sol", - "repo1/lib/ds-token/lib/ds-stop/src/contract.sol", - "repo1/lib/ds-token/lib/ds-stop/lib/ds-note/src/contract.sol", - ]; - mkdir_or_touch(tmp_dir_path, &paths[..]); - - let mut remappings = Remapping::find_many(tmp_dir_path); - remappings.sort_unstable(); - - let mut expected = vec![ - Remapping { - context: None, - name: "repo1/".to_string(), - path: to_str(tmp_dir_path.join("repo1").join("src")), - }, - Remapping { - context: None, - name: "ds-math/".to_string(), - path: to_str(tmp_dir_path.join("repo1").join("lib").join("ds-math").join("src")), - }, - Remapping { - context: None, - name: "ds-test/".to_string(), - path: to_str(tmp_dir_path.join("repo1").join("lib").join("ds-test").join("src")), - }, - Remapping { - context: None, - name: "guni-lev/".to_string(), - path: to_str(tmp_dir_path.join("repo1/lib/guni-lev").join("src")), - }, - Remapping { - context: None, - name: "solmate/".to_string(), - path: to_str(tmp_dir_path.join("repo1/lib/solmate").join("src")), - }, - Remapping { - context: None, - name: "openzeppelin-contracts/".to_string(), - path: to_str(tmp_dir_path.join("repo1/lib/openzeppelin-contracts/contracts")), - }, - Remapping { - context: None, - name: "ds-stop/".to_string(), - path: to_str(tmp_dir_path.join("repo1/lib/ds-token/lib/ds-stop/src")), - }, - Remapping { - context: None, - name: "ds-note/".to_string(), - path: to_str(tmp_dir_path.join("repo1/lib/ds-token/lib/ds-stop/lib/ds-note/src")), - }, - ]; - expected.sort_unstable(); - assert_eq!(remappings, expected); - } - - #[test] - fn can_resolve_contexts() { - let remapping = "context:oz=a/b/c/d"; - let remapping = Remapping::from_str(remapping).unwrap(); - - assert_eq!( - remapping, - Remapping { - context: Some("context".to_string()), - name: "oz".to_string(), - path: "a/b/c/d".to_string(), - } - ); - assert_eq!(remapping.to_string(), "context:oz/=a/b/c/d/".to_string()); - - let remapping = "context:foo=C:/bar/src/"; - let remapping = Remapping::from_str(remapping).unwrap(); - - assert_eq!( - remapping, - Remapping { - context: Some("context".to_string()), - name: "foo".to_string(), - path: "C:/bar/src/".to_string() - } - ); - } - - #[test] 
- fn can_resolve_global_contexts() { - let remapping = ":oz=a/b/c/d/"; - let remapping = Remapping::from_str(remapping).unwrap(); - - assert_eq!( - remapping, - Remapping { context: None, name: "oz".to_string(), path: "a/b/c/d/".to_string() } - ); - assert_eq!(remapping.to_string(), "oz/=a/b/c/d/".to_string()); - } - - #[test] - fn remappings() { - let tmp_dir = tempdir("tmp").unwrap(); - let tmp_dir_path = tmp_dir.path().join("lib"); - let repo1 = tmp_dir_path.join("src_repo"); - let repo2 = tmp_dir_path.join("contracts_repo"); - - let dir1 = repo1.join("src"); - std::fs::create_dir_all(&dir1).unwrap(); - - let dir2 = repo2.join("contracts"); - std::fs::create_dir_all(&dir2).unwrap(); - - let contract1 = dir1.join("contract.sol"); - touch(&contract1).unwrap(); - - let contract2 = dir2.join("contract.sol"); - touch(&contract2).unwrap(); - - let mut remappings = Remapping::find_many(&tmp_dir_path); - remappings.sort_unstable(); - let mut expected = vec![ - Remapping { - context: None, - name: "src_repo/".to_string(), - path: format!("{}/", dir1.into_os_string().into_string().unwrap()), - }, - Remapping { - context: None, - name: "contracts_repo/".to_string(), - path: format!( - "{}/", - repo2.join("contracts").into_os_string().into_string().unwrap() - ), - }, - ]; - expected.sort_unstable(); - assert_eq!(remappings, expected); - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn simple_dapptools_remappings() { - let tmp_dir = tempdir("lib").unwrap(); - let tmp_dir_path = tmp_dir.path(); - let paths = [ - "ds-test/src", - "ds-test/demo", - "ds-test/demo/demo.sol", - "ds-test/src/test.sol", - "openzeppelin/src/interfaces/c.sol", - "openzeppelin/src/token/ERC/c.sol", - "standards/src/interfaces/iweth.sol", - "uniswapv2/src", - ]; - mkdir_or_touch(tmp_dir_path, &paths[..]); - - let mut remappings = Remapping::find_many(tmp_dir_path); - remappings.sort_unstable(); - - let mut expected = vec![ - Remapping { - context: None, - name: "ds-test/".to_string(), - path: to_str(tmp_dir_path.join("ds-test/src")), - }, - Remapping { - context: None, - name: "openzeppelin/".to_string(), - path: to_str(tmp_dir_path.join("openzeppelin/src")), - }, - Remapping { - context: None, - name: "standards/".to_string(), - path: to_str(tmp_dir_path.join("standards/src")), - }, - ]; - expected.sort_unstable(); - assert_eq!(remappings, expected); - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn hardhat_remappings() { - let tmp_dir = tempdir("node_modules").unwrap(); - let tmp_dir_node_modules = tmp_dir.path().join("node_modules"); - let paths = [ - "node_modules/@aave/aave-token/contracts/token/AaveToken.sol", - "node_modules/@aave/governance-v2/contracts/governance/Executor.sol", - "node_modules/@aave/protocol-v2/contracts/protocol/lendingpool/", - "node_modules/@aave/protocol-v2/contracts/protocol/lendingpool/LendingPool.sol", - "node_modules/@ensdomains/ens/contracts/contract.sol", - "node_modules/prettier-plugin-solidity/tests/format/ModifierDefinitions/", - "node_modules/prettier-plugin-solidity/tests/format/ModifierDefinitions/ - ModifierDefinitions.sol", - "node_modules/@openzeppelin/contracts/tokens/contract.sol", - "node_modules/@openzeppelin/contracts/access/contract.sol", - "node_modules/eth-gas-reporter/mock/contracts/ConvertLib.sol", - "node_modules/eth-gas-reporter/mock/test/TestMetacoin.sol", - ]; - mkdir_or_touch(tmp_dir.path(), &paths[..]); - let mut remappings = Remapping::find_many(&tmp_dir_node_modules); - remappings.sort_unstable(); - let 
mut expected = vec![ - Remapping { - context: None, - name: "@aave/".to_string(), - path: to_str(tmp_dir_node_modules.join("@aave")), - }, - Remapping { - context: None, - name: "@ensdomains/".to_string(), - path: to_str(tmp_dir_node_modules.join("@ensdomains")), - }, - Remapping { - context: None, - name: "@openzeppelin/".to_string(), - path: to_str(tmp_dir_node_modules.join("@openzeppelin")), - }, - Remapping { - context: None, - name: "eth-gas-reporter/".to_string(), - path: to_str(tmp_dir_node_modules.join("eth-gas-reporter")), - }, - ]; - expected.sort_unstable(); - assert_eq!(remappings, expected); - } - - #[test] - fn can_determine_nested_window() { - let a = Path::new( - "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib", - ); - let b = Path::new( - "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib/ds-test/src" - ); - assert_eq!(next_nested_window(a, b),Path::new( - "/var/folders/l5/lprhf87s6xv8djgd017f0b2h0000gn/T/lib.Z6ODLZJQeJQa/repo1/lib/ds-test" - )); - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn find_openzeppelin_remapping() { - let tmp_dir = tempdir("lib").unwrap(); - let tmp_dir_path = tmp_dir.path(); - let paths = [ - "lib/ds-test/src/test.sol", - "lib/forge-std/src/test.sol", - "openzeppelin/contracts/interfaces/c.sol", - ]; - mkdir_or_touch(tmp_dir_path, &paths[..]); - - let path = tmp_dir_path.display().to_string(); - let mut remappings = Remapping::find_many(path.as_ref()); - remappings.sort_unstable(); - - let mut expected = vec![ - Remapping { - context: None, - name: "ds-test/".to_string(), - path: to_str(tmp_dir_path.join("lib/ds-test/src")), - }, - Remapping { - context: None, - name: "openzeppelin/".to_string(), - path: to_str(tmp_dir_path.join("openzeppelin/contracts")), - }, - Remapping { - context: None, - name: "forge-std/".to_string(), - path: to_str(tmp_dir_path.join("lib/forge-std/src")), - }, - ]; - expected.sort_unstable(); - assert_eq!(remappings, expected); - } -} diff --git a/crates/artifacts/solc/src/serde_helpers.rs b/crates/artifacts/solc/src/serde_helpers.rs deleted file mode 100644 index 73526a95..00000000 --- a/crates/artifacts/solc/src/serde_helpers.rs +++ /dev/null @@ -1,246 +0,0 @@ -//! Serde helpers. - -use alloy_primitives::Bytes; -use serde::{Deserialize, Deserializer}; - -pub fn deserialize_bytes<'de, D>(d: D) -> Result -where - D: Deserializer<'de>, -{ - String::deserialize(d)?.parse::().map_err(serde::de::Error::custom) -} - -pub fn deserialize_opt_bytes<'de, D>(d: D) -> Result, D::Error> -where - D: Deserializer<'de>, -{ - let value = Option::::deserialize(d)?; - value.as_deref().map(str::parse).transpose().map_err(serde::de::Error::custom) -} - -pub fn default_for_null<'de, D, T>(deserializer: D) -> Result -where - D: Deserializer<'de>, - T: Deserialize<'de> + Default, -{ - Ok(Option::::deserialize(deserializer)?.unwrap_or_default()) -} - -pub mod json_string_opt { - use serde::{ - de::{self, DeserializeOwned}, - Deserialize, Deserializer, Serialize, Serializer, - }; - - pub fn serialize(value: &Option, serializer: S) -> Result - where - S: Serializer, - T: Serialize, - { - if let Some(value) = value { - value.serialize(serializer) - } else { - serializer.serialize_none() - } - } - - pub fn deserialize<'de, T, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - T: DeserializeOwned, - { - if let Some(s) = Option::::deserialize(deserializer)? 
{ - if s.is_empty() { - return Ok(None); - } - let value = serde_json::Value::String(s); - serde_json::from_value(value).map_err(de::Error::custom).map(Some) - } else { - Ok(None) - } - } -} - -/// deserializes empty json object `{}` as `None` -pub mod empty_json_object_opt { - use serde::{ - de::{self, DeserializeOwned}, - Deserialize, Deserializer, Serialize, Serializer, - }; - - pub fn serialize(value: &Option, serializer: S) -> Result - where - S: Serializer, - T: Serialize, - { - if let Some(value) = value { - value.serialize(serializer) - } else { - let empty = serde_json::Value::Object(Default::default()); - serde_json::Value::serialize(&empty, serializer) - } - } - - pub fn deserialize<'de, T, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - T: DeserializeOwned, - { - let json = serde_json::Value::deserialize(deserializer)?; - if json.is_null() { - return Ok(None); - } - if json.as_object().map(|obj| obj.is_empty()).unwrap_or_default() { - return Ok(None); - } - serde_json::from_value(json).map_err(de::Error::custom).map(Some) - } -} - -/// serde support for string -pub mod string_bytes { - use serde::{Deserialize, Deserializer, Serializer}; - - pub fn serialize(value: &String, serializer: S) -> Result - where - S: Serializer, - { - if value.starts_with("0x") { - serializer.serialize_str(value.as_str()) - } else { - serializer.serialize_str(&format!("0x{value}")) - } - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let value = String::deserialize(deserializer)?; - if let Some(rem) = value.strip_prefix("0x") { - Ok(rem.to_string()) - } else { - Ok(value) - } - } -} - -pub mod display_from_str_opt { - use serde::{de, Deserialize, Deserializer, Serializer}; - use std::{fmt, str::FromStr}; - - pub fn serialize(value: &Option, serializer: S) -> Result - where - T: fmt::Display, - S: Serializer, - { - if let Some(value) = value { - serializer.collect_str(value) - } else { - serializer.serialize_none() - } - } - - pub fn deserialize<'de, T, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - T: FromStr, - T::Err: fmt::Display, - { - if let Some(s) = Option::::deserialize(deserializer)? 
{ - s.parse().map_err(de::Error::custom).map(Some) - } else { - Ok(None) - } - } -} - -pub mod display_from_str { - use serde::{de, Deserialize, Deserializer, Serializer}; - use std::{fmt, str::FromStr}; - - pub fn serialize(value: &T, serializer: S) -> Result - where - T: fmt::Display, - S: Serializer, - { - serializer.collect_str(value) - } - - pub fn deserialize<'de, T, D>(deserializer: D) -> Result - where - D: Deserializer<'de>, - T: FromStr, - T::Err: fmt::Display, - { - String::deserialize(deserializer)?.parse().map_err(de::Error::custom) - } -} - -/// (De)serialize vec of tuples as map -pub mod tuple_vec_map { - use serde::{de::DeserializeOwned, Deserialize, Deserializer, Serialize, Serializer}; - - pub fn serialize(data: &[(K, V)], serializer: S) -> Result - where - S: Serializer, - K: Serialize, - V: Serialize, - { - serializer.collect_map(data.iter().map(|x| (&x.0, &x.1))) - } - - pub fn deserialize<'de, K, V, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - K: DeserializeOwned, - V: DeserializeOwned, - { - use serde::de::{MapAccess, Visitor}; - use std::{fmt, marker::PhantomData}; - - struct TupleVecMapVisitor { - marker: PhantomData>, - } - - impl TupleVecMapVisitor { - pub fn new() -> Self { - Self { marker: PhantomData } - } - } - - impl<'de, K, V> Visitor<'de> for TupleVecMapVisitor - where - K: Deserialize<'de>, - V: Deserialize<'de>, - { - type Value = Vec<(K, V)>; - - fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { - formatter.write_str("a map") - } - - #[inline] - fn visit_unit(self) -> Result, E> { - Ok(Vec::new()) - } - - #[inline] - fn visit_map(self, mut access: T) -> Result, T::Error> - where - T: MapAccess<'de>, - { - let mut values = - Vec::with_capacity(std::cmp::min(access.size_hint().unwrap_or(0), 4096)); - - while let Some((key, value)) = access.next_entry()? { - values.push((key, value)); - } - - Ok(values) - } - } - - deserializer.deserialize_map(TupleVecMapVisitor::new()) - } -} diff --git a/crates/artifacts/solc/src/sourcemap.rs b/crates/artifacts/solc/src/sourcemap.rs deleted file mode 100644 index 3260472d..00000000 --- a/crates/artifacts/solc/src/sourcemap.rs +++ /dev/null @@ -1,628 +0,0 @@ -use std::{fmt, fmt::Write, iter::Peekable, str::CharIndices}; - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub enum Jump { - /// A jump instruction that goes into a function - In, - /// A jump represents an instruction that returns from a function - Out, - /// A regular jump instruction - Regular, -} - -impl Jump { - /// Returns the string representation of the jump instruction. - pub fn to_str(self) -> &'static str { - match self { - Self::In => "i", - Self::Out => "o", - Self::Regular => "-", - } - } - - fn to_int(self) -> u32 { - match self { - Self::In => 0, - Self::Out => 1, - Self::Regular => 2, - } - } - - fn from_int(i: u32) -> Self { - match i { - 0 => Self::In, - 1 => Self::Out, - 2 => Self::Regular, - _ => unreachable!(), - } - } -} - -impl fmt::Display for Jump { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.to_str()) - } -} - -/// An error that can happen during source map parsing. 
-#[derive(Debug, thiserror::Error)] -pub struct SyntaxError(Box); - -#[derive(Debug)] -struct SyntaxErrorInner { - pos: Option, - msg: String, -} - -impl fmt::Display for SyntaxError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("failed to parse source map: ")?; - if let Some(pos) = self.0.pos { - write!(f, "[{pos}] ")?; - } - f.write_str(&self.0.msg) - } -} - -impl SyntaxError { - fn new(pos: impl Into>, msg: impl Into) -> Self { - Self(Box::new(SyntaxErrorInner { pos: pos.into(), msg: msg.into() })) - } -} - -impl From for SyntaxError { - fn from(_value: std::num::TryFromIntError) -> Self { - Self::new(None, "offset overflow") - } -} - -#[derive(PartialEq, Eq)] -enum Token<'a> { - /// Decimal number - Number(&'a str), - /// `;` - Semicolon, - /// `:` - Colon, - /// `i` which represents an instruction that goes into a function - In, - /// `o` which represents an instruction that returns from a function - Out, - /// `-` regular jump - Regular, -} - -impl<'a> fmt::Debug for Token<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Token::Number(s) => write!(f, "NUMBER({s:?})"), - Token::Semicolon => write!(f, "SEMICOLON"), - Token::Colon => write!(f, "COLON"), - Token::In => write!(f, "JMP(i)"), - Token::Out => write!(f, "JMP(o)"), - Token::Regular => write!(f, "JMP(-)"), - } - } -} - -impl<'a> fmt::Display for Token<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Token::Number(_) => write!(f, "number"), - Token::Semicolon => write!(f, "`;`"), - Token::Colon => write!(f, "`:`"), - Token::In => write!(f, "jmp-in"), - Token::Out => write!(f, "jmp-out"), - Token::Regular => write!(f, "jmp"), - } - } -} - -struct Lexer<'input> { - input: &'input str, - chars: Peekable>, -} - -impl<'input> Lexer<'input> { - fn new(input: &'input str) -> Self { - Lexer { chars: input.char_indices().peekable(), input } - } - - fn number(&mut self, start: usize, mut end: usize) -> Token<'input> { - loop { - if let Some((_, ch)) = self.chars.peek().cloned() { - if !ch.is_ascii_digit() { - break; - } - self.chars.next(); - end += 1; - } else { - end = self.input.len(); - break; - } - } - Token::Number(&self.input[start..end]) - } -} - -impl<'input> Iterator for Lexer<'input> { - type Item = Result<(Token<'input>, usize), SyntaxError>; - - fn next(&mut self) -> Option { - let (start, ch) = self.chars.next()?; - let token = match ch { - ';' => Token::Semicolon, - ':' => Token::Colon, - 'i' => Token::In, - 'o' => Token::Out, - '-' => match self.chars.peek() { - Some((_, ch)) if ch.is_ascii_digit() => { - self.chars.next(); - self.number(start, start + 2) - } - _ => Token::Regular, - }, - ch if ch.is_ascii_digit() => self.number(start, start + 1), - ch => return Some(Err(SyntaxError::new(start, format!("unexpected character: {ch}")))), - }; - Some(Ok((token, start))) - } -} - -/// A Solidity source map, which is composed of multiple [`SourceElement`]s, separated by -/// semicolons. -/// -/// Solidity reference: -pub type SourceMap = Vec; - -/// A single element in a [`SourceMap`]. 
-/// -/// Solidity reference: -#[derive(Clone, PartialEq, Eq, Hash)] -pub struct SourceElement { - offset: u32, - length: u32, - index: i32, - // 2 bits for jump, 30 bits for modifier depth; see [set_jump_and_modifier_depth] - jump_and_modifier_depth: u32, -} - -impl fmt::Debug for SourceElement { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SourceElement") - .field("offset", &self.offset()) - .field("length", &self.length()) - .field("index", &self.index_i32()) - .field("jump", &self.jump()) - .field("modifier_depth", &self.modifier_depth()) - .field("formatted", &format_args!("{self}")) - .finish() - } -} - -impl Default for SourceElement { - fn default() -> Self { - Self::new() - } -} - -impl SourceElement { - /// Creates a new source element with default values. - pub fn new() -> Self { - Self { offset: 0, length: 0, index: -1, jump_and_modifier_depth: 0 } - } - - /// Creates a new source element with default values. - #[deprecated = "use `new` instead"] - pub fn new_invalid() -> Self { - Self::new() - } - - /// The byte-offset to the start of the range in the source file. - #[inline] - pub fn offset(&self) -> u32 { - self.offset - } - - /// The length of the source range in bytes. - #[inline] - pub fn length(&self) -> u32 { - self.length - } - - /// The source index. - /// - /// Note: In the case of instructions that are not associated with any particular source file, - /// the source mapping assigns an integer identifier of -1. This may happen for bytecode - /// sections stemming from compiler-generated inline assembly statements. - /// This case is represented as a `None` value. - #[inline] - pub fn index(&self) -> Option { - if self.index == -1 { - None - } else { - Some(self.index as u32) - } - } - - /// The source index. - /// - /// See [`Self::index`] for more information. - #[inline] - pub fn index_i32(&self) -> i32 { - self.index - } - - /// Jump instruction. - #[inline] - pub fn jump(&self) -> Jump { - Jump::from_int(self.jump_and_modifier_depth >> 30) - } - - #[inline] - fn set_jump(&mut self, jump: Jump) { - self.set_jump_and_modifier_depth(jump, self.modifier_depth()); - } - - /// Modifier depth. - /// - /// This depth is increased whenever the placeholder statement (`_`) is entered in a modifier - /// and decreased when it is left again. 
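    /// Illustration (assumed semantics): for `function f() m { ... }`, statements
    /// of `f`'s body are reported at depth 1, since they execute inside `m`'s `_`
    /// placeholder, while `m`'s own statements stay at depth 0.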
- #[inline] - pub fn modifier_depth(&self) -> u32 { - (self.jump_and_modifier_depth << 2) >> 2 - } - - #[inline] - fn set_modifier_depth(&mut self, modifier_depth: usize) -> Result<(), SyntaxError> { - if modifier_depth > (1 << 30) - 1 { - return Err(SyntaxError::new(None, "modifier depth overflow")); - } - self.set_jump_and_modifier_depth(self.jump(), modifier_depth as u32); - Ok(()) - } - - #[inline] - fn set_jump_and_modifier_depth(&mut self, jump: Jump, modifier_depth: u32) { - self.jump_and_modifier_depth = (jump.to_int() << 30) | modifier_depth; - } -} - -impl fmt::Display for SourceElement { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "{}:{}:{}:{}:{}", - self.offset(), - self.length(), - self.index_i32(), - self.jump(), - self.modifier_depth(), - ) - } -} - -#[derive(Default)] -struct SourceElementBuilder { - offset: Option, - length: Option, - index: Option>, - jump: Option, - modifier_depth: Option, -} - -impl fmt::Display for SourceElementBuilder { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.offset.is_none() - && self.length.is_none() - && self.index.is_none() - && self.jump.is_none() - && self.modifier_depth.is_none() - { - return Ok(()); - } - - if let Some(s) = self.offset { - if s == 0 && self.index == Some(None) { - f.write_str("-1")?; - } else { - write!(f, "{s}")?; - } - } - if self.length.is_none() - && self.index.is_none() - && self.jump.is_none() - && self.modifier_depth.is_none() - { - return Ok(()); - } - f.write_char(':')?; - - if let Some(s) = self.length { - if s == 0 && self.index == Some(None) { - f.write_str("-1")?; - } else { - write!(f, "{s}")?; - } - } - if self.index.is_none() && self.jump.is_none() && self.modifier_depth.is_none() { - return Ok(()); - } - f.write_char(':')?; - - if let Some(s) = self.index { - let s = s.map(|s| s as i64).unwrap_or(-1); - write!(f, "{s}")?; - } - if self.jump.is_none() && self.modifier_depth.is_none() { - return Ok(()); - } - f.write_char(':')?; - - if let Some(s) = self.jump { - write!(f, "{s}")?; - } - if self.modifier_depth.is_none() { - return Ok(()); - } - f.write_char(':')?; - - if let Some(s) = self.modifier_depth { - if self.index == Some(None) { - f.write_str("-1")?; - } else { - s.fmt(f)?; - } - } - - Ok(()) - } -} - -impl SourceElementBuilder { - fn finish(self, prev: Option) -> Result { - let mut element = prev.unwrap_or_default(); - macro_rules! get_field { - (| $field:ident | $e:expr) => { - if let Some($field) = self.$field { - $e; - } - }; - } - get_field!(|offset| element.offset = offset.try_into()?); - get_field!(|length| element.length = length.try_into()?); - get_field!(|index| element.index = index.map(|x| x as i32).unwrap_or(-1)); - get_field!(|jump| element.set_jump(jump)); - // Modifier depth is optional. 
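        // Illustration of the compressed format this rebuilds: `2615:100:91:i:1`
        // decodes to offset 2615, length 100, source index 91, a jump into a
        // function, and modifier depth 1. Omitted fields (as in `;;` or `2:2`)
        // inherit their value from the previous element, which is why `finish`
        // starts from `prev`.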
- if let Some(modifier_depth) = self.modifier_depth { - element.set_modifier_depth(modifier_depth)?; - } - Ok(element) - } - - fn set_jmp(&mut self, jmp: Jump, pos: usize) -> Result<(), SyntaxError> { - if self.jump.is_some() { - return Err(SyntaxError::new(pos, "jump already set")); - } - self.jump = Some(jmp); - Ok(()) - } - - fn set_offset(&mut self, offset: usize, pos: usize) -> Result<(), SyntaxError> { - if self.offset.is_some() { - return Err(SyntaxError::new(pos, "offset already set")); - } - self.offset = Some(offset); - Ok(()) - } - - fn set_length(&mut self, length: usize, pos: usize) -> Result<(), SyntaxError> { - if self.length.is_some() { - return Err(SyntaxError::new(pos, "length already set")); - } - self.length = Some(length); - Ok(()) - } - - fn set_index(&mut self, index: Option, pos: usize) -> Result<(), SyntaxError> { - if self.index.is_some() { - return Err(SyntaxError::new(pos, "index already set")); - } - self.index = Some(index); - Ok(()) - } - - fn set_modifier(&mut self, modifier_depth: usize, pos: usize) -> Result<(), SyntaxError> { - if self.modifier_depth.is_some() { - return Err(SyntaxError::new(pos, "modifier depth already set")); - } - self.modifier_depth = Some(modifier_depth); - Ok(()) - } -} - -pub struct Parser<'input> { - lexer: Lexer<'input>, - last_element: Option, - done: bool, - #[cfg(test)] - output: Option<&'input mut dyn Write>, -} - -impl<'input> Parser<'input> { - pub fn new(input: &'input str) -> Self { - Self { - done: input.is_empty(), - lexer: Lexer::new(input), - last_element: None, - #[cfg(test)] - output: None, - } - } - - fn advance(&mut self) -> Result, SyntaxError> { - // start parsing at the offset state, `s` - let mut state = State::Offset; - let mut builder = SourceElementBuilder::default(); - - let parse_number = |num: &str, pos: usize| { - let num = match num.parse::() { - Ok(num) => num, - Err(e) => return Err(SyntaxError::new(pos, e.to_string())), - }; - match num { - ..-1 => Err(SyntaxError::new(pos, "unexpected negative number")), - -1 => Ok(None), - 0.. 
=> u32::try_from(num) - .map(Some) - .map_err(|_| SyntaxError::new(pos, "number too large")), - } - }; - - loop { - match self.lexer.next() { - Some(Ok((token, pos))) => match token { - Token::Semicolon => break, - Token::Number(num) => match state { - State::Offset => { - builder - .set_offset(parse_number(num, pos)?.unwrap_or(0) as usize, pos)?; - } - State::Length => { - builder - .set_length(parse_number(num, pos)?.unwrap_or(0) as usize, pos)?; - } - State::Index => { - builder.set_index(parse_number(num, pos)?, pos)?; - } - State::Modifier => builder - .set_modifier(parse_number(num, pos)?.unwrap_or(0) as usize, pos)?, - State::Jmp => { - return Err(SyntaxError::new(pos, "expected jump, found number")); - } - }, - Token::Colon => state.advance(pos)?, - Token::In => builder.set_jmp(Jump::In, pos)?, - Token::Out => builder.set_jmp(Jump::Out, pos)?, - Token::Regular => builder.set_jmp(Jump::Regular, pos)?, - }, - Some(Err(err)) => return Err(err), - None => { - if self.done { - return Ok(None); - } - self.done = true; - break; - } - } - } - - #[cfg(test)] - if let Some(out) = self.output.as_mut() { - if self.last_element.is_some() { - out.write_char(';').unwrap(); - } - write!(out, "{builder}").unwrap(); - } - - let element = builder.finish(self.last_element.take())?; - self.last_element = Some(element.clone()); - Ok(Some(element)) - } -} - -impl<'input> Iterator for Parser<'input> { - type Item = Result; - - fn next(&mut self) -> Option { - self.advance().transpose() - } -} - -/// State machine to keep track of separating `:` -#[derive(Clone, Copy, PartialEq, Eq)] -enum State { - // s - Offset, - // l - Length, - // f - Index, - // j - Jmp, - // m - Modifier, -} - -impl State { - fn advance(&mut self, pos: usize) -> Result<(), SyntaxError> { - match self { - Self::Offset => *self = Self::Length, - Self::Length => *self = Self::Index, - Self::Index => *self = Self::Jmp, - Self::Jmp => *self = Self::Modifier, - Self::Modifier => return Err(SyntaxError::new(pos, "unexpected colon")), - } - Ok(()) - } -} - -/// Parses a source map -pub fn parse(input: &str) -> Result { - Parser::new(input).collect() -} - -#[cfg(test)] -mod tests { - use super::*; - - fn parse_test(input: &str) { - match parse_test_(input) { - Ok(_) => {} - Err(e) => panic!("{e}"), - } - } - - fn parse_test_(input: &str) -> Result { - let mut s = String::new(); - let mut p = Parser::new(input); - p.output = Some(&mut s); - let sm = p.collect::>()?; - if s != input { - return Err(SyntaxError::new( - None, - format!("mismatched output:\n actual: {s:?}\n expected: {input:?}\n sm: {sm:#?}"), - )); - } - Ok(sm) - } - - #[test] - fn empty() { - parse_test(""); - } - - #[test] - fn source_maps() { - // all source maps from the compiler output test data - let source_maps = include_str!("../../../../test-data/out-source-maps.txt"); - - for (line, s) in source_maps.lines().enumerate() { - let line = line + 1; - parse_test_(s).unwrap_or_else(|e| panic!("Failed to parse line {line}: {e}\n{s:?}")); - } - } - - #[test] - fn cheatcodes() { - let s = include_str!("../../../../test-data/cheatcodes.sol-sourcemap.txt"); - parse_test(s); - } - - // https://github.com/foundry-rs/foundry/issues/8986 - #[test] - fn univ4_deployer() { - let s = 
":::-:0;;1888:10801:91;2615:100;;;2679:3;2615:100;;;;2700:4;2615:100;;;;-1:-1:-1;2615:100:91;;;;2546:169;;;-1:-1:-1;;2546:169:91;;;;;;;;;;;2615:100;2546:169;;;2615:100;2797:101;;;;;;;;;-1:-1:-1;;2797:101:91;;;;;;;;2546:169;2721:177;;;;;;;;;;;;;;;;;;2957:101;1888:10801;2957:101;2797;2957;;;-1:-1:-1;;2957:101:91;;;;356:29:89;2957:101:91;;;;2904:154;;;-1:-1:-1;;2904:154:91;;;;;;;;;;;;-1:-1:-1;;;;;;2904:154:91;;;;;;;;4018:32;;;;;4048:2;4018:32;;;4056:74;;;-1:-1:-1;;;;;4056:74:91;;;;;;;;1888:10801;;;;;;;;;;;;;;;;"; - parse_test(s); - } -} diff --git a/crates/artifacts/solc/src/sources.rs b/crates/artifacts/solc/src/sources.rs deleted file mode 100644 index 01a9b0e2..00000000 --- a/crates/artifacts/solc/src/sources.rs +++ /dev/null @@ -1,273 +0,0 @@ -use alloy_primitives::hex; -use foundry_compilers_core::{error::SolcIoError, utils}; -use md5::Digest; -use serde::{Deserialize, Serialize}; -use std::{ - collections::BTreeMap, - fs, - path::{Path, PathBuf}, - sync::Arc, -}; - -type SourcesInner = BTreeMap; - -/// An ordered list of files and their source. -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -pub struct Sources(pub SourcesInner); - -impl Sources { - /// Returns a new instance of [Sources]. - pub fn new() -> Self { - Self::default() - } - - /// Returns `true` if no sources should have optimized output selection. - pub fn all_dirty(&self) -> bool { - self.0.values().all(|s| s.is_dirty()) - } - - /// Returns all entries that should not be optimized. - pub fn dirty(&self) -> impl Iterator + '_ { - self.0.iter().filter(|(_, s)| s.is_dirty()) - } - - /// Returns all entries that should be optimized. - pub fn clean(&self) -> impl Iterator + '_ { - self.0.iter().filter(|(_, s)| !s.is_dirty()) - } - - /// Returns all files that should not be optimized. - pub fn dirty_files(&self) -> impl Iterator + '_ { - self.dirty().map(|(k, _)| k) - } -} - -impl std::ops::Deref for Sources { - type Target = SourcesInner; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl std::ops::DerefMut for Sources { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl From for Sources -where - SourcesInner: From, -{ - fn from(value: I) -> Self { - Self(From::from(value)) - } -} - -impl FromIterator for Sources -where - SourcesInner: FromIterator, -{ - fn from_iter>(iter: T) -> Self { - Self(FromIterator::from_iter(iter)) - } -} - -impl IntoIterator for Sources { - type Item = ::Item; - type IntoIter = ::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -impl<'a> IntoIterator for &'a Sources { - type Item = <&'a SourcesInner as IntoIterator>::Item; - type IntoIter = <&'a SourcesInner as IntoIterator>::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.iter() - } -} - -impl<'a> IntoIterator for &'a mut Sources { - type Item = <&'a mut SourcesInner as IntoIterator>::Item; - type IntoIter = <&'a mut SourcesInner as IntoIterator>::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.iter_mut() - } -} - -/// Content of a solidity file -/// -/// This contains the actual source code of a file -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct Source { - /// Content of the file - /// - /// This is an `Arc` because it may be cloned. If the graph of the project contains multiple - /// conflicting versions then the same [Source] may be required by conflicting versions and - /// needs to be duplicated. 
- pub content: Arc, - #[serde(skip, default)] - pub kind: SourceCompilationKind, -} - -impl Source { - /// Creates a new instance of [Source] with the given content. - pub fn new(content: impl Into) -> Self { - Self { content: Arc::new(content.into()), kind: SourceCompilationKind::Complete } - } - - /// Reads the file's content - #[instrument(name = "read_source", level = "debug", skip_all, err)] - pub fn read(file: &Path) -> Result { - trace!(file=%file.display()); - let mut content = fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))?; - - // Normalize line endings to ensure deterministic metadata. - if content.contains('\r') { - content = content.replace("\r\n", "\n"); - } - - Ok(Self::new(content)) - } - - /// Returns `true` if the source should be compiled with full output selection. - pub fn is_dirty(&self) -> bool { - self.kind.is_dirty() - } - - /// Recursively finds all source files under the given dir path and reads them all - pub fn read_all_from(dir: &Path, extensions: &[&str]) -> Result { - Self::read_all_files(utils::source_files(dir, extensions)) - } - - /// Recursively finds all solidity and yul files under the given dir path and reads them all - pub fn read_sol_yul_from(dir: &Path) -> Result { - Self::read_all_from(dir, utils::SOLC_EXTENSIONS) - } - - /// Reads all source files of the given vec - /// - /// Depending on the len of the vec it will try to read the files in parallel - pub fn read_all_files(files: Vec) -> Result { - Self::read_all(files) - } - - /// Reads all files - pub fn read_all(files: I) -> Result - where - I: IntoIterator, - T: Into, - { - files - .into_iter() - .map(Into::into) - .map(|file| Self::read(&file).map(|source| (file, source))) - .collect() - } - - /// Parallelized version of `Self::read_all` that reads all files using a parallel iterator - /// - /// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator - /// contains at least several paths or the files are rather large. - pub fn par_read_all(files: I) -> Result - where - I: IntoIterator, - ::IntoIter: Send, - T: Into + Send, - { - use rayon::{iter::ParallelBridge, prelude::ParallelIterator}; - files - .into_iter() - .par_bridge() - .map(Into::into) - .map(|file| Self::read(&file).map(|source| (file, source))) - .collect::, _>>() - .map(Sources) - } - - /// Generate a non-cryptographically secure checksum of the file's content - pub fn content_hash(&self) -> String { - let mut hasher = md5::Md5::new(); - hasher.update(self); - let result = hasher.finalize(); - hex::encode(result) - } -} - -#[cfg(feature = "async")] -impl Source { - /// async version of `Self::read` - #[instrument(name = "async_read_source", level = "debug", skip_all, err)] - pub async fn async_read(file: &Path) -> Result { - let mut content = - tokio::fs::read_to_string(file).await.map_err(|err| SolcIoError::new(err, file))?; - - // Normalize line endings to ensure deterministic metadata. 
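Editor's note: this normalization step exists because `content_hash` above feeds the compilation cache, so `\r\n` and `\n` variants of the same file must produce identical keys. A standalone sketch of that interaction, assuming the `md-5` crate as imported in this file and the generic `hex` crate for encoding (the original uses `alloy_primitives::hex`):

```rust
use md5::{Digest, Md5};

fn content_hash(content: &str) -> String {
    let mut hasher = Md5::new();
    hasher.update(content.as_bytes());
    hex::encode(hasher.finalize())
}

fn main() {
    let unix = "contract A {}\n";
    // The same replacement `read` performs on a Windows checkout:
    let windows = "contract A {}\r\n".replace("\r\n", "\n");
    // Both platforms now produce the same cache key.
    assert_eq!(content_hash(unix), content_hash(&windows));
}
```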
- if content.contains('\r') { - content = content.replace("\r\n", "\n"); - } - - Ok(Self::new(content)) - } - - /// Finds all source files under the given dir path and reads them all - pub async fn async_read_all_from( - dir: &Path, - extensions: &[&str], - ) -> Result { - Self::async_read_all(utils::source_files(dir, extensions)).await - } - - /// async version of `Self::read_all` - pub async fn async_read_all(files: I) -> Result - where - I: IntoIterator, - T: Into, - { - futures_util::future::join_all( - files - .into_iter() - .map(Into::into) - .map(|file| async { Self::async_read(&file).await.map(|source| (file, source)) }), - ) - .await - .into_iter() - .collect() - } -} - -impl AsRef for Source { - fn as_ref(&self) -> &str { - &self.content - } -} - -impl AsRef<[u8]> for Source { - fn as_ref(&self) -> &[u8] { - self.content.as_bytes() - } -} - -/// Represents the state of a filtered [`Source`]. -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub enum SourceCompilationKind { - /// We need a complete compilation output for the source. - #[default] - Complete, - /// A source for which we don't need a complete output and want to optimize its compilation by - /// reducing output selection. - Optimized, -} - -impl SourceCompilationKind { - /// Whether this file should be compiled with full output selection - pub fn is_dirty(&self) -> bool { - matches!(self, Self::Complete) - } -} diff --git a/crates/artifacts/vyper/Cargo.toml b/crates/artifacts/vyper/Cargo.toml deleted file mode 100644 index bb019f40..00000000 --- a/crates/artifacts/vyper/Cargo.toml +++ /dev/null @@ -1,33 +0,0 @@ -[package] -name = "foundry-compilers-artifacts-vyper" -description = "Rust bindings for Vyper JSON artifacts" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -foundry-compilers-artifacts-solc.workspace = true -foundry-compilers-core.workspace = true - -serde.workspace = true -alloy-primitives.workspace = true -alloy-json-abi.workspace = true -semver.workspace = true - -[target.'cfg(windows)'.dependencies] -path-slash.workspace = true - -[dev-dependencies] -serde_path_to_error = "0.1" -similar-asserts.workspace = true -foundry-compilers-core = { workspace = true, features = ["test-utils"] } -serde_json.workspace = true diff --git a/crates/artifacts/vyper/src/error.rs b/crates/artifacts/vyper/src/error.rs deleted file mode 100644 index f524c68c..00000000 --- a/crates/artifacts/vyper/src/error.rs +++ /dev/null @@ -1,42 +0,0 @@ -use core::fmt; -use foundry_compilers_artifacts_solc::Severity; -use serde::{Deserialize, Serialize}; -use std::path::PathBuf; - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct VyperSourceLocation { - file: PathBuf, - #[serde(rename = "lineno")] - line: Option, - #[serde(rename = "col_offset")] - offset: Option, -} - -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct VyperCompilationError { - pub message: String, - pub severity: Severity, - pub source_location: Option, - pub formatted_message: Option, -} - -impl fmt::Display for VyperCompilationError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(location) = &self.source_location { - write!(f, "Location: {}", location.file.display())?; - if let Some(line) = location.line { - write!(f, ":{line}")?; - } - if 
let Some(offset) = location.offset { - write!(f, ":{offset}")?; - } - writeln!(f)?; - } - if let Some(message) = &self.formatted_message { - write!(f, "{message}") - } else { - write!(f, "{}", self.message) - } - } -} diff --git a/crates/artifacts/vyper/src/input.rs b/crates/artifacts/vyper/src/input.rs deleted file mode 100644 index 7b99b79a..00000000 --- a/crates/artifacts/vyper/src/input.rs +++ /dev/null @@ -1,63 +0,0 @@ -use super::VyperSettings; -use foundry_compilers_artifacts_solc::sources::Sources; -use foundry_compilers_core::utils::strip_prefix_owned; -use semver::Version; -use serde::{Deserialize, Serialize}; -use std::path::Path; - -/// Extension of Vyper interface file. -pub const VYPER_INTERFACE_EXTENSION: &str = "vyi"; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct VyperInput { - pub language: String, - pub sources: Sources, - pub interfaces: Sources, - pub settings: VyperSettings, -} - -impl VyperInput { - pub fn new(sources: Sources, mut settings: VyperSettings) -> Self { - let mut new_sources = Sources::new(); - let mut interfaces = Sources::new(); - - for (path, content) in sources { - if path.extension().map_or(false, |ext| ext == VYPER_INTERFACE_EXTENSION) { - // Interface .vyi files should be removed from the output selection. - settings.output_selection.0.remove(path.to_string_lossy().as_ref()); - interfaces.insert(path, content); - } else { - new_sources.insert(path, content); - } - } - - settings.sanitize_output_selection(); - Self { language: "Vyper".to_string(), sources: new_sources, interfaces, settings } - } - - pub fn strip_prefix(&mut self, base: &Path) { - self.sources = std::mem::take(&mut self.sources) - .into_iter() - .map(|(path, s)| (strip_prefix_owned(path, base), s)) - .collect(); - - self.interfaces = std::mem::take(&mut self.interfaces) - .into_iter() - .map(|(path, s)| (strip_prefix_owned(path, base), s)) - .collect(); - - self.settings.strip_prefix(base) - } - - /// This will remove/adjust values in the [`VyperInput`] that are not compatible with this - /// version - pub fn sanitize(&mut self, version: &Version) { - self.settings.sanitize(version); - } - - /// Consumes the type and returns a [VyperInput::sanitized] version - pub fn sanitized(mut self, version: &Version) -> Self { - self.sanitize(version); - self - } -} diff --git a/crates/artifacts/vyper/src/lib.rs b/crates/artifacts/vyper/src/lib.rs deleted file mode 100644 index 57d1f958..00000000 --- a/crates/artifacts/vyper/src/lib.rs +++ /dev/null @@ -1,16 +0,0 @@ -//! Vyper artifact types. 
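Editor's note: the `.vyi` routing in `VyperInput::new` above is the load-bearing detail here — interface files travel with the input but must never appear in output selection. A minimal sketch of the extension check (the real constructor additionally prunes `settings.output_selection` for each routed path):

```rust
use std::path::Path;

const VYPER_INTERFACE_EXTENSION: &str = "vyi";

fn is_interface(path: &Path) -> bool {
    path.extension().map_or(false, |ext| ext == VYPER_INTERFACE_EXTENSION)
}

fn main() {
    assert!(is_interface(Path::new("interfaces/IERC20.vyi")));
    assert!(!is_interface(Path::new("src/Token.vy")));
}
```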
- -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -mod settings; -pub use settings::{VyperOptimizationMode, VyperSettings}; - -mod error; -pub use error::VyperCompilationError; - -mod input; -pub use input::VyperInput; - -mod output; -pub use output::VyperOutput; diff --git a/crates/artifacts/vyper/src/output.rs b/crates/artifacts/vyper/src/output.rs deleted file mode 100644 index d20f79ef..00000000 --- a/crates/artifacts/vyper/src/output.rs +++ /dev/null @@ -1,181 +0,0 @@ -use super::error::VyperCompilationError; -use alloy_json_abi::JsonAbi; -use alloy_primitives::Bytes; -use foundry_compilers_artifacts_solc as solc_artifacts; -use foundry_compilers_artifacts_solc::BytecodeObject; -use serde::Deserialize; -use std::{ - collections::{BTreeMap, HashSet}, - path::{Path, PathBuf}, -}; - -#[derive(Clone, Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct Bytecode { - pub object: Bytes, - /// Opcodes list (string) - #[serde(default, skip_serializing_if = "Option::is_none")] - pub opcodes: Option, - #[serde(default, deserialize_with = "deserialize_vyper_sourcemap")] - pub source_map: Option, -} - -impl From for solc_artifacts::Bytecode { - fn from(bytecode: Bytecode) -> Self { - Self { - object: BytecodeObject::Bytecode(bytecode.object), - opcodes: bytecode.opcodes, - source_map: bytecode.source_map, - function_debug_data: Default::default(), - generated_sources: Default::default(), - link_references: Default::default(), - } - } -} - -#[derive(Clone, Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct VyperEvm { - #[serde(default)] - pub bytecode: Option, - #[serde(default)] - pub deployed_bytecode: Option, - /// The list of function hashes - #[serde(default)] - pub method_identifiers: BTreeMap, -} - -impl From for solc_artifacts::Evm { - fn from(evm: VyperEvm) -> Self { - Self { - bytecode: evm.bytecode.map(Into::into), - deployed_bytecode: evm.deployed_bytecode.map(|b| solc_artifacts::DeployedBytecode { - bytecode: Some(b.into()), - immutable_references: Default::default(), - }), - method_identifiers: evm.method_identifiers, - assembly: None, - legacy_assembly: None, - gas_estimates: None, - } - } -} - -#[derive(Clone, Debug, Deserialize)] -pub struct VyperContract { - /// Contract ABI. 
- pub abi: Option, - /// EVM-related outputs - #[serde(default, skip_serializing_if = "Option::is_none")] - pub evm: Option, -} - -impl From for solc_artifacts::Contract { - fn from(contract: VyperContract) -> Self { - Self { - abi: contract.abi, - evm: contract.evm.map(Into::into), - metadata: None, - userdoc: Default::default(), - devdoc: Default::default(), - ir: None, - storage_layout: Default::default(), - transient_storage_layout: Default::default(), - ewasm: None, - ir_optimized: None, - ir_optimized_ast: None, - } - } -} - -#[derive(Clone, Debug, Deserialize)] -pub struct VyperSourceFile { - pub id: u32, -} - -impl From for solc_artifacts::SourceFile { - fn from(source: VyperSourceFile) -> Self { - Self { id: source.id, ast: None } - } -} - -/// Vyper compiler output -#[derive(Debug, Deserialize)] -pub struct VyperOutput { - #[serde(default = "Vec::new", skip_serializing_if = "Vec::is_empty")] - pub errors: Vec, - #[serde(default)] - pub contracts: solc_artifacts::FileToContractsMap, - #[serde(default)] - pub sources: BTreeMap, -} - -impl VyperOutput { - /// Retains only those files the given iterator yields - /// - /// In other words, removes all contracts for files not included in the iterator - pub fn retain_files<'a, I>(&mut self, files: I) - where - I: IntoIterator, - { - // Note: use `to_lowercase` here because vyper not necessarily emits the exact file name, - // e.g. `src/utils/upgradeProxy.sol` is emitted as `src/utils/UpgradeProxy.sol` - let files: HashSet<_> = - files.into_iter().map(|s| s.to_string_lossy().to_lowercase()).collect(); - self.contracts.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase())); - self.sources.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase())); - } -} - -/// Before Vyper 0.4 source map was represented as a string, after 0.4 it is represented as a map -/// where compressed source map is stored under `pc_pos_map_compressed` key. 
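Editor's note: a standalone sketch of the compatibility shim the comment above describes, using the same untagged-enum shape as the function that follows (assumes `serde`/`serde_json`; both JSON shapes decode to the same compressed string):

```rust
use serde::Deserialize;

#[derive(Deserialize)]
#[serde(untagged)]
enum SourceMap {
    New { pc_pos_map_compressed: String },
    Old(String),
}

fn compressed(json: &str) -> Option<String> {
    serde_json::from_str::<SourceMap>(json).ok().map(|v| match v {
        SourceMap::Old(s) => s,
        SourceMap::New { pc_pos_map_compressed } => pc_pos_map_compressed,
    })
}

fn main() {
    // Vyper < 0.4 emits a bare string; >= 0.4 nests it in an object.
    assert_eq!(compressed(r#""-1:-1:0""#).as_deref(), Some("-1:-1:0"));
    assert_eq!(
        compressed(r#"{"pc_pos_map_compressed":"-1:-1:0"}"#).as_deref(),
        Some("-1:-1:0")
    );
}
```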
-fn deserialize_vyper_sourcemap<'de, D>(deserializer: D) -> Result, D::Error> -where - D: serde::Deserializer<'de>, -{ - #[derive(Deserialize)] - #[serde(untagged)] - enum SourceMap { - New { pc_pos_map_compressed: String }, - Old(String), - } - - Ok(SourceMap::deserialize(deserializer).map_or(None, |v| { - Some(match v { - SourceMap::Old(s) => s, - SourceMap::New { pc_pos_map_compressed } => pc_pos_map_compressed, - }) - })) -} - -#[cfg(test)] -mod tests { - use std::path::{Path, PathBuf}; - - fn test_output(artifact_path: &str) { - let output = std::fs::read_to_string( - Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../test-data").join(artifact_path), - ) - .unwrap(); - let output: super::VyperOutput = serde_json::from_str(&output).unwrap(); - - assert_eq!(output.contracts.len(), 2); - assert_eq!(output.sources.len(), 2); - - let artifact = output.contracts.get(&PathBuf::from("src/a.vy")).unwrap().get("a").unwrap(); - assert!(artifact.evm.is_some()); - let evm = artifact.evm.as_ref().unwrap(); - let deployed_bytecode = evm.deployed_bytecode.as_ref().unwrap(); - assert!(deployed_bytecode.source_map.is_some()); - } - - #[test] - fn can_deserialize_03_output() { - test_output("sample-vyper-0.3-output.json"); - } - - #[test] - fn can_deserialize_04_output() { - test_output("sample-vyper-0.4-output.json"); - } -} diff --git a/crates/artifacts/vyper/src/settings.rs b/crates/artifacts/vyper/src/settings.rs deleted file mode 100644 index 373f3048..00000000 --- a/crates/artifacts/vyper/src/settings.rs +++ /dev/null @@ -1,113 +0,0 @@ -use foundry_compilers_artifacts_solc::{ - output_selection::OutputSelection, serde_helpers, EvmVersion, -}; -use semver::Version; -use serde::{Deserialize, Serialize}; -use std::{ - collections::BTreeSet, - path::{Path, PathBuf}, -}; - -pub const VYPER_SEARCH_PATHS: Version = Version::new(0, 4, 0); -pub const VYPER_BERLIN: Version = Version::new(0, 3, 0); -pub const VYPER_PARIS: Version = Version::new(0, 3, 7); -pub const VYPER_SHANGHAI: Version = Version::new(0, 3, 8); -pub const VYPER_CANCUN: Version = Version::new(0, 3, 8); - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "lowercase")] -pub enum VyperOptimizationMode { - Gas, - Codesize, - None, -} - -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct VyperSettings { - #[serde( - default, - with = "serde_helpers::display_from_str_opt", - skip_serializing_if = "Option::is_none" - )] - pub evm_version: Option, - /// Optimization mode - #[serde(skip_serializing_if = "Option::is_none")] - pub optimize: Option, - /// Whether or not the bytecode should include Vyper's signature - #[serde(skip_serializing_if = "Option::is_none")] - pub bytecode_metadata: Option, - pub output_selection: OutputSelection, - #[serde(rename = "search_paths", skip_serializing_if = "Option::is_none")] - pub search_paths: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub experimental_codegen: Option, -} - -impl VyperSettings { - pub fn strip_prefix(&mut self, base: &Path) { - self.output_selection = OutputSelection( - std::mem::take(&mut self.output_selection.0) - .into_iter() - .map(|(file, selection)| { - ( - Path::new(&file) - .strip_prefix(base) - .map(|p| p.display().to_string()) - .unwrap_or(file), - selection, - ) - }) - .collect(), - ); - self.search_paths = self.search_paths.as_ref().map(|paths| { - paths.iter().map(|p| p.strip_prefix(base).unwrap_or(p.as_path()).into()).collect() - }); - } - - /// 
During caching we prune output selection for some of the sources, however, Vyper will reject - /// [] as an output selection, so we are adding "abi" as a default output selection which is - /// cheap to be produced. - pub fn sanitize_output_selection(&mut self) { - self.output_selection.0.values_mut().for_each(|selection| { - selection.values_mut().for_each(|selection| { - if selection.is_empty() { - selection.push("abi".to_string()) - } - }) - }); - } - - /// Sanitize the settings based on the compiler version. - pub fn sanitize(&mut self, version: &Version) { - if version < &VYPER_SEARCH_PATHS { - self.search_paths = None; - } - - self.sanitize_output_selection(); - self.normalize_evm_version(version); - } - - /// Sanitize the settings based on the compiler version. - pub fn sanitized(mut self, version: &Version) -> Self { - self.sanitize(version); - self - } - - /// Adjusts the EVM version based on the compiler version. - pub fn normalize_evm_version(&mut self, version: &Version) { - if let Some(evm_version) = &mut self.evm_version { - *evm_version = if *evm_version >= EvmVersion::Cancun && *version >= VYPER_CANCUN { - EvmVersion::Cancun - } else if *evm_version >= EvmVersion::Shanghai && *version >= VYPER_SHANGHAI { - EvmVersion::Shanghai - } else if *evm_version >= EvmVersion::Paris && *version >= VYPER_PARIS { - EvmVersion::Paris - } else if *evm_version >= EvmVersion::Berlin && *version >= VYPER_BERLIN { - EvmVersion::Berlin - } else { - *evm_version - }; - } - } -} diff --git a/crates/artifacts/zksolc/Cargo.toml b/crates/artifacts/zksolc/Cargo.toml deleted file mode 100644 index 0f3b1efd..00000000 --- a/crates/artifacts/zksolc/Cargo.toml +++ /dev/null @@ -1,45 +0,0 @@ -[package] -name = "foundry-compilers-artifacts-zksolc" -description = "Rust bindings for ZkSolc JSON artifacts" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -foundry-compilers-core.workspace = true -foundry-compilers-artifacts-solc.workspace = true - -serde.workspace = true -semver.workspace = true -serde_json.workspace = true -tracing.workspace = true -alloy-primitives.workspace = true -alloy-json-abi.workspace = true -rayon.workspace = true -thiserror.workspace = true -md-5.workspace = true -yansi.workspace = true -futures-util = { workspace = true, optional = true } -tokio = { workspace = true, optional = true } - -walkdir = "2.4" - -[target.'cfg(windows)'.dependencies] -path-slash.workspace = true - -[dev-dependencies] -serde_path_to_error = "0.1" -similar-asserts.workspace = true -foundry-compilers-core = { workspace = true, features = ["test-utils"] } - -[features] -async = ["dep:tokio", "futures-util", "tokio/fs"] diff --git a/crates/compilers/Cargo.toml b/crates/compilers/Cargo.toml deleted file mode 100644 index 0694c23e..00000000 --- a/crates/compilers/Cargo.toml +++ /dev/null @@ -1,116 +0,0 @@ -[package] -name = "foundry-compilers" -description = "Compiler abstraction and Foundry project implementation" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -foundry-compilers-artifacts.workspace = true -foundry-compilers-core.workspace = true -serde.workspace = true 
-semver.workspace = true -alloy-primitives.workspace = true -serde_json.workspace = true -tracing.workspace = true -alloy-json-abi.workspace = true -rayon.workspace = true -md-5.workspace = true -thiserror.workspace = true -path-slash.workspace = true -yansi.workspace = true -solang-parser.workspace = true -once_cell = { workspace = true, optional = true } -futures-util = { workspace = true, optional = true } -tokio = { workspace = true, optional = true } - -auto_impl = "1" -winnow = "0.6" -dyn-clone = "1" -derivative = "2.2" -home = "0.5" -dirs = "5.0" -itertools = "0.13" - -# project-util -tempfile = { version = "3.9", optional = true } -fs_extra = { version = "1.3", optional = true } -rand = { version = "0.8", optional = true } - -# svm -svm = { workspace = true, optional = true } -svm-builds = { package = "svm-rs-builds", version = "0.5", default-features = false, optional = true } -sha2 = { version = "0.10", default-features = false, optional = true } - -# zksync -reqwest = { version = "0.12", default-features = false, optional = true } -walkdir = "2.4" -fs4 = "0.8" - -[dev-dependencies] -tracing-subscriber = { version = "0.3", default-features = false, features = [ - "env-filter", - "fmt", -] } -similar-asserts.workspace = true -fd-lock = "4.0.0" -tokio = { version = "1.35", features = ["rt-multi-thread", "macros"] } -reqwest = "0.12" -tempfile = "3.9" -snapbox.workspace = true -foundry-compilers-core = { workspace = true, features = ["test-utils"] } - -[features] -default = ["rustls", "async", "svm-solc", "project-util"] -test-utils = [] - -full = ["async", "svm-solc"] - -# Adds extra `async` methods using `tokio` to some types. -async = [ - "dep:futures-util", - "dep:tokio", - "tokio/fs", - "tokio/process", - "tokio/io-util", - "foundry-compilers-artifacts/async", - "reqwest", -] -# Enables `svm` to auto-detect and manage `solc` builds. -svm-solc = [ - "dep:svm", - "dep:svm-builds", - "dep:sha2", - "foundry-compilers-core/svm-solc", - "dep:once_cell", -] -# Utilities for creating and testing project workspaces. -project-util = [ - "dep:tempfile", - "dep:fs_extra", - "dep:rand", - "svm-solc", - "foundry-compilers-core/project-util", -] - -rustls = ["svm?/rustls"] -openssl = ["svm?/openssl"] - -[[test]] -name = "project" -path = "tests/project.rs" -required-features = ["full", "project-util", "test-utils"] - -[[test]] -name = "mocked" -path = "tests/mocked.rs" -required-features = ["full", "project-util"] diff --git a/crates/compilers/README.md b/crates/compilers/README.md deleted file mode 120000 index fe840054..00000000 --- a/crates/compilers/README.md +++ /dev/null @@ -1 +0,0 @@ -../../README.md \ No newline at end of file diff --git a/crates/compilers/src/artifact_output/configurable.rs b/crates/compilers/src/artifact_output/configurable.rs deleted file mode 100644 index 699a5fbc..00000000 --- a/crates/compilers/src/artifact_output/configurable.rs +++ /dev/null @@ -1,841 +0,0 @@ -//! A configurable artifacts handler implementation -//! -//! Configuring artifacts requires two pieces: the `ConfigurableArtifacts` handler, which contains -//! the configuration of how to construct the `ConfigurableArtifact` type based on a `Contract`. The -//! `ConfigurableArtifacts` populates a single `Artifact`, the `ConfigurableArtifact`, by default -//! with essential entries only, such as `abi`, `bytecode`,..., but may include additional values -//! based on its `ExtraOutputValues` that maps to various objects in the solc contract output, see -//! 
also: [`OutputSelection`](foundry_compilers_artifacts::output_selection::OutputSelection). In -//! addition to that some output values can also be emitted as standalone files. - -use crate::{ - sources::VersionedSourceFile, Artifact, ArtifactFile, ArtifactOutput, SolcConfig, SolcError, - SourceFile, -}; -use alloy_json_abi::JsonAbi; -use alloy_primitives::hex; -use foundry_compilers_artifacts::{ - bytecode::{CompactBytecode, CompactDeployedBytecode}, - contract::Contract, - output_selection::{ - BytecodeOutputSelection, ContractOutputSelection, DeployedBytecodeOutputSelection, - EvmOutputSelection, EwasmOutputSelection, - }, - BytecodeObject, ConfigurableContractArtifact, Evm, Ewasm, GeneratedSource, LosslessMetadata, - Metadata, Settings, -}; -use foundry_compilers_core::utils; -use std::{fs, path::Path}; - -/// An `Artifact` implementation that can be configured to include additional content and emit -/// additional files -/// -/// Creates a single json artifact with -/// ```json -/// { -/// "abi": [], -/// "bytecode": {...}, -/// "deployedBytecode": {...}, -/// "methodIdentifiers": {...}, -/// // additional values -/// } -/// ``` -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct ConfigurableArtifacts { - /// A set of additional values to include in the contract's artifact file - pub additional_values: ExtraOutputValues, - - /// A set of values that should be written to a separate file - pub additional_files: ExtraOutputFiles, - - /// PRIVATE: This structure may grow, As such, constructing this structure should - /// _always_ be done using a public constructor or update syntax: - /// - /// ``` - /// use foundry_compilers::{ConfigurableArtifacts, ExtraOutputFiles}; - /// - /// let config = ConfigurableArtifacts { - /// additional_files: ExtraOutputFiles { metadata: true, ..Default::default() }, - /// ..Default::default() - /// }; - /// ``` - #[doc(hidden)] - pub __non_exhaustive: (), -} - -impl ConfigurableArtifacts { - pub fn new( - extra_values: impl IntoIterator, - extra_files: impl IntoIterator, - ) -> Self { - Self { - additional_values: ExtraOutputValues::from_output_selection(extra_values), - additional_files: ExtraOutputFiles::from_output_selection(extra_files), - ..Default::default() - } - } - - /// Returns the `Settings` this configuration corresponds to - pub fn solc_settings(&self) -> Settings { - SolcConfig::builder() - .additional_outputs(self.output_selection()) - .ast(self.additional_values.ast) - .build() - } - - /// Returns the output selection corresponding to this configuration - pub fn output_selection(&self) -> Vec { - let mut selection = ContractOutputSelection::basic(); - - let ExtraOutputValues { - // handled above - ast: _, - userdoc, - devdoc, - method_identifiers, - storage_layout, - transient_storage_layout, - assembly, - legacy_assembly, - gas_estimates, - metadata, - ir, - ir_optimized, - ir_optimized_ast, - ewasm, - function_debug_data, - generated_sources, - source_map, - opcodes, - __non_exhaustive, - } = self.additional_values; - - if ir || self.additional_files.ir { - selection.push(ContractOutputSelection::Ir); - } - if ir_optimized || self.additional_files.ir_optimized { - selection.push(ContractOutputSelection::IrOptimized); - } - if metadata || self.additional_files.metadata { - selection.push(ContractOutputSelection::Metadata); - } - if storage_layout { - selection.push(ContractOutputSelection::StorageLayout); - } - if devdoc { - selection.push(ContractOutputSelection::DevDoc); - } - if userdoc { - 
selection.push(ContractOutputSelection::UserDoc); - } - if gas_estimates { - selection.push(EvmOutputSelection::GasEstimates.into()); - } - if assembly || self.additional_files.assembly { - selection.push(EvmOutputSelection::Assembly.into()); - } - if legacy_assembly || self.additional_files.legacy_assembly { - selection.push(EvmOutputSelection::LegacyAssembly.into()); - } - if ewasm || self.additional_files.ewasm { - selection.push(EwasmOutputSelection::All.into()); - } - if function_debug_data { - selection.push(BytecodeOutputSelection::FunctionDebugData.into()); - } - if method_identifiers { - selection.push(EvmOutputSelection::MethodIdentifiers.into()); - } - if generated_sources { - selection.push( - EvmOutputSelection::ByteCode(BytecodeOutputSelection::GeneratedSources).into(), - ); - } - if source_map { - selection.push(EvmOutputSelection::ByteCode(BytecodeOutputSelection::SourceMap).into()); - } - if ir_optimized_ast { - selection.push(ContractOutputSelection::IrOptimizedAst); - } - if opcodes { - selection.push(EvmOutputSelection::ByteCode(BytecodeOutputSelection::Opcodes).into()); - } - if transient_storage_layout { - selection.push(ContractOutputSelection::TransientStorageLayout); - } - selection - } -} - -impl ArtifactOutput for ConfigurableArtifacts { - type Artifact = ConfigurableContractArtifact; - - /// Writes extra files for compiled artifact based on [Self::additional_files] - fn handle_artifacts( - &self, - contracts: &crate::VersionedContracts, - artifacts: &crate::Artifacts, - ) -> Result<(), SolcError> { - for (file, contracts) in contracts.as_ref().iter() { - for (name, versioned_contracts) in contracts { - for contract in versioned_contracts { - if let Some(artifact) = artifacts.find_artifact(file, name, &contract.version) { - let file = &artifact.file; - utils::create_parent_dir_all(file)?; - self.additional_files.write_extras(&contract.contract, file)?; - } - } - } - } - Ok(()) - } - - fn contract_to_artifact( - &self, - _file: &Path, - _name: &str, - contract: Contract, - source_file: Option<&SourceFile>, - ) -> Self::Artifact { - let mut artifact_userdoc = None; - let mut artifact_devdoc = None; - let mut artifact_raw_metadata = None; - let mut artifact_metadata = None; - let mut artifact_ir = None; - let mut artifact_ir_optimized = None; - let mut artifact_ir_optimized_ast = None; - let mut artifact_ewasm = None; - let mut artifact_bytecode = None; - let mut artifact_deployed_bytecode = None; - let mut artifact_gas_estimates = None; - let mut artifact_function_debug_data = None; - let mut artifact_method_identifiers = None; - let mut artifact_assembly = None; - let mut artifact_legacy_assembly = None; - let mut artifact_storage_layout = None; - let mut artifact_transient_storage_layout = None; - let mut generated_sources = None; - let mut opcodes = None; - - let Contract { - abi, - metadata, - userdoc, - devdoc, - ir, - storage_layout, - transient_storage_layout, - evm, - ewasm, - ir_optimized, - ir_optimized_ast, - } = contract; - - if self.additional_values.metadata { - if let Some(LosslessMetadata { raw_metadata, metadata }) = metadata { - artifact_raw_metadata = Some(raw_metadata); - artifact_metadata = Some(metadata); - } - } - if self.additional_values.userdoc { - artifact_userdoc = Some(userdoc); - } - if self.additional_values.devdoc { - artifact_devdoc = Some(devdoc); - } - if self.additional_values.ewasm { - artifact_ewasm = ewasm; - } - if self.additional_values.ir { - artifact_ir = ir; - } - if self.additional_values.ir_optimized { - 
artifact_ir_optimized = ir_optimized; - } - if self.additional_values.ir_optimized_ast { - artifact_ir_optimized_ast = ir_optimized_ast; - } - if self.additional_values.storage_layout { - artifact_storage_layout = Some(storage_layout); - } - if self.additional_values.transient_storage_layout { - artifact_transient_storage_layout = Some(transient_storage_layout); - } - - if let Some(evm) = evm { - let Evm { - assembly, - mut bytecode, - deployed_bytecode, - method_identifiers, - gas_estimates, - legacy_assembly, - } = evm; - - if self.additional_values.function_debug_data { - artifact_function_debug_data = - bytecode.as_mut().map(|code| std::mem::take(&mut code.function_debug_data)); - } - if self.additional_values.generated_sources { - generated_sources = - bytecode.as_mut().map(|code| std::mem::take(&mut code.generated_sources)); - } - - if self.additional_values.opcodes { - opcodes = bytecode.as_mut().and_then(|code| code.opcodes.take()) - } - - artifact_bytecode = bytecode.map(Into::into); - artifact_deployed_bytecode = deployed_bytecode.map(Into::into); - artifact_method_identifiers = Some(method_identifiers); - - if self.additional_values.gas_estimates { - artifact_gas_estimates = gas_estimates; - } - if self.additional_values.assembly { - artifact_assembly = assembly; - } - - if self.additional_values.legacy_assembly { - artifact_legacy_assembly = legacy_assembly; - } - } - - ConfigurableContractArtifact { - abi, - bytecode: artifact_bytecode, - deployed_bytecode: artifact_deployed_bytecode, - assembly: artifact_assembly, - legacy_assembly: artifact_legacy_assembly, - opcodes, - function_debug_data: artifact_function_debug_data, - method_identifiers: artifact_method_identifiers, - gas_estimates: artifact_gas_estimates, - raw_metadata: artifact_raw_metadata, - metadata: artifact_metadata, - storage_layout: artifact_storage_layout, - transient_storage_layout: artifact_transient_storage_layout, - userdoc: artifact_userdoc, - devdoc: artifact_devdoc, - ir: artifact_ir, - ir_optimized: artifact_ir_optimized, - ir_optimized_ast: artifact_ir_optimized_ast, - ewasm: artifact_ewasm, - id: source_file.as_ref().map(|s| s.id), - ast: source_file.and_then(|s| s.ast.clone()), - generated_sources: generated_sources.unwrap_or_default(), - } - } - - fn standalone_source_file_to_artifact( - &self, - _path: &Path, - file: &VersionedSourceFile, - ) -> Option { - file.source_file.ast.clone().map(|ast| ConfigurableContractArtifact { - abi: Some(JsonAbi::default()), - id: Some(file.source_file.id), - ast: Some(ast), - bytecode: Some(CompactBytecode::empty()), - deployed_bytecode: Some(CompactDeployedBytecode::empty()), - ..Default::default() - }) - } - - /// We want to enforce recompilation if artifact is missing data we need for writing extra - /// files. 
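Editor's note: before the implementation, a toy version of the rule the doc comment states — requesting an extra output that a cached artifact never produced must mark it dirty. Types and names here are illustrative stand-ins, not the crate's `ArtifactFile`/`ExtraOutputFiles`:

```rust
struct CachedArtifact {
    metadata: Option<String>,
    ir: Option<String>,
}

fn is_dirty(want_metadata: bool, want_ir: bool, artifact: &CachedArtifact) -> bool {
    (want_metadata && artifact.metadata.is_none()) || (want_ir && artifact.ir.is_none())
}

fn main() {
    // Cached before `metadata` output was enabled in the config:
    let cached = CachedArtifact { metadata: None, ir: Some("ir text".into()) };
    // The newly requested extra file cannot be written from cache -> recompile.
    assert!(is_dirty(true, true, &cached));
}
```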
- fn is_dirty(&self, artifact_file: &ArtifactFile) -> Result { - let artifact = &artifact_file.artifact; - let ExtraOutputFiles { - abi: _, - metadata, - ir, - ir_optimized, - ewasm, - assembly, - legacy_assembly, - source_map, - generated_sources, - bytecode: _, - deployed_bytecode: _, - __non_exhaustive: _, - } = self.additional_files; - - if metadata && artifact.metadata.is_none() { - return Ok(true); - } - if ir && artifact.ir.is_none() { - return Ok(true); - } - if ir_optimized && artifact.ir_optimized.is_none() { - return Ok(true); - } - if ewasm && artifact.ewasm.is_none() { - return Ok(true); - } - if assembly && artifact.assembly.is_none() { - return Ok(true); - } - if assembly && artifact.assembly.is_none() { - return Ok(true); - } - if legacy_assembly && artifact.legacy_assembly.is_none() { - return Ok(true); - } - if source_map && artifact.get_source_map_str().is_none() { - return Ok(true); - } - if generated_sources { - // We can't check if generated sources are missing or just empty. - return Ok(true); - } - Ok(false) - } - - /// Writes extra files for cached artifacts based on [Self::additional_files]. - fn handle_cached_artifacts( - &self, - artifacts: &crate::Artifacts, - ) -> Result<(), SolcError> { - for artifacts in artifacts.values() { - for artifacts in artifacts.values() { - for artifact_file in artifacts { - let file = &artifact_file.file; - let artifact = &artifact_file.artifact; - self.additional_files.process_abi(artifact.abi.as_ref(), file)?; - self.additional_files.process_assembly(artifact.assembly.as_deref(), file)?; - self.additional_files - .process_legacy_assembly(artifact.legacy_assembly.clone(), file)?; - self.additional_files - .process_bytecode(artifact.bytecode.as_ref().map(|b| &b.object), file)?; - self.additional_files.process_deployed_bytecode( - artifact - .deployed_bytecode - .as_ref() - .and_then(|d| d.bytecode.as_ref()) - .map(|b| &b.object), - file, - )?; - self.additional_files - .process_generated_sources(Some(&artifact.generated_sources), file)?; - self.additional_files.process_ir(artifact.ir.as_deref(), file)?; - self.additional_files - .process_ir_optimized(artifact.ir_optimized.as_deref(), file)?; - self.additional_files.process_ewasm(artifact.ewasm.as_ref(), file)?; - self.additional_files.process_metadata(artifact.metadata.as_ref(), file)?; - self.additional_files - .process_source_map(artifact.get_source_map_str().as_deref(), file)?; - } - } - } - - Ok(()) - } -} - -/// Determines the additional values to include in the contract's artifact file -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct ExtraOutputValues { - pub ast: bool, - pub userdoc: bool, - pub devdoc: bool, - pub method_identifiers: bool, - pub storage_layout: bool, - pub transient_storage_layout: bool, - pub assembly: bool, - pub legacy_assembly: bool, - pub gas_estimates: bool, - pub metadata: bool, - pub ir: bool, - pub ir_optimized: bool, - pub ir_optimized_ast: bool, - pub ewasm: bool, - pub function_debug_data: bool, - pub generated_sources: bool, - pub source_map: bool, - pub opcodes: bool, - - /// PRIVATE: This structure may grow, As such, constructing this structure should - /// _always_ be done using a public constructor or update syntax: - /// - /// ``` - /// use foundry_compilers::ExtraOutputValues; - /// - /// let config = ExtraOutputValues { ir: true, ..Default::default() }; - /// ``` - #[doc(hidden)] - pub __non_exhaustive: (), -} - -impl ExtraOutputValues { - /// Returns an instance where all values are set to `true` - pub fn all() -> Self 
{ - Self { - ast: true, - userdoc: true, - devdoc: true, - method_identifiers: true, - storage_layout: true, - transient_storage_layout: true, - assembly: true, - legacy_assembly: true, - gas_estimates: true, - metadata: true, - ir: true, - ir_optimized: true, - ir_optimized_ast: true, - ewasm: true, - function_debug_data: true, - generated_sources: true, - source_map: true, - opcodes: true, - __non_exhaustive: (), - } - } - - /// Sets the values based on a set of `ContractOutputSelection` - pub fn from_output_selection( - settings: impl IntoIterator, - ) -> Self { - let mut config = Self::default(); - for value in settings.into_iter() { - match value { - ContractOutputSelection::DevDoc => { - config.devdoc = true; - } - ContractOutputSelection::UserDoc => { - config.userdoc = true; - } - ContractOutputSelection::Metadata => { - config.metadata = true; - } - ContractOutputSelection::Ir => { - config.ir = true; - } - ContractOutputSelection::IrOptimized => { - config.ir_optimized = true; - } - ContractOutputSelection::StorageLayout => { - config.storage_layout = true; - } - ContractOutputSelection::Evm(evm) => match evm { - EvmOutputSelection::All => { - config.assembly = true; - config.legacy_assembly = true; - config.gas_estimates = true; - config.method_identifiers = true; - config.generated_sources = true; - config.source_map = true; - config.opcodes = true; - } - EvmOutputSelection::Assembly => { - config.assembly = true; - } - EvmOutputSelection::LegacyAssembly => { - config.legacy_assembly = true; - } - EvmOutputSelection::MethodIdentifiers => { - config.method_identifiers = true; - } - EvmOutputSelection::GasEstimates => { - config.gas_estimates = true; - } - EvmOutputSelection::ByteCode(BytecodeOutputSelection::FunctionDebugData) => { - config.function_debug_data = true; - } - EvmOutputSelection::ByteCode(BytecodeOutputSelection::Opcodes) => { - config.opcodes = true; - } - EvmOutputSelection::ByteCode(BytecodeOutputSelection::GeneratedSources) => { - config.generated_sources = true; - } - EvmOutputSelection::ByteCode(BytecodeOutputSelection::SourceMap) => { - config.source_map = true; - } - _ => {} - }, - ContractOutputSelection::Ewasm(_) => { - config.ewasm = true; - } - ContractOutputSelection::IrOptimizedAst => { - config.ir_optimized_ast = true; - } - ContractOutputSelection::TransientStorageLayout => { - config.transient_storage_layout = true; - } - ContractOutputSelection::Abi => {} - } - } - - config - } -} - -/// Determines what to emit as an additional file -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct ExtraOutputFiles { - pub abi: bool, - pub metadata: bool, - pub ir: bool, - pub ir_optimized: bool, - pub ewasm: bool, - pub assembly: bool, - pub legacy_assembly: bool, - pub source_map: bool, - pub generated_sources: bool, - pub bytecode: bool, - pub deployed_bytecode: bool, - - /// PRIVATE: This structure may grow, As such, constructing this structure should - /// _always_ be done using a public constructor or update syntax: - /// - /// ``` - /// use foundry_compilers::ExtraOutputFiles; - /// - /// let config = ExtraOutputFiles { metadata: true, ..Default::default() }; - /// ``` - #[doc(hidden)] - pub __non_exhaustive: (), -} - -impl ExtraOutputFiles { - /// Returns an instance where all values are set to `true` - pub fn all() -> Self { - Self { - abi: true, - metadata: true, - ir: true, - ir_optimized: true, - ewasm: true, - assembly: true, - legacy_assembly: true, - source_map: true, - generated_sources: true, - bytecode: true, - deployed_bytecode: 
true, - __non_exhaustive: (), - } - } - - /// Sets the values based on a set of `ContractOutputSelection` - pub fn from_output_selection( - settings: impl IntoIterator, - ) -> Self { - let mut config = Self::default(); - for value in settings.into_iter() { - match value { - ContractOutputSelection::Abi => { - config.abi = true; - } - ContractOutputSelection::Metadata => { - config.metadata = true; - } - ContractOutputSelection::Ir => { - config.ir = true; - } - ContractOutputSelection::IrOptimized => { - config.ir_optimized = true; - } - ContractOutputSelection::Evm(evm) => match evm { - EvmOutputSelection::All => { - config.assembly = true; - config.legacy_assembly = true; - config.generated_sources = true; - config.source_map = true; - config.bytecode = true; - config.deployed_bytecode = true; - } - EvmOutputSelection::Assembly => { - config.assembly = true; - } - EvmOutputSelection::LegacyAssembly => { - config.legacy_assembly = true; - } - EvmOutputSelection::ByteCode(BytecodeOutputSelection::GeneratedSources) => { - config.generated_sources = true; - } - EvmOutputSelection::ByteCode(BytecodeOutputSelection::Object) => { - config.bytecode = true; - } - EvmOutputSelection::ByteCode(BytecodeOutputSelection::SourceMap) => { - config.source_map = true; - } - EvmOutputSelection::DeployedByteCode(DeployedBytecodeOutputSelection::All) - | EvmOutputSelection::DeployedByteCode( - DeployedBytecodeOutputSelection::Object, - ) => { - config.deployed_bytecode = true; - } - _ => {} - }, - ContractOutputSelection::Ewasm(_) => { - config.ewasm = true; - } - _ => {} - } - } - config - } - - fn process_abi(&self, abi: Option<&JsonAbi>, file: &Path) -> Result<(), SolcError> { - if self.abi { - if let Some(abi) = abi { - let file = file.with_extension("abi.json"); - fs::write(&file, serde_json::to_string_pretty(abi)?) - .map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn process_metadata(&self, metadata: Option<&Metadata>, file: &Path) -> Result<(), SolcError> { - if self.metadata { - if let Some(metadata) = metadata { - let file = file.with_extension("metadata.json"); - fs::write(&file, serde_json::to_string_pretty(metadata)?) - .map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn process_ir(&self, ir: Option<&str>, file: &Path) -> Result<(), SolcError> { - if self.ir { - if let Some(ir) = ir { - let file = file.with_extension("ir"); - fs::write(&file, ir).map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn process_ir_optimized( - &self, - ir_optimized: Option<&str>, - file: &Path, - ) -> Result<(), SolcError> { - if self.ir_optimized { - if let Some(ir_optimized) = ir_optimized { - let file = file.with_extension("iropt"); - fs::write(&file, ir_optimized).map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn process_ewasm(&self, ewasm: Option<&Ewasm>, file: &Path) -> Result<(), SolcError> { - if self.ewasm { - if let Some(ewasm) = ewasm { - let file = file.with_extension("ewasm"); - fs::write(&file, serde_json::to_vec_pretty(ewasm)?) - .map_err(|err| SolcError::io(err, file))?; - } - } - Ok(()) - } - - fn process_assembly(&self, asm: Option<&str>, file: &Path) -> Result<(), SolcError> { - if self.assembly { - if let Some(asm) = asm { - let file = file.with_extension("asm"); - fs::write(&file, asm).map_err(|err| SolcError::io(err, file))? 
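Editor's note: the `process_*` writers above and below all derive the sibling file from the artifact's path via `with_extension`, which swaps only the final `.json` component. A quick standalone check of that convention (the paths are hypothetical):

```rust
use std::path::Path;

fn main() {
    let artifact = Path::new("out/Counter.sol/Counter.json");
    assert_eq!(artifact.with_extension("abi.json"), Path::new("out/Counter.sol/Counter.abi.json"));
    assert_eq!(artifact.with_extension("bin"), Path::new("out/Counter.sol/Counter.bin"));
}
```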
- } - } - Ok(()) - } - - fn process_legacy_assembly( - &self, - asm: Option, - file: &Path, - ) -> Result<(), SolcError> { - if self.legacy_assembly { - if let Some(legacy_asm) = asm { - let file = file.with_extension("legacyAssembly.json"); - fs::write(&file, format!("{legacy_asm}")).map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn process_generated_sources( - &self, - generated_sources: Option<&Vec>, - file: &Path, - ) -> Result<(), SolcError> { - if self.generated_sources { - if let Some(generated_sources) = generated_sources { - let file = file.with_extension("gensources"); - fs::write(&file, serde_json::to_vec_pretty(generated_sources)?) - .map_err(|err| SolcError::io(err, file))?; - } - } - Ok(()) - } - - fn process_source_map(&self, source_map: Option<&str>, file: &Path) -> Result<(), SolcError> { - if self.source_map { - if let Some(source_map) = source_map { - let file = file.with_extension("sourcemap"); - fs::write(&file, source_map).map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn process_bytecode( - &self, - bytecode: Option<&BytecodeObject>, - file: &Path, - ) -> Result<(), SolcError> { - if self.bytecode { - if let Some(bytecode) = bytecode { - let code = hex::encode(bytecode.as_ref()); - let file = file.with_extension("bin"); - fs::write(&file, code).map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - fn process_deployed_bytecode( - &self, - deployed: Option<&BytecodeObject>, - file: &Path, - ) -> Result<(), SolcError> { - if self.deployed_bytecode { - if let Some(deployed) = deployed { - let code = hex::encode(deployed.as_ref()); - let file = file.with_extension("deployed-bin"); - fs::write(&file, code).map_err(|err| SolcError::io(err, file))? - } - } - Ok(()) - } - - /// Write the set values as separate files - pub fn write_extras(&self, contract: &Contract, file: &Path) -> Result<(), SolcError> { - self.process_abi(contract.abi.as_ref(), file)?; - self.process_metadata(contract.metadata.as_ref().map(|m| &m.metadata), file)?; - self.process_ir(contract.ir.as_deref(), file)?; - self.process_ir_optimized(contract.ir_optimized.as_deref(), file)?; - self.process_ewasm(contract.ewasm.as_ref(), file)?; - - let evm = contract.evm.as_ref(); - self.process_assembly(evm.and_then(|evm| evm.assembly.as_deref()), file)?; - self.process_legacy_assembly(evm.and_then(|evm| evm.legacy_assembly.clone()), file)?; - - let bytecode = evm.and_then(|evm| evm.bytecode.as_ref()); - self.process_generated_sources(bytecode.map(|b| &b.generated_sources), file)?; - - let deployed_bytecode = evm.and_then(|evm| evm.deployed_bytecode.as_ref()); - self.process_source_map(bytecode.and_then(|b| b.source_map.as_deref()), file)?; - self.process_bytecode(bytecode.map(|b| &b.object), file)?; - self.process_deployed_bytecode( - deployed_bytecode.and_then(|d| d.bytecode.as_ref()).map(|b| &b.object), - file, - )?; - - Ok(()) - } -} diff --git a/crates/compilers/src/artifact_output/hh.rs b/crates/compilers/src/artifact_output/hh.rs deleted file mode 100644 index fe47a9ed..00000000 --- a/crates/compilers/src/artifact_output/hh.rs +++ /dev/null @@ -1,79 +0,0 @@ -use crate::{output::sources::VersionedSourceFile, ArtifactOutput}; -use foundry_compilers_artifacts::{ - hh::{HardhatArtifact, HH_ARTIFACT_VERSION}, - Contract, SourceFile, -}; -use std::path::Path; - -/// Hardhat style artifacts handler -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct HardhatArtifacts { - _priv: (), -} - -impl ArtifactOutput for HardhatArtifacts { - type Artifact = 
HardhatArtifact; - - fn contract_to_artifact( - &self, - file: &Path, - name: &str, - contract: Contract, - _source_file: Option<&SourceFile>, - ) -> Self::Artifact { - let (bytecode, link_references, deployed_bytecode, deployed_link_references) = - if let Some(evm) = contract.evm { - let (deployed_bytecode, deployed_link_references) = - if let Some(code) = evm.deployed_bytecode.and_then(|code| code.bytecode) { - (Some(code.object), code.link_references) - } else { - (None, Default::default()) - }; - - let (bytecode, link_ref) = if let Some(bc) = evm.bytecode { - (Some(bc.object), bc.link_references) - } else { - (None, Default::default()) - }; - - (bytecode, link_ref, deployed_bytecode, deployed_link_references) - } else { - (Default::default(), Default::default(), None, Default::default()) - }; - - HardhatArtifact { - format: HH_ARTIFACT_VERSION.to_string(), - contract_name: name.to_string(), - source_name: file.to_string_lossy().to_string(), - abi: contract.abi.unwrap_or_default(), - bytecode, - deployed_bytecode, - link_references, - deployed_link_references, - } - } - - fn standalone_source_file_to_artifact( - &self, - _path: &Path, - _file: &VersionedSourceFile, - ) -> Option { - None - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::Artifact; - - #[test] - fn can_parse_hh_artifact() { - let s = include_str!("../../../../test-data/hh-greeter-artifact.json"); - let artifact = serde_json::from_str::(s).unwrap(); - let compact = artifact.into_compact_contract(); - assert!(compact.abi.is_some()); - assert!(compact.bin.is_some()); - assert!(compact.bin_runtime.is_some()); - } -} diff --git a/crates/compilers/src/artifact_output/mod.rs b/crates/compilers/src/artifact_output/mod.rs deleted file mode 100644 index 681cd45a..00000000 --- a/crates/compilers/src/artifact_output/mod.rs +++ /dev/null @@ -1,1216 +0,0 @@ -//! 
Output artifact handling - -use alloy_json_abi::JsonAbi; -use alloy_primitives::Bytes; -use foundry_compilers_artifacts::{ - hh::HardhatArtifact, - sourcemap::{SourceMap, SyntaxError}, - BytecodeObject, CompactBytecode, CompactContract, CompactContractBytecode, - CompactContractBytecodeCow, CompactDeployedBytecode, Contract, FileToContractsMap, SourceFile, -}; -use foundry_compilers_core::{ - error::{Result, SolcError, SolcIoError}, - utils::{self, strip_prefix_owned}, -}; -use path_slash::PathBufExt; -use semver::Version; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use std::{ - borrow::Cow, - collections::{btree_map::BTreeMap, HashSet}, - ffi::OsString, - fmt, fs, - hash::Hash, - ops::Deref, - path::{Path, PathBuf}, -}; - -mod configurable; -pub use configurable::*; - -mod hh; -pub use hh::*; - -use crate::{ - cache::{CachedArtifact, CompilerCache}, - output::{ - contracts::VersionedContracts, - sources::{VersionedSourceFile, VersionedSourceFiles}, - }, - ProjectPathsConfig, -}; - -/// Represents unique artifact metadata for identifying artifacts on output -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -pub struct ArtifactId { - /// `artifact` cache path - pub path: PathBuf, - pub name: String, - /// Original source file path - pub source: PathBuf, - /// `solc` version that produced this artifact - pub version: Version, - /// `solc` build id - pub build_id: String, -} - -impl ArtifactId { - /// Converts any `\\` separators in the `path` to `/` - pub fn slash_paths(&mut self) { - #[cfg(windows)] - { - self.path = self.path.to_slash_lossy().as_ref().into(); - self.source = self.source.to_slash_lossy().as_ref().into(); - } - } - - /// Convenience function fo [`Self::slash_paths()`] - pub fn with_slashed_paths(mut self) -> Self { - self.slash_paths(); - self - } - - /// Removes `base` from the source's path. - pub fn strip_file_prefixes(&mut self, base: &Path) { - if let Ok(stripped) = self.source.strip_prefix(base) { - self.source = stripped.to_path_buf(); - } - } - - /// Convenience function for [`Self::strip_file_prefixes()`] - pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self { - self.strip_file_prefixes(base); - self - } - - /// Returns a `:` slug that identifies an artifact - /// - /// Note: This identifier is not necessarily unique. If two contracts have the same name, they - /// will share the same slug. For a unique identifier see [ArtifactId::identifier]. 
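Editor's note: the distinction drawn in the doc comment above matters in practice — `slug` collides whenever two files declare a contract with the same name, while `identifier` keys on the source path. A standalone illustration of the two formats (values hypothetical):

```rust
fn main() {
    // Hypothetical artifact: `src/Counter.sol` declaring contract `Counter`.
    let (path_stem, name, source) = ("Counter", "Counter", "src/Counter.sol");
    let slug = format!("{path_stem}.json:{name}"); // may collide across files
    let identifier = format!("{source}:{name}"); // unique: keyed on source path
    assert_eq!(slug, "Counter.json:Counter");
    assert_eq!(identifier, "src/Counter.sol:Counter");
}
```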
- pub fn slug(&self) -> String { - format!("{}.json:{}", self.path.file_stem().unwrap().to_string_lossy(), self.name) - } - - /// Returns a `:` slug that uniquely identifies an artifact - pub fn identifier(&self) -> String { - format!("{}:{}", self.source.display(), self.name) - } - - /// Returns a `:` slug that identifies an artifact - pub fn slug_versioned(&self) -> String { - format!( - "{}.{}.{}.{}.json:{}", - self.path.file_stem().unwrap().to_string_lossy(), - self.version.major, - self.version.minor, - self.version.patch, - self.name - ) - } -} - -/// Represents an artifact file representing a [`crate::Contract`] -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct ArtifactFile { - /// The Artifact that was written - pub artifact: T, - /// path to the file where the `artifact` was written to - pub file: PathBuf, - /// `solc` version that produced this artifact - pub version: Version, - pub build_id: String, -} - -impl ArtifactFile { - /// Writes the given contract to the `out` path creating all parent directories - pub fn write(&self) -> Result<()> { - trace!("writing artifact file {:?} {}", self.file, self.version); - utils::create_parent_dir_all(&self.file)?; - utils::write_json_file(&self.artifact, &self.file, 64 * 1024) - } -} - -impl ArtifactFile { - /// Sets the file to `root` adjoined to `self.file`. - pub fn join(&mut self, root: &Path) { - self.file = root.join(&self.file); - } - - /// Removes `base` from the artifact's path - pub fn strip_prefix(&mut self, base: &Path) { - if let Ok(stripped) = self.file.strip_prefix(base) { - self.file = stripped.to_path_buf(); - } - } -} - -/// local helper type alias `file name -> (contract name -> Vec<..>)` -pub(crate) type ArtifactsMap = FileToContractsMap>>; - -/// Represents a set of Artifacts -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct Artifacts(pub ArtifactsMap); - -impl From> for Artifacts { - fn from(m: ArtifactsMap) -> Self { - Self(m) - } -} - -impl<'a, T> IntoIterator for &'a Artifacts { - type Item = (&'a PathBuf, &'a BTreeMap>>); - type IntoIter = - std::collections::btree_map::Iter<'a, PathBuf, BTreeMap>>>; - - fn into_iter(self) -> Self::IntoIter { - self.0.iter() - } -} - -impl IntoIterator for Artifacts { - type Item = (PathBuf, BTreeMap>>); - type IntoIter = - std::collections::btree_map::IntoIter>>>; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -impl Default for Artifacts { - fn default() -> Self { - Self(Default::default()) - } -} - -impl AsRef> for Artifacts { - fn as_ref(&self) -> &ArtifactsMap { - &self.0 - } -} - -impl AsMut> for Artifacts { - fn as_mut(&mut self) -> &mut ArtifactsMap { - &mut self.0 - } -} - -impl Deref for Artifacts { - type Target = ArtifactsMap; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl Artifacts { - /// Writes all artifacts into the given `artifacts_root` folder - pub fn write_all(&self) -> Result<()> { - for artifact in self.artifact_files() { - artifact.write()?; - } - Ok(()) - } -} - -impl Artifacts { - /// Converts all `\\` separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - #[cfg(windows)] - { - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(path, files)| (PathBuf::from(path.to_slash_lossy().as_ref()), files)) - .collect() - } - } - - pub fn into_inner(self) -> ArtifactsMap { - self.0 - } - - /// Sets the artifact files location to `root` adjoined to `self.file`. 
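Editor's note: the `join`/`strip_prefix` pairs here (on both `ArtifactFile` and the `Artifacts` collection below) are inverses by design — artifact paths are stored relative to the project root so the cache stays portable, then re-rooted on load. A sketch of the round trip with a hypothetical root:

```rust
use std::path::{Path, PathBuf};

fn main() {
    let root = Path::new("my-project");
    let rel = PathBuf::from("out/Counter.sol/Counter.json");
    // `join` re-roots a cached relative path; `strip_prefix` undoes it.
    let abs = root.join(&rel);
    assert_eq!(abs.strip_prefix(root).unwrap(), rel.as_path());
}
```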
- pub fn join_all(&mut self, root: &Path) -> &mut Self { - self.artifact_files_mut().for_each(|artifact| artifact.join(root)); - self - } - - /// Removes `base` from all artifacts - pub fn strip_prefix_all(&mut self, base: &Path) -> &mut Self { - self.artifact_files_mut().for_each(|artifact| artifact.strip_prefix(base)); - self - } - - /// Returns all `ArtifactFile`s for the contract with the matching name - fn get_contract_artifact_files(&self, contract_name: &str) -> Option<&Vec>> { - self.0.values().find_map(|all| all.get(contract_name)) - } - - /// Returns the `Artifact` with matching file, contract name and version - pub fn find_artifact( - &self, - file: &Path, - contract_name: &str, - version: &Version, - ) -> Option<&ArtifactFile> { - self.0 - .get(file) - .and_then(|contracts| contracts.get(contract_name)) - .and_then(|artifacts| artifacts.iter().find(|artifact| artifact.version == *version)) - } - - /// Returns true if this type contains an artifact with the given path for the given contract - pub fn has_contract_artifact(&self, contract_name: &str, artifact_path: &Path) -> bool { - self.get_contract_artifact_files(contract_name) - .map(|artifacts| artifacts.iter().any(|artifact| artifact.file == artifact_path)) - .unwrap_or_default() - } - - /// Returns true if this type contains an artifact with the given path - pub fn has_artifact(&self, artifact_path: &Path) -> bool { - self.artifact_files().any(|artifact| artifact.file == artifact_path) - } - - /// Iterate over all artifact files - pub fn artifact_files(&self) -> impl Iterator> { - self.0.values().flat_map(BTreeMap::values).flatten() - } - - /// Iterate over all artifact files - pub fn artifact_files_mut(&mut self) -> impl Iterator> { - self.0.values_mut().flat_map(BTreeMap::values_mut).flatten() - } - - /// Returns an iterator over _all_ artifacts and ``. - /// - /// Borrowed version of [`Self::into_artifacts`]. - pub fn artifacts>( - &self, - ) -> impl Iterator + '_ { - self.0.iter().flat_map(|(source, contract_artifacts)| { - contract_artifacts.iter().flat_map(move |(_contract_name, artifacts)| { - artifacts.iter().filter_map(move |artifact| { - O::contract_name(&artifact.file).map(|name| { - ( - ArtifactId { - path: PathBuf::from(&artifact.file), - name, - source: source.clone(), - version: artifact.version.clone(), - build_id: artifact.build_id.clone(), - } - .with_slashed_paths(), - &artifact.artifact, - ) - }) - }) - }) - }) - } - - /// Returns an iterator over _all_ artifacts and `` - pub fn into_artifacts>( - self, - ) -> impl Iterator { - self.0.into_iter().flat_map(|(source, contract_artifacts)| { - contract_artifacts.into_iter().flat_map(move |(_contract_name, artifacts)| { - let source = source.clone(); - artifacts.into_iter().filter_map(move |artifact| { - O::contract_name(&artifact.file).map(|name| { - ( - ArtifactId { - path: PathBuf::from(&artifact.file), - name, - source: source.clone(), - version: artifact.version, - build_id: artifact.build_id.clone(), - } - .with_slashed_paths(), - artifact.artifact, - ) - }) - }) - }) - }) - } - - /// Returns an iterator that yields the tuple `(file, contract name, artifact)` - /// - /// **NOTE** this returns the path as is - /// - /// Borrowed version of [`Self::into_artifacts_with_files`]. 
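Editor's note: the iterator below flattens the two-level `file -> contract name -> Vec<artifact>` map into flat tuples. The same shape in miniature, with `u32` standing in for the artifact type:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut map: BTreeMap<&str, BTreeMap<&str, Vec<u32>>> = BTreeMap::new();
    map.entry("src/A.sol").or_default().insert("A", vec![1, 2]);

    let flat: Vec<_> = map
        .iter()
        .flat_map(|(file, contracts)| {
            contracts.iter().flat_map(move |(name, artifacts)| {
                artifacts.iter().map(move |artifact| (*file, *name, *artifact))
            })
        })
        .collect();

    assert_eq!(flat, vec![("src/A.sol", "A", 1), ("src/A.sol", "A", 2)]);
}
```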
- pub fn artifacts_with_files(&self) -> impl Iterator + '_ { - self.0.iter().flat_map(|(f, contract_artifacts)| { - contract_artifacts.iter().flat_map(move |(name, artifacts)| { - artifacts.iter().map(move |artifact| (f, name, &artifact.artifact)) - }) - }) - } - - /// Returns an iterator that yields the tuple `(file, contract name, artifact)` - /// - /// **NOTE** this returns the path as is - pub fn into_artifacts_with_files(self) -> impl Iterator { - self.0.into_iter().flat_map(|(f, contract_artifacts)| { - contract_artifacts.into_iter().flat_map(move |(name, artifacts)| { - let contract_name = name; - let file = f.clone(); - artifacts - .into_iter() - .map(move |artifact| (file.clone(), contract_name.clone(), artifact.artifact)) - }) - }) - } - - /// Strips the given prefix from all artifact file paths to make them relative to the given - /// `root` argument - pub fn into_stripped_file_prefixes(self, base: &Path) -> Self { - let artifacts = - self.0.into_iter().map(|(path, c)| (strip_prefix_owned(path, base), c)).collect(); - Self(artifacts) - } - - /// Finds the first artifact `T` with a matching contract name - pub fn find_first(&self, contract_name: &str) -> Option<&T> { - self.0.iter().find_map(|(_file, contracts)| { - contracts.get(contract_name).and_then(|c| c.first().map(|a| &a.artifact)) - }) - } - - /// Finds the artifact with a matching path and name - pub fn find(&self, contract_path: &Path, contract_name: &str) -> Option<&T> { - self.0.iter().filter(|(path, _)| path.as_path() == contract_path).find_map( - |(_file, contracts)| { - contracts.get(contract_name).and_then(|c| c.first().map(|a| &a.artifact)) - }, - ) - } - - /// Removes the artifact with matching file and name - pub fn remove(&mut self, contract_path: &Path, contract_name: &str) -> Option { - self.0.iter_mut().filter(|(path, _)| path.as_path() == contract_path).find_map( - |(_file, contracts)| { - let mut artifact = None; - if let Some((c, mut artifacts)) = contracts.remove_entry(contract_name) { - if !artifacts.is_empty() { - artifact = Some(artifacts.remove(0).artifact); - } - if !artifacts.is_empty() { - contracts.insert(c, artifacts); - } - } - artifact - }, - ) - } - - /// Removes the first artifact `T` with a matching contract name - /// - /// *Note:* if there are multiple artifacts (contract compiled with different solc) then this - /// returns the first artifact in that set - pub fn remove_first(&mut self, contract_name: &str) -> Option { - self.0.iter_mut().find_map(|(_file, contracts)| { - let mut artifact = None; - if let Some((c, mut artifacts)) = contracts.remove_entry(contract_name) { - if !artifacts.is_empty() { - artifact = Some(artifacts.remove(0).artifact); - } - if !artifacts.is_empty() { - contracts.insert(c, artifacts); - } - } - artifact - }) - } -} - -/// A trait representation for a [`crate::Contract`] artifact -pub trait Artifact { - /// Returns the artifact's [`JsonAbi`] and bytecode. 
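The `remove` and `remove_first` methods above share a small dance: take the whole entry out of the inner map with `remove_entry`, pop the first artifact, and re-insert the remainder if other versions are left. A stand-alone sketch of that pattern over plain `std` maps (all names illustrative):

```rust
use std::collections::BTreeMap;

// Pop the first value recorded under `name`, keeping any remaining values
// (e.g. artifacts produced by other compiler versions) in the map.
fn remove_first(map: &mut BTreeMap<String, Vec<String>>, name: &str) -> Option<String> {
    let (key, mut values) = map.remove_entry(name)?;
    let first = if values.is_empty() { None } else { Some(values.remove(0)) };
    if !values.is_empty() {
        // More than one artifact existed; put the rest back.
        map.insert(key, values);
    }
    first
}

fn main() {
    let mut map = BTreeMap::from([(
        "Greeter".to_string(),
        vec!["0.8.10".to_string(), "0.8.11".to_string()],
    )]);
    assert_eq!(remove_first(&mut map, "Greeter").as_deref(), Some("0.8.10"));
    // The 0.8.11 artifact survives the removal.
    assert!(map.contains_key("Greeter"));
}
```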
- fn into_inner(self) -> (Option, Option); - - /// Turns the artifact into a container type for abi, compact bytecode and deployed bytecode - fn into_compact_contract(self) -> CompactContract; - - /// Turns the artifact into a container type for abi, full bytecode and deployed bytecode - fn into_contract_bytecode(self) -> CompactContractBytecode; - - /// Returns the contents of this type as a single tuple of abi, bytecode and deployed bytecode - fn into_parts(self) -> (Option, Option, Option); - - /// Consumes the type and returns the [JsonAbi] - fn into_abi(self) -> Option - where - Self: Sized, - { - self.into_parts().0 - } - - /// Consumes the type and returns the `bytecode` - fn into_bytecode_bytes(self) -> Option - where - Self: Sized, - { - self.into_parts().1 - } - /// Consumes the type and returns the `deployed bytecode` - fn into_deployed_bytecode_bytes(self) -> Option - where - Self: Sized, - { - self.into_parts().2 - } - - /// Same as [`Self::into_parts()`] but returns `Err` if an element is `None` - fn try_into_parts(self) -> Result<(JsonAbi, Bytes, Bytes)> - where - Self: Sized, - { - let (abi, bytecode, deployed_bytecode) = self.into_parts(); - - Ok(( - abi.ok_or_else(|| SolcError::msg("abi missing"))?, - bytecode.ok_or_else(|| SolcError::msg("bytecode missing"))?, - deployed_bytecode.ok_or_else(|| SolcError::msg("deployed bytecode missing"))?, - )) - } - - /// Returns the reference of container type for abi, compact bytecode and deployed bytecode if - /// available - fn get_contract_bytecode(&self) -> CompactContractBytecodeCow<'_>; - - /// Returns the reference to the `bytecode` - fn get_bytecode(&self) -> Option> { - self.get_contract_bytecode().bytecode - } - - /// Returns the reference to the `bytecode` object - fn get_bytecode_object(&self) -> Option> { - let val = match self.get_bytecode()? { - Cow::Borrowed(b) => Cow::Borrowed(&b.object), - Cow::Owned(b) => Cow::Owned(b.object), - }; - Some(val) - } - - /// Returns the bytes of the `bytecode` object - fn get_bytecode_bytes(&self) -> Option> { - let val = match self.get_bytecode_object()? { - Cow::Borrowed(b) => Cow::Borrowed(b.as_bytes()?), - Cow::Owned(b) => Cow::Owned(b.into_bytes()?), - }; - Some(val) - } - - /// Returns the reference to the `deployedBytecode` - fn get_deployed_bytecode(&self) -> Option> { - self.get_contract_bytecode().deployed_bytecode - } - - /// Returns the reference to the `bytecode` object - fn get_deployed_bytecode_object(&self) -> Option> { - let val = match self.get_deployed_bytecode()? { - Cow::Borrowed(b) => Cow::Borrowed(&b.bytecode.as_ref()?.object), - Cow::Owned(b) => Cow::Owned(b.bytecode?.object), - }; - Some(val) - } - - /// Returns the bytes of the `deployed bytecode` object - fn get_deployed_bytecode_bytes(&self) -> Option> { - let val = match self.get_deployed_bytecode_object()? { - Cow::Borrowed(b) => Cow::Borrowed(b.as_bytes()?), - Cow::Owned(b) => Cow::Owned(b.into_bytes()?), - }; - Some(val) - } - - /// Returns the reference to the [JsonAbi] if available - fn get_abi(&self) -> Option> { - self.get_contract_bytecode().abi - } - - /// Returns the `sourceMap` of the creation bytecode - /// - /// Returns `None` if no `sourceMap` string was included in the compiler output - /// Returns `Some(Err)` if parsing the sourcemap failed - fn get_source_map(&self) -> Option> { - self.get_bytecode()?.source_map() - } - - /// Returns the creation bytecode `sourceMap` as str if it was included in the compiler output - fn get_source_map_str(&self) -> Option> { - match self.get_bytecode()? 
{
-            Cow::Borrowed(code) => code.source_map.as_deref().map(Cow::Borrowed),
-            Cow::Owned(code) => code.source_map.map(Cow::Owned),
-        }
-    }
-
-    /// Returns the `sourceMap` of the runtime bytecode
-    ///
-    /// Returns `None` if no `sourceMap` string was included in the compiler output
-    /// Returns `Some(Err)` if parsing the sourcemap failed
-    fn get_source_map_deployed(&self) -> Option<Result<SourceMap, SyntaxError>> {
-        self.get_deployed_bytecode()?.source_map()
-    }
-
-    /// Returns the runtime bytecode `sourceMap` as str if it was included in the compiler output
-    fn get_source_map_deployed_str(&self) -> Option<Cow<'_, str>> {
-        match self.get_deployed_bytecode()? {
-            Cow::Borrowed(code) => code.bytecode.as_ref()?.source_map.as_deref().map(Cow::Borrowed),
-            Cow::Owned(code) => code.bytecode?.source_map.map(Cow::Owned),
-        }
-    }
-}
-
-impl<T> Artifact for T
-where
-    T: Into<CompactContractBytecode> + Into<CompactContract>,
-    for<'a> &'a T: Into<CompactContractBytecodeCow<'a>>,
-{
-    fn into_inner(self) -> (Option<JsonAbi>, Option<Bytes>) {
-        let artifact = self.into_compact_contract();
-        (artifact.abi, artifact.bin.and_then(|bin| bin.into_bytes()))
-    }
-
-    fn into_compact_contract(self) -> CompactContract {
-        self.into()
-    }
-
-    fn into_contract_bytecode(self) -> CompactContractBytecode {
-        self.into()
-    }
-
-    fn into_parts(self) -> (Option<JsonAbi>, Option<Bytes>, Option<Bytes>) {
-        self.into_compact_contract().into_parts()
-    }
-
-    fn get_contract_bytecode(&self) -> CompactContractBytecodeCow<'_> {
-        self.into()
-    }
-}
-
-/// Handler invoked with the output of `solc`
-///
-/// Implementers of this trait are expected to take care of [`crate::Contract`] to
-/// [`crate::ArtifactOutput::Artifact`] conversion and how that `Artifact` type is stored on disk,
-/// this includes artifact file location and naming.
-///
-/// Depending on the [`crate::Project`] contracts and their compatible versions,
-/// the project compiler may invoke different `solc` executables on the same
-/// solidity file, leading to multiple [`crate::CompilerOutput`]s for the same `.sol` file.
-/// In addition to the `solidity file` to `contract` relationship (1-N*),
-/// [`crate::VersionedContracts`] also tracks the `contract` to (`artifact` + `solc version`)
-/// relationship (1-N+).
-pub trait ArtifactOutput {
-    /// Represents the artifact that will be stored for a `Contract`
-    type Artifact: Artifact + DeserializeOwned + Serialize + fmt::Debug + Send + Sync;
-
-    /// Handle the aggregated set of compiled contracts from the solc [`crate::CompilerOutput`].
-    ///
-    /// This will be invoked with all aggregated contracts from (multiple) solc `CompilerOutput`.
-    /// See [`crate::AggregatedCompilerOutput`]
-    fn on_output(
-        &self,
-        contracts: &VersionedContracts,
-        sources: &VersionedSourceFiles,
-        layout: &ProjectPathsConfig,
-        ctx: OutputContext<'_>,
-    ) -> Result<Artifacts<Self::Artifact>> {
-        let mut artifacts = self.output_to_artifacts(contracts, sources, ctx, layout);
-        fs::create_dir_all(&layout.artifacts).map_err(|err| {
-            error!(dir=?layout.artifacts, "Failed to create artifacts folder");
-            SolcIoError::new(err, &layout.artifacts)
-        })?;
-
-        artifacts.join_all(&layout.artifacts);
-        artifacts.write_all()?;
-
-        self.handle_artifacts(contracts, &artifacts)?;
-
-        Ok(artifacts)
-    }
-
-    /// Invoked after artifacts have been written to disk for additional processing.
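The `get_*` accessors above lean on one idiom worth isolating: projecting a field out of a `Cow` so that a borrowed container yields a borrowed field while an owned container moves the field out, with no clone in either case. A minimal sketch with a stand-in `Bytecode` type:

```rust
use std::borrow::Cow;

#[derive(Clone)]
struct Bytecode {
    object: Vec<u8>,
}

// Borrowed input -> borrowed field; owned input -> field moved out.
fn get_object(bytecode: Cow<'_, Bytecode>) -> Cow<'_, Vec<u8>> {
    match bytecode {
        Cow::Borrowed(b) => Cow::Borrowed(&b.object),
        Cow::Owned(b) => Cow::Owned(b.object),
    }
}

fn main() {
    let code = Bytecode { object: vec![0x60, 0x80] };
    assert!(matches!(get_object(Cow::Borrowed(&code)), Cow::Borrowed(_)));
    assert!(matches!(get_object(Cow::Owned(code)), Cow::Owned(_)));
}
```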
- fn handle_artifacts( - &self, - _contracts: &VersionedContracts, - _artifacts: &Artifacts, - ) -> Result<()> { - Ok(()) - } - - /// Returns the file name for the contract's artifact - /// `Greeter.json` - fn output_file_name(name: &str) -> PathBuf { - format!("{name}.json").into() - } - - /// Returns the file name for the contract's artifact and the given version - /// `Greeter.0.8.11.json` - fn output_file_name_versioned(name: &str, version: &Version) -> PathBuf { - format!("{}.{}.{}.{}.json", name, version.major, version.minor, version.patch).into() - } - - /// Returns the appropriate file name for the conflicting file. - /// - /// This should ensure that the resulting `PathBuf` is conflict free, which could be possible if - /// there are two separate contract files (in different folders) that contain the same contract: - /// - /// `src/A.sol::A` - /// `src/nested/A.sol::A` - /// - /// Which would result in the same `PathBuf` if only the file and contract name is taken into - /// account, [`Self::output_file`]. - /// - /// This return a unique output file - fn conflict_free_output_file( - already_taken: &HashSet, - conflict: PathBuf, - contract_file: &Path, - artifacts_folder: &Path, - ) -> PathBuf { - let mut rel_candidate = conflict; - if let Ok(stripped) = rel_candidate.strip_prefix(artifacts_folder) { - rel_candidate = stripped.to_path_buf(); - } - #[allow(clippy::redundant_clone)] // false positive - let mut candidate = rel_candidate.clone(); - let mut current_parent = contract_file.parent(); - - while let Some(parent_name) = current_parent.and_then(|f| f.file_name()) { - // this is problematic if both files are absolute - candidate = Path::new(parent_name).join(&candidate); - let out_path = artifacts_folder.join(&candidate); - if !already_taken.contains(&out_path.to_slash_lossy().to_lowercase()) { - trace!("found alternative output file={:?} for {:?}", out_path, contract_file); - return out_path; - } - current_parent = current_parent.and_then(|f| f.parent()); - } - - // this means we haven't found an alternative yet, which shouldn't actually happen since - // `contract_file` are unique, but just to be safe, handle this case in which case - // we simply numerate the parent folder - - trace!("no conflict free output file found after traversing the file"); - - let mut num = 1; - - loop { - // this will attempt to find an alternate path by numerating the first component in the - // path: `+_/....sol` - let mut components = rel_candidate.components(); - let first = components.next().expect("path not empty"); - let name = first.as_os_str(); - let mut numerated = OsString::with_capacity(name.len() + 2); - numerated.push(name); - numerated.push("_"); - numerated.push(num.to_string()); - - let candidate: PathBuf = Some(numerated.as_os_str()) - .into_iter() - .chain(components.map(|c| c.as_os_str())) - .collect(); - if !already_taken.contains(&candidate.to_slash_lossy().to_lowercase()) { - trace!("found alternative output file={:?} for {:?}", candidate, contract_file); - return candidate; - } - - num += 1; - } - } - - /// Returns the path to the contract's artifact location based on the contract's file and name - /// - /// This returns `contract.sol/contract.json` by default - fn output_file(contract_file: &Path, name: &str) -> PathBuf { - contract_file - .file_name() - .map(Path::new) - .map(|p| p.join(Self::output_file_name(name))) - .unwrap_or_else(|| Self::output_file_name(name)) - } - - /// Returns the path to the contract's artifact location based on the contract's file, name and - 
/// version
-    ///
-    /// This returns `contract.sol/contract.0.8.11.json` by default
-    fn output_file_versioned(contract_file: &Path, name: &str, version: &Version) -> PathBuf {
-        contract_file
-            .file_name()
-            .map(Path::new)
-            .map(|p| p.join(Self::output_file_name_versioned(name, version)))
-            .unwrap_or_else(|| Self::output_file_name_versioned(name, version))
-    }
-
-    /// The inverse of `contract_file_name`
-    ///
-    /// Expected to return the solidity contract's name derived from the file path
-    /// `sources/Greeter.sol` -> `Greeter`
-    fn contract_name(file: &Path) -> Option<String> {
-        file.file_stem().and_then(|s| s.to_str().map(|s| s.to_string()))
-    }
-
-    /// Whether the corresponding artifact of the given contract file and name exists
-    fn output_exists(contract_file: &Path, name: &str, root: &Path) -> bool {
-        root.join(Self::output_file(contract_file, name)).exists()
-    }
-
-    /// Read the artifact that's stored at the given path
-    ///
-    /// # Errors
-    ///
-    /// Returns an error if
-    /// - The file does not exist
-    /// - The file's content couldn't be deserialized into the `Artifact` type
-    fn read_cached_artifact(path: &Path) -> Result<Self::Artifact> {
-        utils::read_json_file(path)
-    }
-
-    /// Read the cached artifacts that are located at the paths the iterator yields
-    ///
-    /// See [`Self::read_cached_artifact()`]
-    fn read_cached_artifacts<T, I>(files: I) -> Result<BTreeMap<PathBuf, Self::Artifact>>
-    where
-        I: IntoIterator<Item = T>,
-        T: Into<PathBuf>,
-    {
-        let mut artifacts = BTreeMap::default();
-        for path in files.into_iter() {
-            let path = path.into();
-            let artifact = Self::read_cached_artifact(&path)?;
-            artifacts.insert(path, artifact);
-        }
-        Ok(artifacts)
-    }
-
-    /// Convert a contract to the artifact type
-    ///
-    /// This is the core conversion function that takes care of converting a `Contract` into the
-    /// associated `Artifact` type.
-    /// The `SourceFile` is also provided
-    fn contract_to_artifact(
-        &self,
-        _file: &Path,
-        _name: &str,
-        contract: Contract,
-        source_file: Option<&SourceFile>,
-    ) -> Self::Artifact;
-
-    /// Generates a path for an artifact based on already taken paths by either cached or compiled
-    /// artifacts.
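To make `conflict_free_output_file` concrete: when `src/A.sol::A` and `src/nested/A.sol::A` both map to `A.sol/A.json`, the walk-up loop prepends parent directory names from the contract file until the candidate path is free. A simplified, self-contained version of that loop (the real method additionally lowercases candidates via `to_slash_lossy` and falls back to numbering a path component):

```rust
use std::collections::HashSet;
use std::path::{Path, PathBuf};

// Walk up: prefix the conflicting relative path with successive parent
// directory names of `contract_file` until the result is not taken.
fn disambiguate(taken: &HashSet<PathBuf>, conflict: PathBuf, contract_file: &Path) -> PathBuf {
    let mut candidate = conflict;
    let mut parent = contract_file.parent();
    while let Some(name) = parent.and_then(|p| p.file_name()) {
        candidate = Path::new(name).join(&candidate);
        if !taken.contains(&candidate) {
            return candidate;
        }
        parent = parent.and_then(|p| p.parent());
    }
    candidate
}

fn main() {
    let taken = HashSet::from([PathBuf::from("A.sol/A.json")]);
    // `src/nested/A.sol::A` collides with `src/A.sol::A`, so it gets nested.
    let path = disambiguate(&taken, "A.sol/A.json".into(), Path::new("src/nested/A.sol"));
    assert_eq!(path, PathBuf::from("nested/A.sol/A.json"));
}
```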
-    fn get_artifact_path(
-        ctx: &OutputContext<'_>,
-        already_taken: &HashSet<String>,
-        file: &Path,
-        name: &str,
-        artifacts_folder: &Path,
-        version: &Version,
-        versioned: bool,
-    ) -> PathBuf {
-        // If an artifact for the contract already exists (from a previous compile job), reuse
-        // the path. This makes sure that even if there are conflicting files (files for which
-        // `T::output_file()` would return the same path) we use consistent output paths.
-        if let Some(existing_artifact) = ctx.existing_artifact(file, name, version) {
-            trace!("use existing artifact file {:?}", existing_artifact);
-            existing_artifact.to_path_buf()
-        } else {
-            let path = if versioned {
-                Self::output_file_versioned(file, name, version)
-            } else {
-                Self::output_file(file, name)
-            };
-
-            let path = artifacts_folder.join(path);
-
-            if already_taken.contains(&path.to_slash_lossy().to_lowercase()) {
-                // preventing conflict
-                Self::conflict_free_output_file(already_taken, path, file, artifacts_folder)
-            } else {
-                path
-            }
-        }
-    }
-
-    /// Convert the compiler output into a set of artifacts
-    ///
-    /// **Note:** This only converts the artifacts; it does _NOT_ write them to disk. See
-    /// [`Self::on_output()`].
-    fn output_to_artifacts(
-        &self,
-        contracts: &VersionedContracts,
-        sources: &VersionedSourceFiles,
-        ctx: OutputContext<'_>,
-        layout: &ProjectPathsConfig,
-    ) -> Artifacts<Self::Artifact> {
-        let mut artifacts = ArtifactsMap::new();
-
-        // this tracks all the `SourceFile`s that we successfully mapped to a contract
-        let mut non_standalone_sources = HashSet::new();
-
-        // prepopulate the taken-paths set with cached artifacts
-        let mut taken_paths_lowercase = ctx
-            .existing_artifacts
-            .values()
-            .flat_map(|artifacts| artifacts.values().flat_map(|artifacts| artifacts.values()))
-            .map(|a| a.path.to_slash_lossy().to_lowercase())
-            .collect::<HashSet<_>>();
-
-        let mut files = contracts.keys().collect::<Vec<_>>();
-        // Iterate starting with top-most files to ensure that they get the shortest paths.
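The ordering mentioned in the comment above matters: sorting by path alone could let a deeply nested file claim a short artifact path before a shallow one (`src/a/Deep.sol` sorts before `src/z.sol` lexicographically). Sorting by the tuple `(component count, path)` guarantees that shallower files always win. A tiny demonstration of the same sort key:

```rust
use std::path::PathBuf;

fn main() {
    let mut files = vec![
        PathBuf::from("src/inner/A.sol"),
        PathBuf::from("src/A.sol"),
        PathBuf::from("lib/dep/src/A.sol"),
    ];
    // Depth first, lexicographic path order as the tiebreak.
    files.sort_by(|a, b| (a.components().count(), a).cmp(&(b.components().count(), b)));
    assert_eq!(
        files,
        vec![
            PathBuf::from("src/A.sol"),
            PathBuf::from("src/inner/A.sol"),
            PathBuf::from("lib/dep/src/A.sol"),
        ]
    );
}
```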
- files.sort_by(|file1, file2| { - (file1.components().count(), file1).cmp(&(file2.components().count(), file2)) - }); - for file in files { - for (name, versioned_contracts) in &contracts[file] { - for contract in versioned_contracts { - // track `SourceFile`s that can be mapped to contracts - let source_file = sources.find_file_and_version(file, &contract.version); - - if let Some(source) = source_file { - non_standalone_sources.insert((source.id, &contract.version)); - } - - let artifact_path = Self::get_artifact_path( - &ctx, - &taken_paths_lowercase, - file, - name, - layout.artifacts.as_path(), - &contract.version, - versioned_contracts.len() > 1, - ); - - taken_paths_lowercase.insert(artifact_path.to_slash_lossy().to_lowercase()); - - trace!( - "use artifact file {:?} for contract file {} {}", - artifact_path, - file.display(), - contract.version - ); - - let artifact = self.contract_to_artifact( - file, - name, - contract.contract.clone(), - source_file, - ); - - let artifact = ArtifactFile { - artifact, - file: artifact_path, - version: contract.version.clone(), - build_id: contract.build_id.clone(), - }; - - artifacts - .entry(file.to_path_buf()) - .or_default() - .entry(name.to_string()) - .or_default() - .push(artifact); - } - } - } - - // extend with standalone source files and convert them to artifacts - // this is unfortunately necessary, so we can "mock" `Artifacts` for solidity files without - // any contract definition, which are not included in the `CompilerOutput` but we want to - // create Artifacts for them regardless - for (file, sources) in sources.as_ref().iter() { - for source in sources { - if !non_standalone_sources.contains(&(source.source_file.id, &source.version)) { - // scan the ast as a safe measure to ensure this file does not include any - // source units - // there's also no need to create a standalone artifact for source files that - // don't contain an ast - if source.source_file.ast.is_none() - || source.source_file.contains_contract_definition() - { - continue; - } - - // we use file and file stem - if let Some(name) = Path::new(file).file_stem().and_then(|stem| stem.to_str()) { - if let Some(artifact) = - self.standalone_source_file_to_artifact(file, source) - { - let artifact_path = Self::get_artifact_path( - &ctx, - &taken_paths_lowercase, - file, - name, - &layout.artifacts, - &source.version, - sources.len() > 1, - ); - - let entries = artifacts - .entry(file.clone()) - .or_default() - .entry(name.to_string()) - .or_default(); - - if entries.iter().all(|entry| entry.version != source.version) { - taken_paths_lowercase - .insert(artifact_path.to_slash_lossy().to_lowercase()); - - entries.push(ArtifactFile { - artifact, - file: artifact_path, - version: source.version.clone(), - build_id: source.build_id.clone(), - }); - } - } - } - } - } - } - - Artifacts(artifacts) - } - - /// This converts a `SourceFile` that doesn't contain _any_ contract definitions (interfaces, - /// contracts, libraries) to an artifact. - /// - /// We do this because not all `SourceFile`s emitted by solc have at least 1 corresponding entry - /// in the `contracts` - /// section of the solc output. For example for an `errors.sol` that only contains custom error - /// definitions and no contract, no `Contract` object will be generated by solc. However, we - /// still want to emit an `Artifact` for that file that may include the `ast`, docs etc., - /// because other tools depend on this, such as slither. 
- fn standalone_source_file_to_artifact( - &self, - _path: &Path, - _file: &VersionedSourceFile, - ) -> Option; - - /// Handler allowing artifacts handler to enforce artifact recompilation. - fn is_dirty(&self, _artifact_file: &ArtifactFile) -> Result { - Ok(false) - } - - /// Invoked with all artifacts that were not recompiled. - fn handle_cached_artifacts(&self, _artifacts: &Artifacts) -> Result<()> { - Ok(()) - } -} - -/// Additional context to use during [`ArtifactOutput::on_output()`] -#[derive(Clone, Debug, Default)] -#[non_exhaustive] -pub struct OutputContext<'a> { - /// Cache file of the project or empty if no caching is enabled - /// - /// This context is required for partially cached recompile with conflicting files, so that we - /// can use the same adjusted output path for conflicting files like: - /// - /// ```text - /// src - /// ├── a.sol - /// └── inner - /// └── a.sol - /// ``` - pub existing_artifacts: - BTreeMap<&'a Path, &'a BTreeMap>>, -} - -// === impl OutputContext - -impl<'a> OutputContext<'a> { - /// Create a new context with the given cache file - pub fn new(cache: &'a CompilerCache) -> Self { - let existing_artifacts = cache - .files - .iter() - .map(|(file, entry)| (file.as_path(), &entry.artifacts)) - .collect::>(); - - Self { existing_artifacts } - } - - /// Returns the path of the already existing artifact for the `contract` of the `file` compiled - /// with the `version`. - /// - /// Returns `None` if no file exists - pub fn existing_artifact( - &self, - file: &Path, - contract: &str, - version: &Version, - ) -> Option<&Path> { - self.existing_artifacts.get(file).and_then(|contracts| { - contracts - .get(contract) - .and_then(|versions| versions.get(version)) - .map(|a| a.path.as_path()) - }) - } -} - -/// An `Artifact` implementation that uses a compact representation -/// -/// Creates a single json artifact with -/// ```json -/// { -/// "abi": [], -/// "bytecode": {...}, -/// "deployedBytecode": {...} -/// } -/// ``` -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct MinimalCombinedArtifacts { - _priv: (), -} - -impl ArtifactOutput for MinimalCombinedArtifacts { - type Artifact = CompactContractBytecode; - - fn contract_to_artifact( - &self, - _file: &Path, - _name: &str, - contract: Contract, - _source_file: Option<&SourceFile>, - ) -> Self::Artifact { - Self::Artifact::from(contract) - } - - fn standalone_source_file_to_artifact( - &self, - _path: &Path, - _file: &VersionedSourceFile, - ) -> Option { - None - } -} - -/// An Artifacts handler implementation that works the same as `MinimalCombinedArtifacts` but also -/// supports reading hardhat artifacts if an initial attempt to deserialize an artifact failed -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub struct MinimalCombinedArtifactsHardhatFallback { - _priv: (), -} - -impl ArtifactOutput for MinimalCombinedArtifactsHardhatFallback { - type Artifact = CompactContractBytecode; - - fn on_output( - &self, - output: &VersionedContracts, - sources: &VersionedSourceFiles, - layout: &ProjectPathsConfig, - ctx: OutputContext<'_>, - ) -> Result> { - MinimalCombinedArtifacts::default().on_output(output, sources, layout, ctx) - } - - fn read_cached_artifact(path: &Path) -> Result { - let content = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?; - if let Ok(a) = serde_json::from_str(&content) { - Ok(a) - } else { - error!("Failed to deserialize compact artifact"); - trace!("Fallback to hardhat artifact deserialization"); - let artifact = 
serde_json::from_str::(&content)?; - trace!("successfully deserialized hardhat artifact"); - Ok(artifact.into_contract_bytecode()) - } - } - - fn contract_to_artifact( - &self, - file: &Path, - name: &str, - contract: Contract, - source_file: Option<&SourceFile>, - ) -> Self::Artifact { - MinimalCombinedArtifacts::default().contract_to_artifact(file, name, contract, source_file) - } - - fn standalone_source_file_to_artifact( - &self, - path: &Path, - file: &VersionedSourceFile, - ) -> Option { - MinimalCombinedArtifacts::default().standalone_source_file_to_artifact(path, file) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn is_artifact() { - fn assert_artifact() {} - - assert_artifact::(); - assert_artifact::(); - } - - #[test] - fn can_find_alternate_paths() { - let mut already_taken = HashSet::new(); - - let file = Path::new("v1/tokens/Greeter.sol"); - let conflict = PathBuf::from("out/Greeter.sol/Greeter.json"); - let artifacts_folder = Path::new("out"); - - let alternative = ConfigurableArtifacts::conflict_free_output_file( - &already_taken, - conflict.clone(), - file, - artifacts_folder, - ); - assert_eq!(alternative.to_slash_lossy(), "out/tokens/Greeter.sol/Greeter.json"); - - already_taken.insert("out/tokens/Greeter.sol/Greeter.json".to_lowercase()); - let alternative = ConfigurableArtifacts::conflict_free_output_file( - &already_taken, - conflict.clone(), - file, - artifacts_folder, - ); - assert_eq!(alternative.to_slash_lossy(), "out/v1/tokens/Greeter.sol/Greeter.json"); - - already_taken.insert("out/v1/tokens/Greeter.sol/Greeter.json".to_lowercase()); - let alternative = ConfigurableArtifacts::conflict_free_output_file( - &already_taken, - conflict, - file, - artifacts_folder, - ); - assert_eq!(alternative, PathBuf::from("Greeter.sol_1/Greeter.json")); - } - - #[test] - fn can_find_alternate_path_conflict() { - let mut already_taken = HashSet::new(); - - let file = "/Users/carter/dev/goldfinch/mono/packages/protocol/test/forge/mainnet/utils/BaseMainnetForkingTest.t.sol"; - let conflict = PathBuf::from("/Users/carter/dev/goldfinch/mono/packages/protocol/artifacts/BaseMainnetForkingTest.t.sol/BaseMainnetForkingTest.json"); - already_taken.insert("/Users/carter/dev/goldfinch/mono/packages/protocol/artifacts/BaseMainnetForkingTest.t.sol/BaseMainnetForkingTest.json".into()); - - let alternative = ConfigurableArtifacts::conflict_free_output_file( - &already_taken, - conflict, - file.as_ref(), - "/Users/carter/dev/goldfinch/mono/packages/protocol/artifacts".as_ref(), - ); - - assert_eq!(alternative.to_slash_lossy(), "/Users/carter/dev/goldfinch/mono/packages/protocol/artifacts/utils/BaseMainnetForkingTest.t.sol/BaseMainnetForkingTest.json"); - } - - fn assert_artifact() {} - - #[test] - fn test() { - assert_artifact::(); - assert_artifact::>(); - } -} diff --git a/crates/compilers/src/buildinfo.rs b/crates/compilers/src/buildinfo.rs deleted file mode 100644 index 571e8471..00000000 --- a/crates/compilers/src/buildinfo.rs +++ /dev/null @@ -1,150 +0,0 @@ -//! 
Represents an entire build - -use crate::compilers::{CompilationError, CompilerInput, CompilerOutput, Language}; -use alloy_primitives::hex; -use foundry_compilers_core::{error::Result, utils}; -use md5::Digest; -use semver::Version; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use std::{ - collections::{BTreeMap, HashSet}, - path::{Path, PathBuf}, -}; - -pub const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-build-info-1"; - -// A hardhat compatible build info representation -#[derive(Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct BuildInfo { - pub id: String, - #[serde(rename = "_format")] - pub format: String, - pub solc_version: Version, - pub solc_long_version: Version, - pub input: I, - pub output: O, -} - -impl BuildInfo { - /// Deserializes the `BuildInfo` object from the given file - pub fn read(path: &Path) -> Result { - utils::read_json_file(path) - } -} - -/// Additional context we cache for each compiler run. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct BuildContext { - /// Mapping from internal compiler source id to path of the source file. - pub source_id_to_path: BTreeMap, - /// Language of the compiler. - pub language: L, -} - -impl BuildContext { - pub fn new(input: &I, output: &CompilerOutput) -> Result - where - I: CompilerInput, - { - let mut source_id_to_path = BTreeMap::new(); - - let input_sources = input.sources().map(|(path, _)| path).collect::>(); - for (path, source) in output.sources.iter() { - if input_sources.contains(path.as_path()) { - source_id_to_path.insert(source.id, path.to_path_buf()); - } - } - - Ok(Self { source_id_to_path, language: input.language() }) - } - - pub fn join_all(&mut self, root: &Path) { - self.source_id_to_path.values_mut().for_each(|path| { - *path = root.join(path.as_path()); - }); - } - - pub fn with_joined_paths(mut self, root: &Path) -> Self { - self.join_all(root); - self - } -} - -/// Represents `BuildInfo` object -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct RawBuildInfo { - /// The hash that identifies the BuildInfo - pub id: String, - #[serde(flatten)] - pub build_context: BuildContext, - /// serialized `BuildInfo` json - #[serde(flatten)] - pub build_info: BTreeMap, -} - -// === impl RawBuildInfo === - -impl RawBuildInfo { - /// Serializes a `BuildInfo` object - pub fn new, E: CompilationError>( - input: &I, - output: &CompilerOutput, - full_build_info: bool, - ) -> Result { - let version = input.version().clone(); - let build_context = BuildContext::new(input, output)?; - - let mut hasher = md5::Md5::new(); - - hasher.update(ETHERS_FORMAT_VERSION); - - let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch); - hasher.update(&solc_short); - hasher.update(version.to_string()); - - let input = serde_json::to_value(input)?; - hasher.update(&serde_json::to_string(&input)?); - - // create the hash for `{_format,solcVersion,solcLongVersion,input}` - // N.B. 
this is not exactly the same as hashing the json representation of these values but - // the must efficient one - let result = hasher.finalize(); - let id = hex::encode(result); - - let mut build_info = BTreeMap::new(); - - if full_build_info { - build_info.insert("_format".to_string(), serde_json::to_value(ETHERS_FORMAT_VERSION)?); - build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?); - build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?); - build_info.insert("input".to_string(), input); - build_info.insert("output".to_string(), serde_json::to_value(output)?); - } - - Ok(Self { id, build_info, build_context }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::compilers::solc::SolcVersionedInput; - use foundry_compilers_artifacts::{sources::Source, Error, SolcLanguage, Sources}; - use std::path::PathBuf; - - #[test] - fn build_info_serde() { - let v: Version = "0.8.4+commit.c7e474f2".parse().unwrap(); - let input = SolcVersionedInput::build( - Sources::from([(PathBuf::from("input.sol"), Source::new(""))]), - Default::default(), - SolcLanguage::Solidity, - v, - ); - let output = CompilerOutput::::default(); - let raw_info = RawBuildInfo::new(&input, &output, true).unwrap(); - let _info: BuildInfo> = - serde_json::from_str(&serde_json::to_string(&raw_info).unwrap()).unwrap(); - } -} diff --git a/crates/compilers/src/cache.rs b/crates/compilers/src/cache.rs deleted file mode 100644 index 0d5d1613..00000000 --- a/crates/compilers/src/cache.rs +++ /dev/null @@ -1,1073 +0,0 @@ -//! Support for compiling contracts. - -use crate::{ - buildinfo::RawBuildInfo, - compilers::{Compiler, CompilerSettings, Language}, - output::Builds, - resolver::GraphEdges, - ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project, - ProjectPaths, ProjectPathsConfig, SourceCompilationKind, -}; -use foundry_compilers_artifacts::{ - sources::{Source, Sources}, - Settings, -}; -use foundry_compilers_core::{ - error::{Result, SolcError}, - utils::{self, strip_prefix}, -}; -use semver::Version; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use std::{ - collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet}, - fs, - path::{Path, PathBuf}, - time::{Duration, UNIX_EPOCH}, -}; - -/// ethers-rs format version -/// -/// `ethers-solc` uses a different format version id, but the actual format is consistent with -/// hardhat This allows ethers-solc to detect if the cache file was written by hardhat or -/// `ethers-solc` -const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-3"; - -/// The file name of the default cache file -pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json"; - -/// A multi version cache file -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct CompilerCache { - #[serde(rename = "_format")] - pub format: String, - /// contains all directories used for the project - pub paths: ProjectPaths, - pub files: BTreeMap>, - pub builds: BTreeSet, -} - -impl CompilerCache { - pub fn new(format: String, paths: ProjectPaths) -> Self { - Self { format, paths, files: Default::default(), builds: Default::default() } - } -} - -impl CompilerCache { - pub fn is_empty(&self) -> bool { - self.files.is_empty() - } - - /// Returns `true` if the cache contains any artifacts for the given file and version. 
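Stepping back to `RawBuildInfo::new`: the build id is an MD5 digest over the format marker, the short and full compiler version strings, and the JSON-serialized input. A stand-alone sketch of the same recipe, assuming the `md-5`, `hex` and `semver` crates (the constant mirrors `ETHERS_FORMAT_VERSION` above):

```rust
use md5::Digest;
use semver::Version;

const FORMAT_VERSION: &str = "ethers-rs-sol-build-info-1";

// Hash the format marker, the `major.minor.patch` string, the full version
// (including commit metadata) and the serialized compiler input.
fn build_id(version: &Version, input_json: &str) -> String {
    let mut hasher = md5::Md5::new();
    hasher.update(FORMAT_VERSION);
    hasher.update(format!("{}.{}.{}", version.major, version.minor, version.patch));
    hasher.update(version.to_string());
    hasher.update(input_json);
    hex::encode(hasher.finalize())
}

fn main() {
    let v: Version = "0.8.4+commit.c7e474f2".parse().unwrap();
    // Deterministic for identical inputs, so it can key build-info files.
    assert_eq!(build_id(&v, "{}"), build_id(&v, "{}"));
}
```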
-    pub fn contains(&self, file: &Path, version: &Version) -> bool {
-        self.files.get(file).map_or(true, |entry| !entry.contains_version(version))
-    }
-
-    /// Removes entry for the given file
-    pub fn remove(&mut self, file: &Path) -> Option<CacheEntry<S>> {
-        self.files.remove(file)
-    }
-
-    /// How many entries the cache contains, where each entry represents a source file
-    pub fn len(&self) -> usize {
-        self.files.len()
-    }
-
-    /// How many `Artifacts` this cache references, where a source file can have multiple artifacts
-    pub fn artifacts_len(&self) -> usize {
-        self.entries().map(|entry| entry.artifacts().count()).sum()
-    }
-
-    /// Returns an iterator over all `CacheEntry` this cache contains
-    pub fn entries(&self) -> impl Iterator<Item = &CacheEntry<S>> {
-        self.files.values()
-    }
-
-    /// Returns the corresponding `CacheEntry` for the file if it exists
-    pub fn entry(&self, file: &Path) -> Option<&CacheEntry<S>> {
-        self.files.get(file)
-    }
-
-    /// Returns the corresponding `CacheEntry` for the file if it exists
-    pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry<S>> {
-        self.files.get_mut(file)
-    }
-
-    /// Reads the cache json file from the given path
-    ///
-    /// See also [`Self::read_joined()`]
-    ///
-    /// # Errors
-    ///
-    /// If the cache file does not exist
-    ///
-    /// # Examples
-    /// ```no_run
-    /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings, Project};
-    ///
-    /// let project = Project::builder().build(Default::default())?;
-    /// let mut cache = CompilerCache::<SolcSettings>::read(project.cache_path())?;
-    /// cache.join_artifacts_files(project.artifacts_path());
-    /// # Ok::<_, Box<dyn std::error::Error>>(())
-    /// ```
-    #[instrument(skip_all, name = "sol-files-cache::read")]
-    pub fn read(path: &Path) -> Result<Self> {
-        trace!("reading solfiles cache at {}", path.display());
-        let cache: Self = utils::read_json_file(path)?;
-        trace!("read cache \"{}\" with {} entries", cache.format, cache.files.len());
-        Ok(cache)
-    }
-
-    /// Reads the cache json file from the given path and returns the cache with paths adjoined to
-    /// the `ProjectPathsConfig`.
-    ///
-    /// This expects the `artifact` files to be relative to the artifacts dir of the `paths` and the
-    /// `CacheEntry` paths to be relative to the root dir of the `paths`
-    ///
-    /// # Examples
-    /// ```no_run
-    /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings, Project};
-    ///
-    /// let project = Project::builder().build(Default::default())?;
-    /// let cache: CompilerCache<SolcSettings> = CompilerCache::read_joined(&project.paths)?;
-    /// # Ok::<_, Box<dyn std::error::Error>>(())
-    /// ```
-    pub fn read_joined(paths: &ProjectPathsConfig) -> Result<Self> {
-        let mut cache = Self::read(&paths.cache)?;
-        cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts);
-        Ok(cache)
-    }
-
-    /// Write the cache as json file to the given path
-    pub fn write(&self, path: &Path) -> Result<()> {
-        trace!("writing cache with {} entries to json file: \"{}\"", self.len(), path.display());
-        utils::create_parent_dir_all(path)?;
-        utils::write_json_file(self, path, 128 * 1024)?;
-        trace!("cache file located: \"{}\"", path.display());
-        Ok(())
-    }
-
-    /// Removes build infos which don't have any artifacts linked to them.
- pub fn remove_outdated_builds(&mut self) { - let mut outdated = Vec::new(); - for build_id in &self.builds { - if !self - .entries() - .flat_map(|e| e.artifacts.values()) - .flat_map(|a| a.values()) - .any(|a| a.build_id == *build_id) - { - outdated.push(build_id.to_owned()); - } - } - - for build_id in outdated { - self.builds.remove(&build_id); - let path = self.paths.build_infos.join(build_id).with_extension("json"); - let _ = std::fs::remove_file(path); - } - } - - /// Sets the `CacheEntry`'s file paths to `root` adjoined to `self.file`. - pub fn join_entries(&mut self, root: &Path) -> &mut Self { - self.files = std::mem::take(&mut self.files) - .into_iter() - .map(|(path, entry)| (root.join(path), entry)) - .collect(); - self - } - - /// Removes `base` from all `CacheEntry` paths - pub fn strip_entries_prefix(&mut self, base: &Path) -> &mut Self { - self.files = std::mem::take(&mut self.files) - .into_iter() - .map(|(path, entry)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), entry)) - .collect(); - self - } - - /// Sets the artifact files location to `base` adjoined to the `CachEntries` artifacts. - pub fn join_artifacts_files(&mut self, base: &Path) -> &mut Self { - self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base)); - self - } - - /// Removes `base` from all artifact file paths - pub fn strip_artifact_files_prefixes(&mut self, base: &Path) -> &mut Self { - self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base)); - self - } - - /// Removes all `CacheEntry` which source files don't exist on disk - /// - /// **NOTE:** this assumes the `files` are absolute - pub fn remove_missing_files(&mut self) { - trace!("remove non existing files from cache"); - self.files.retain(|file, _| { - let exists = file.exists(); - if !exists { - trace!("remove {} from cache", file.display()); - } - exists - }) - } - - /// Checks if all artifact files exist - pub fn all_artifacts_exist(&self) -> bool { - self.files.values().all(|entry| entry.all_artifacts_exist()) - } - - /// Strips the given prefix from all `file` paths that identify a `CacheEntry` to make them - /// relative to the given `base` argument - /// - /// In other words this sets the keys (the file path of a solidity file) relative to the `base` - /// argument, so that the key `/Users/me/project/src/Greeter.sol` will be changed to - /// `src/Greeter.sol` if `base` is `/Users/me/project` - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{ - /// artifacts::contract::CompactContract, cache::CompilerCache, solc::SolcSettings, Project, - /// }; - /// - /// let project = Project::builder().build(Default::default())?; - /// let cache: CompilerCache = - /// CompilerCache::read(project.cache_path())?.with_stripped_file_prefixes(project.root()); - /// let artifact: CompactContract = cache.read_artifact("src/Greeter.sol".as_ref(), "Greeter")?; - /// # Ok::<_, Box>(()) - /// ``` - /// - /// **Note:** this only affects the source files, see [`Self::strip_artifact_files_prefixes()`] - pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self { - self.files = self - .files - .into_iter() - .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e)) - .collect(); - self - } - - /// Returns the path to the artifact of the given `(file, contract)` pair - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let cache: 
CompilerCache = CompilerCache::read_joined(&project.paths)?; - /// cache.find_artifact_path("/Users/git/myproject/src/Greeter.sol".as_ref(), "Greeter"); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find_artifact_path(&self, contract_file: &Path, contract_name: &str) -> Option<&Path> { - let entry = self.entry(contract_file)?; - entry.find_artifact_path(contract_name) - } - - /// Finds the path to the artifact of the given `(file, contract)` pair (see - /// [`Self::find_artifact_path()`]) and deserializes the artifact file as JSON. - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{ - /// artifacts::contract::CompactContract, cache::CompilerCache, solc::SolcSettings, Project, - /// }; - /// - /// let project = Project::builder().build(Default::default())?; - /// let cache = CompilerCache::::read_joined(&project.paths)?; - /// let artifact: CompactContract = - /// cache.read_artifact("/Users/git/myproject/src/Greeter.sol".as_ref(), "Greeter")?; - /// # Ok::<_, Box>(()) - /// ``` - /// - /// **NOTE**: unless the cache's `files` keys were modified `contract_file` is expected to be - /// absolute. - pub fn read_artifact( - &self, - contract_file: &Path, - contract_name: &str, - ) -> Result { - let artifact_path = - self.find_artifact_path(contract_file, contract_name).ok_or_else(|| { - SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string()) - })?; - utils::read_json_file(artifact_path) - } - - /// Reads all cached artifacts from disk using the given ArtifactOutput handler - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{ - /// artifacts::contract::CompactContractBytecode, cache::CompilerCache, solc::SolcSettings, - /// Project, - /// }; - /// - /// let project = Project::builder().build(Default::default())?; - /// let cache: CompilerCache = CompilerCache::read_joined(&project.paths)?; - /// let artifacts = cache.read_artifacts::()?; - /// # Ok::<_, Box>(()) - /// ``` - pub fn read_artifacts( - &self, - ) -> Result> { - use rayon::prelude::*; - - let artifacts = self - .files - .par_iter() - .map(|(file, entry)| entry.read_artifact_files().map(|files| (file.clone(), files))) - .collect::>>()?; - Ok(Artifacts(artifacts)) - } - - /// Reads all cached [BuildContext]s from disk. [BuildContext] is inlined into [RawBuildInfo] - /// objects, so we are basically just partially deserializing build infos here. 
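`read_artifacts` above and `read_builds` right after it rely on the same rayon idiom: map every file to a `Result<(key, value)>` and collect straight into a `Result<BTreeMap<_, _>>`, so the first I/O or parse error short-circuits the parallel walk. A self-contained sketch of that pattern, assuming the `rayon` crate:

```rust
use rayon::prelude::*;
use std::collections::BTreeMap;

// Parse many entries in parallel; collecting into `Result<BTreeMap<..>>`
// aborts on the first `Err`, like `read_artifacts`/`read_builds`.
fn parse_all(inputs: &[(&str, &str)]) -> Result<BTreeMap<String, u64>, std::num::ParseIntError> {
    inputs
        .par_iter()
        .map(|(name, raw)| raw.parse::<u64>().map(|v| (name.to_string(), v)))
        .collect()
}

fn main() {
    assert_eq!(parse_all(&[("a", "1"), ("b", "2")]).unwrap().len(), 2);
    assert!(parse_all(&[("a", "1"), ("b", "oops")]).is_err());
}
```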
- /// - /// [BuildContext]: crate::buildinfo::BuildContext - pub fn read_builds(&self, build_info_dir: &Path) -> Result> { - use rayon::prelude::*; - - self.builds - .par_iter() - .map(|build_id| { - utils::read_json_file(&build_info_dir.join(build_id).with_extension("json")) - .map(|b| (build_id.clone(), b)) - }) - .collect::>() - .map(|b| Builds(b)) - } -} - -#[cfg(feature = "async")] -impl CompilerCache { - pub async fn async_read(path: &Path) -> Result { - let path = path.to_owned(); - Self::asyncify(move || Self::read(&path)).await - } - - pub async fn async_write(&self, path: &Path) -> Result<()> { - let content = serde_json::to_vec(self)?; - tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path)) - } - - async fn asyncify(f: F) -> Result - where - F: FnOnce() -> Result + Send + 'static, - T: Send + 'static, - { - match tokio::task::spawn_blocking(f).await { - Ok(res) => res, - Err(_) => Err(SolcError::io( - std::io::Error::new(std::io::ErrorKind::Other, "background task failed"), - "", - )), - } - } -} - -impl Default for CompilerCache { - fn default() -> Self { - Self { - format: ETHERS_FORMAT_VERSION.to_string(), - builds: Default::default(), - files: Default::default(), - paths: Default::default(), - } - } -} - -impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache { - fn from(config: &'a ProjectPathsConfig) -> Self { - let paths = config.paths_relative(); - Self::new(Default::default(), paths) - } -} - -/// Cached artifact data. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct CachedArtifact { - /// Path to the artifact file. - pub path: PathBuf, - /// Build id which produced the given artifact. - pub build_id: String, -} - -/// A `CacheEntry` in the cache file represents a solidity file -/// -/// A solidity file can contain several contracts, for every contract a separate `Artifact` is -/// emitted. so the `CacheEntry` tracks the artifacts by name. A file can be compiled with multiple -/// `solc` versions generating version specific artifacts. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CacheEntry { - /// the last modification time of this file - pub last_modification_date: u64, - /// hash to identify whether the content of the file changed - pub content_hash: String, - /// identifier name see [`foundry_compilers_core::utils::source_name()`] - pub source_name: PathBuf, - /// what config was set when compiling this file - pub compiler_settings: S, - /// fully resolved imports of the file - /// - /// all paths start relative from the project's root: `src/importedFile.sol` - pub imports: BTreeSet, - /// The solidity version pragma - pub version_requirement: Option, - /// all artifacts produced for this file - /// - /// In theory a file can be compiled by different solc versions: - /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` - /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different - /// artifacts. - /// - /// This map tracks the artifacts by `name -> (Version -> PathBuf)`. - /// This mimics the default artifacts directory structure - pub artifacts: BTreeMap>, - /// Whether this file was compiled at least once. - /// - /// If this is true and `artifacts` are empty, it means that given version of the file does - /// not produce any artifacts and it should not be compiled again. 
- /// - /// If this is false, then artifacts are definitely empty and it should be compiled if we may - /// need artifacts. - pub seen_by_compiler: bool, -} - -impl CacheEntry { - /// Returns the last modified timestamp `Duration` - pub fn last_modified(&self) -> Duration { - Duration::from_millis(self.last_modification_date) - } - - /// Returns the artifact path for the contract name. - /// - /// # Examples - /// - /// ```no_run - /// use foundry_compilers::cache::CacheEntry; - /// - /// # fn t(entry: CacheEntry) { - /// # stringify!( - /// let entry: CacheEntry = ...; - /// # ); - /// entry.find_artifact_path("Greeter"); - /// # } - /// ``` - pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> { - self.artifacts.get(contract_name)?.iter().next().map(|(_, p)| p.path.as_path()) - } - - /// Reads the last modification date from the file's metadata - pub fn read_last_modification_date(file: &Path) -> Result { - let last_modification_date = fs::metadata(file) - .map_err(|err| SolcError::io(err, file.to_path_buf()))? - .modified() - .map_err(|err| SolcError::io(err, file.to_path_buf()))? - .duration_since(UNIX_EPOCH) - .map_err(SolcError::msg)? - .as_millis() as u64; - Ok(last_modification_date) - } - - /// Reads all artifact files associated with the `CacheEntry` - /// - /// **Note:** all artifact file paths should be absolute. - fn read_artifact_files( - &self, - ) -> Result>>> { - let mut artifacts = BTreeMap::new(); - for (artifact_name, versioned_files) in self.artifacts.iter() { - let mut files = Vec::with_capacity(versioned_files.len()); - for (version, cached_artifact) in versioned_files { - let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?; - files.push(ArtifactFile { - artifact, - file: cached_artifact.path.clone(), - version: version.clone(), - build_id: cached_artifact.build_id.clone(), - }); - } - artifacts.insert(artifact_name.clone(), files); - } - Ok(artifacts) - } - - pub(crate) fn merge_artifacts<'a, A, I, T: 'a>(&mut self, artifacts: I) - where - I: IntoIterator, - A: IntoIterator>, - { - for (name, artifacts) in artifacts.into_iter() { - for artifact in artifacts { - self.artifacts.entry(name.clone()).or_default().insert( - artifact.version.clone(), - CachedArtifact { - build_id: artifact.build_id.clone(), - path: artifact.file.clone(), - }, - ); - } - } - } - - /// Returns `true` if the artifacts set contains the given version - pub fn contains_version(&self, version: &Version) -> bool { - self.artifacts_versions().any(|(v, _)| v == version) - } - - /// Iterator that yields all artifact files and their version - pub fn artifacts_versions(&self) -> impl Iterator { - self.artifacts.values().flatten() - } - - /// Returns the artifact file for the contract and version pair - pub fn find_artifact(&self, contract: &str, version: &Version) -> Option<&CachedArtifact> { - self.artifacts.get(contract).and_then(|files| files.get(version)) - } - - /// Iterator that yields all artifact files and their version - pub fn artifacts_for_version<'a>( - &'a self, - version: &'a Version, - ) -> impl Iterator + 'a { - self.artifacts_versions().filter_map(move |(ver, file)| (ver == version).then_some(file)) - } - - /// Iterator that yields all artifact files - pub fn artifacts(&self) -> impl Iterator { - self.artifacts.values().flat_map(BTreeMap::values) - } - - /// Mutable iterator over all artifact files - pub fn artifacts_mut(&mut self) -> impl Iterator { - self.artifacts.values_mut().flat_map(BTreeMap::values_mut) - } - - /// Checks if all artifact 
files exist - pub fn all_artifacts_exist(&self) -> bool { - self.artifacts().all(|a| a.path.exists()) - } - - /// Sets the artifact's paths to `base` adjoined to the artifact's `path`. - pub fn join_artifacts_files(&mut self, base: &Path) { - self.artifacts_mut().for_each(|a| a.path = base.join(&a.path)) - } - - /// Removes `base` from the artifact's path - pub fn strip_artifact_files_prefixes(&mut self, base: &Path) { - self.artifacts_mut().for_each(|a| { - if let Ok(rem) = a.path.strip_prefix(base) { - a.path = rem.to_path_buf(); - } - }) - } -} - -/// Collection of source file paths mapped to versions. -#[derive(Clone, Debug, Default)] -pub struct GroupedSources { - pub inner: HashMap>, -} - -impl GroupedSources { - /// Inserts provided source and version into the collection. - pub fn insert(&mut self, file: PathBuf, version: Version) { - match self.inner.entry(file) { - hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().insert(version); - } - hash_map::Entry::Vacant(entry) => { - entry.insert(HashSet::from([version])); - } - } - } - - /// Returns true if the file was included with the given version. - pub fn contains(&self, file: &Path, version: &Version) -> bool { - self.inner.get(file).map_or(false, |versions| versions.contains(version)) - } -} - -/// A helper abstraction over the [`CompilerCache`] used to determine what files need to compiled -/// and which `Artifacts` can be reused. -#[derive(Debug)] -pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput, C: Compiler> { - /// The preexisting cache file. - pub cache: CompilerCache, - - /// All already existing artifacts. - pub cached_artifacts: Artifacts, - - /// All already existing build infos. - pub cached_builds: Builds, - - /// Relationship between all the files. - pub edges: GraphEdges, - - /// The project. - pub project: &'a Project, - - /// Files that were invalidated and removed from cache. - /// Those are not grouped by version and purged completely. - pub dirty_sources: HashSet, - - /// Artifact+version pairs which are in scope for each solc version. - /// - /// Only those files will be included into cached artifacts list for each version. - pub sources_in_scope: GroupedSources, - - /// The file hashes. - pub content_hashes: HashMap, -} - -impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { - /// Creates a new cache entry for the file - fn create_cache_entry(&mut self, file: PathBuf, source: &Source) { - let imports = self - .edges - .imports(&file) - .into_iter() - .map(|import| strip_prefix(import, self.project.root()).into()) - .collect(); - - let entry = CacheEntry { - last_modification_date: CacheEntry::::read_last_modification_date(&file) - .unwrap_or_default(), - content_hash: source.content_hash(), - source_name: strip_prefix(&file, self.project.root()).into(), - compiler_settings: self.project.settings.clone(), - imports, - version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()), - // artifacts remain empty until we received the compiler output - artifacts: Default::default(), - seen_by_compiler: false, - }; - - self.cache.files.insert(file, entry.clone()); - } - - /// Returns the set of [Source]s that need to be compiled to produce artifacts for requested - /// input. - /// - /// Source file may have one of the two [SourceCompilationKind]s: - /// 1. [SourceCompilationKind::Complete] - the file has been modified or compiled with different - /// settings and its cache is invalidated. 
For such sources we request full data needed for - /// artifact construction. - /// 2. [SourceCompilationKind::Optimized] - the file is not dirty, but is imported by a dirty - /// file and thus will be processed by solc. For such files we don't need full data, so we - /// are marking them as clean to optimize output selection later. - fn filter(&mut self, sources: &mut Sources, version: &Version) { - // sources that should be passed to compiler. - let mut compile_complete = HashSet::new(); - let mut compile_optimized = HashSet::new(); - - for (file, source) in sources.iter() { - self.sources_in_scope.insert(file.clone(), version.clone()); - - // If we are missing artifact for file, compile it. - if self.is_missing_artifacts(file, version) { - compile_complete.insert(file.clone()); - } - - // Ensure that we have a cache entry for all sources. - if !self.cache.files.contains_key(file) { - self.create_cache_entry(file.clone(), source); - } - } - - // Prepare optimization by collecting sources which are imported by files requiring complete - // compilation. - for source in &compile_complete { - for import in self.edges.imports(source) { - if !compile_complete.contains(import) { - compile_optimized.insert(import.clone()); - } - } - } - - sources.retain(|file, source| { - source.kind = if compile_complete.contains(file) { - SourceCompilationKind::Complete - } else if compile_optimized.contains(file) { - SourceCompilationKind::Optimized - } else { - return false; - }; - true - }); - } - - /// Returns whether we are missing artifacts for the given file and version. - #[instrument(level = "trace", skip(self))] - fn is_missing_artifacts(&self, file: &Path, version: &Version) -> bool { - let Some(entry) = self.cache.entry(file) else { - trace!("missing cache entry"); - return true; - }; - - // only check artifact's existence if the file generated artifacts. - // e.g. a solidity file consisting only of import statements (like interfaces that - // re-export) do not create artifacts - if entry.seen_by_compiler && entry.artifacts.is_empty() { - trace!("no artifacts"); - return false; - } - - if !entry.contains_version(version) { - trace!("missing linked artifacts"); - return true; - } - - if entry.artifacts_for_version(version).any(|artifact| { - let missing_artifact = !self.cached_artifacts.has_artifact(&artifact.path); - if missing_artifact { - trace!("missing artifact \"{}\"", artifact.path.display()); - } - missing_artifact - }) { - return true; - } - - false - } - - // Walks over all cache entires, detects dirty files and removes them from cache. - fn find_and_remove_dirty(&mut self) { - fn populate_dirty_files( - file: &Path, - dirty_files: &mut HashSet, - edges: &GraphEdges, - ) { - for file in edges.importers(file) { - // If file is marked as dirty we either have already visited it or it was marked as - // dirty initially and will be visited at some point later. - if !dirty_files.contains(file) { - dirty_files.insert(file.to_path_buf()); - populate_dirty_files(file, dirty_files, edges); - } - } - } - - // Iterate over existing cache entries. - let files = self.cache.files.keys().cloned().collect::>(); - - let mut sources = Sources::new(); - - // Read all sources, marking entries as dirty on I/O errors. - for file in &files { - let Ok(source) = Source::read(file) else { - self.dirty_sources.insert(file.clone()); - continue; - }; - sources.insert(file.clone(), source); - } - - // Build a temporary graph for walking imports. 
We need this because `self.edges` - // only contains graph data for in-scope sources but we are operating on cache entries. - if let Ok(graph) = Graph::::resolve_sources(&self.project.paths, sources) { - let (sources, edges) = graph.into_sources(); - - // Calculate content hashes for later comparison. - self.fill_hashes(&sources); - - // Pre-add all sources that are guaranteed to be dirty - for file in sources.keys() { - if self.is_dirty_impl(file) { - self.dirty_sources.insert(file.clone()); - } - } - - // Perform DFS to find direct/indirect importers of dirty files. - for file in self.dirty_sources.clone().iter() { - populate_dirty_files(file, &mut self.dirty_sources, &edges); - } - } else { - // Purge all sources on graph resolution error. - self.dirty_sources.extend(files); - } - - // Remove all dirty files from cache. - for file in &self.dirty_sources { - debug!("removing dirty file from cache: {}", file.display()); - self.cache.remove(file); - } - } - - fn is_dirty_impl(&self, file: &Path) -> bool { - let Some(hash) = self.content_hashes.get(file) else { - trace!("missing content hash"); - return true; - }; - - let Some(entry) = self.cache.entry(file) else { - trace!("missing cache entry"); - return true; - }; - - if entry.content_hash != *hash { - trace!("content hash changed"); - return true; - } - - if !self.project.settings.can_use_cached(&entry.compiler_settings) { - trace!("solc config not compatible"); - return true; - } - - // If any requested extra files are missing for any artifact, mark source as dirty to - // generate them - for artifacts in self.cached_artifacts.values() { - for artifacts in artifacts.values() { - for artifact_file in artifacts { - if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) { - return true; - } - } - } - } - - // all things match, can be reused - false - } - - /// Adds the file's hashes to the set if not set yet - fn fill_hashes(&mut self, sources: &Sources) { - for (file, source) in sources { - if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) { - entry.insert(source.content_hash()); - } - } - } -} - -/// Abstraction over configured caching which can be either non-existent or an already loaded cache -#[allow(clippy::large_enum_variant)] -#[derive(Debug)] -pub(crate) enum ArtifactsCache<'a, T: ArtifactOutput, C: Compiler> { - /// Cache nothing on disk - Ephemeral(GraphEdges, &'a Project), - /// Handles the actual cached artifacts, detects artifacts that can be reused - Cached(ArtifactsCacheInner<'a, T, C>), -} - -impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { - /// Create a new cache instance with the given files - pub fn new(project: &'a Project, edges: GraphEdges) -> Result { - /// Returns the [CompilerCache] to use - /// - /// Returns a new empty cache if the cache does not exist or `invalidate_cache` is set. 
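The dirty-file propagation in `find_and_remove_dirty` is a plain depth-first walk over the reverse import edges: every importer of a dirty file becomes dirty too, transitively. A self-contained sketch, with an explicit importer map standing in for `GraphEdges`:

```rust
use std::collections::{HashMap, HashSet};
use std::path::{Path, PathBuf};

// Recursively mark every direct or indirect importer of `file` as dirty,
// mirroring `populate_dirty_files`.
fn mark_importers(
    file: &Path,
    importers: &HashMap<PathBuf, Vec<PathBuf>>,
    dirty: &mut HashSet<PathBuf>,
) {
    for importer in importers.get(file).map(Vec::as_slice).unwrap_or(&[]) {
        // `insert` returns false for files already marked, stopping cycles.
        if dirty.insert(importer.clone()) {
            mark_importers(importer, importers, dirty);
        }
    }
}

fn main() {
    // B imports A, C imports B: editing A dirties all three files.
    let importers = HashMap::from([
        (PathBuf::from("A.sol"), vec![PathBuf::from("B.sol")]),
        (PathBuf::from("B.sol"), vec![PathBuf::from("C.sol")]),
    ]);
    let mut dirty = HashSet::from([PathBuf::from("A.sol")]);
    mark_importers(Path::new("A.sol"), &importers, &mut dirty);
    assert_eq!(dirty.len(), 3);
}
```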
- fn get_cache( - project: &Project, - invalidate_cache: bool, - ) -> CompilerCache { - // the currently configured paths - let paths = project.paths.paths_relative(); - - if !invalidate_cache && project.cache_path().exists() { - if let Ok(cache) = CompilerCache::read_joined(&project.paths) { - if cache.paths == paths { - // unchanged project paths - return cache; - } - } - } - - // new empty cache - CompilerCache::new(Default::default(), paths) - } - - let cache = if project.cached { - // we only read the existing cache if we were able to resolve the entire graph - // if we failed to resolve an import we invalidate the cache so don't get any false - // positives - let invalidate_cache = !edges.unresolved_imports().is_empty(); - - // read the cache file if it already exists - let mut cache = get_cache(project, invalidate_cache); - - cache.remove_missing_files(); - - // read all artifacts - let mut cached_artifacts = if project.paths.artifacts.exists() { - trace!("reading artifacts from cache..."); - // if we failed to read the whole set of artifacts we use an empty set - let artifacts = cache.read_artifacts::().unwrap_or_default(); - trace!("read {} artifacts from cache", artifacts.artifact_files().count()); - artifacts - } else { - Default::default() - }; - - trace!("reading build infos from cache..."); - let cached_builds = cache.read_builds(&project.paths.build_infos).unwrap_or_default(); - - // Remove artifacts for which we are missing a build info. - cached_artifacts.0.retain(|_, artifacts| { - artifacts.retain(|_, artifacts| { - artifacts.retain(|artifact| cached_builds.contains_key(&artifact.build_id)); - !artifacts.is_empty() - }); - !artifacts.is_empty() - }); - - let cache = ArtifactsCacheInner { - cache, - cached_artifacts, - cached_builds, - edges, - project, - dirty_sources: Default::default(), - content_hashes: Default::default(), - sources_in_scope: Default::default(), - }; - - ArtifactsCache::Cached(cache) - } else { - // nothing to cache - ArtifactsCache::Ephemeral(edges, project) - }; - - Ok(cache) - } - - /// Returns the graph data for this project - pub fn graph(&self) -> &GraphEdges { - match self { - ArtifactsCache::Ephemeral(graph, _) => graph, - ArtifactsCache::Cached(inner) => &inner.edges, - } - } - - #[cfg(test)] - #[allow(unused)] - #[doc(hidden)] - // only useful for debugging for debugging purposes - pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T, C>> { - match self { - ArtifactsCache::Ephemeral(..) => None, - ArtifactsCache::Cached(cached) => Some(cached), - } - } - - pub fn output_ctx(&self) -> OutputContext<'_> { - match self { - ArtifactsCache::Ephemeral(..) => Default::default(), - ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache), - } - } - - pub fn project(&self) -> &'a Project { - match self { - ArtifactsCache::Ephemeral(_, project) => project, - ArtifactsCache::Cached(cache) => cache.project, - } - } - - /// Adds the file's hashes to the set if not set yet - pub fn remove_dirty_sources(&mut self) { - match self { - ArtifactsCache::Ephemeral(..) => {} - ArtifactsCache::Cached(cache) => cache.find_and_remove_dirty(), - } - } - - /// Filters out those sources that don't need to be compiled - pub fn filter(&mut self, sources: &mut Sources, version: &Version) { - match self { - ArtifactsCache::Ephemeral(..) 
=> {} - ArtifactsCache::Cached(cache) => cache.filter(sources, version), - } - } - - /// Consumes the `Cache`, rebuilds the `SolFileCache` by merging all artifacts that were - /// filtered out in the previous step (`Cache::filtered`) and the artifacts that were just - /// compiled and written to disk `written_artifacts`. - /// - /// Returns all the _cached_ artifacts. - pub fn consume( - self, - written_artifacts: &Artifacts, - written_build_infos: &Vec>, - write_to_disk: bool, - ) -> Result<(Artifacts, Builds)> - where - T: ArtifactOutput, - { - let ArtifactsCache::Cached(cache) = self else { - trace!("no cache configured, ephemeral"); - return Ok(Default::default()); - }; - - let ArtifactsCacheInner { - mut cache, - mut cached_artifacts, - cached_builds, - dirty_sources, - sources_in_scope, - project, - .. - } = cache; - - // Remove cached artifacts which are out of scope, dirty or appear in `written_artifacts`. - cached_artifacts.0.retain(|file, artifacts| { - let file = Path::new(file); - artifacts.retain(|name, artifacts| { - artifacts.retain(|artifact| { - let version = &artifact.version; - - if !sources_in_scope.contains(file, version) { - return false; - } - if dirty_sources.contains(file) { - return false; - } - if written_artifacts.find_artifact(file, name, version).is_some() { - return false; - } - true - }); - !artifacts.is_empty() - }); - !artifacts.is_empty() - }); - - // Update cache entries with newly written artifacts. We update data for any artifacts as - // `written_artifacts` always contain the most recent data. - for (file, artifacts) in written_artifacts.as_ref() { - let file_path = Path::new(file); - // Only update data for existing entries, we should have entries for all in-scope files - // by now. - if let Some(entry) = cache.files.get_mut(file_path) { - entry.merge_artifacts(artifacts); - } - } - - for build_info in written_build_infos { - cache.builds.insert(build_info.id.clone()); - } - - // write to disk - if write_to_disk { - cache.remove_outdated_builds(); - // make all `CacheEntry` paths relative to the project root and all artifact - // paths relative to the artifact's directory - cache - .strip_entries_prefix(project.root()) - .strip_artifact_files_prefixes(project.artifacts_path()); - cache.write(project.cache_path())?; - } - - Ok((cached_artifacts, cached_builds)) - } - - /// Marks the cached entry as seen by the compiler, if it's cached. - pub fn compiler_seen(&mut self, file: &Path) { - if let ArtifactsCache::Cached(cache) = self { - if let Some(entry) = cache.cache.entry_mut(file) { - entry.seen_by_compiler = true; - } - } - } -} diff --git a/crates/compilers/src/compile/many.rs b/crates/compilers/src/compile/many.rs deleted file mode 100644 index a92b3cda..00000000 --- a/crates/compilers/src/compile/many.rs +++ /dev/null @@ -1,43 +0,0 @@ -use foundry_compilers_artifacts::{CompilerOutput, SolcInput}; -use foundry_compilers_core::error::Result; - -use crate::compilers::solc::Solc; - -/// The result of a `solc` process bundled with its `Solc` and `CompilerInput` -type CompileElement = (Result, Solc, SolcInput); - -/// The bundled output of multiple `solc` processes. 
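`consume` above keeps the cached set tidy with nested `retain` calls, dropping contract names and whole files once nothing survives under them. The same pruning shape on a plain nested map, with simplified stand-in types rather than the real artifact structs:

```rust
use std::collections::BTreeMap;

// file -> contract name -> versions (stand-in for versioned artifacts)
type Cached = BTreeMap<String, BTreeMap<String, Vec<u64>>>;

/// Keep only artifacts for which `keep` returns true; drop contract names
/// and files that end up empty, so the map never holds hollow entries.
fn prune(cached: &mut Cached, keep: impl Fn(&str, &str, u64) -> bool) {
    cached.retain(|file, contracts| {
        contracts.retain(|name, versions| {
            versions.retain(|v| keep(file, name, *v));
            !versions.is_empty()
        });
        !contracts.is_empty()
    });
}

fn main() {
    let mut cached: Cached = BTreeMap::from([(
        "src/Greeter.sol".to_string(),
        BTreeMap::from([("Greeter".to_string(), vec![1, 2])]),
    )]);
    // Drop "version" 1, the way `consume` drops out-of-scope, dirty, or
    // freshly rewritten artifacts.
    prune(&mut cached, |_, _, v| v >= 2);
    assert_eq!(cached["src/Greeter.sol"]["Greeter"], vec![2]);
}
```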
-#[derive(Debug)] -pub struct CompiledMany { - outputs: Vec, -} - -impl CompiledMany { - pub fn new(outputs: Vec) -> Self { - Self { outputs } - } - - /// Returns an iterator over all output elements - pub fn outputs(&self) -> impl Iterator { - self.outputs.iter() - } - - /// Returns an iterator over all output elements - pub fn into_outputs(self) -> impl Iterator { - self.outputs.into_iter() - } - - /// Returns all `CompilerOutput` or the first error that occurred - pub fn flattened(self) -> Result> { - self.into_iter().collect() - } -} - -impl IntoIterator for CompiledMany { - type Item = Result; - type IntoIter = std::vec::IntoIter>; - - fn into_iter(self) -> Self::IntoIter { - self.outputs.into_iter().map(|(res, _, _)| res).collect::>().into_iter() - } -} diff --git a/crates/compilers/src/compile/mod.rs b/crates/compilers/src/compile/mod.rs deleted file mode 100644 index a577eb8e..00000000 --- a/crates/compilers/src/compile/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub mod many; - -pub mod output; -pub use output::{contracts, info, sources}; - -pub mod project; diff --git a/crates/compilers/src/compile/output/contracts.rs b/crates/compilers/src/compile/output/contracts.rs deleted file mode 100644 index 99a0e0bd..00000000 --- a/crates/compilers/src/compile/output/contracts.rs +++ /dev/null @@ -1,316 +0,0 @@ -use crate::ArtifactId; -use foundry_compilers_artifacts::{ - CompactContractBytecode, CompactContractRef, Contract, FileToContractsMap, -}; -use semver::Version; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use std::{ - collections::BTreeMap, - ops::{Deref, DerefMut}, - path::{Path, PathBuf}, -}; - -/// file -> [(contract name -> Contract + solc version)] -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[serde(transparent)] -pub struct VersionedContracts(pub FileToContractsMap>); - -impl VersionedContracts { - /// Converts all `\\` separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - #[cfg(windows)] - { - use path_slash::PathExt; - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(path, files)| (PathBuf::from(path.to_slash_lossy().as_ref()), files)) - .collect() - } - } - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns an iterator over all files - pub fn files(&self) -> impl Iterator + '_ { - self.0.keys() - } - - /// Finds the _first_ contract with the given name - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output(); - /// let contract = output.find_first("Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find_first(&self, contract_name: &str) -> Option> { - self.contracts().find_map(|(name, contract)| { - (name == contract_name).then(|| CompactContractRef::from(contract)) - }) - } - - /// Finds the contract with matching path and name - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output(); - /// let contract = output.contracts.find("src/Greeter.sol".as_ref(), "Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find( - &self, - contract_path: &Path, - contract_name: &str, - ) -> Option> { - self.contracts_with_files().find_map(|(path, name, contract)| { - (path == contract_path && name == 
contract_name) - .then(|| CompactContractRef::from(contract)) - }) - } - - /// Removes the _first_ contract with the given name from the set - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let (_, mut contracts) = project.compile()?.into_output().split(); - /// let contract = contracts.remove_first("Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove_first(&mut self, contract_name: &str) -> Option { - self.0.values_mut().find_map(|all_contracts| { - let mut contract = None; - if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { - if !contracts.is_empty() { - contract = Some(contracts.remove(0).contract); - } - if !contracts.is_empty() { - all_contracts.insert(c, contracts); - } - } - contract - }) - } - - /// Removes the contract with matching path and name - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let (_, mut contracts) = project.compile()?.into_output().split(); - /// let contract = contracts.remove("src/Greeter.sol".as_ref(), "Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove(&mut self, path: &Path, contract_name: &str) -> Option { - let (key, mut all_contracts) = self.0.remove_entry(path)?; - let mut contract = None; - if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { - if !contracts.is_empty() { - contract = Some(contracts.remove(0).contract); - } - if !contracts.is_empty() { - all_contracts.insert(c, contracts); - } - } - - if !all_contracts.is_empty() { - self.0.insert(key, all_contracts); - } - contract - } - - /// Given the contract file's path and the contract's name, tries to return the contract's - /// bytecode, runtime bytecode, and ABI. - pub fn get(&self, path: &Path, contract: &str) -> Option> { - self.0 - .get(path) - .and_then(|contracts| { - contracts.get(contract).and_then(|c| c.first().map(|c| &c.contract)) - }) - .map(CompactContractRef::from) - } - - /// Returns an iterator over all contracts and their names. - pub fn contracts(&self) -> impl Iterator { - self.0 - .values() - .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract)))) - } - - /// Returns an iterator over (`file`, `name`, `Contract`). - pub fn contracts_with_files(&self) -> impl Iterator { - self.0.iter().flat_map(|(file, contracts)| { - contracts - .iter() - .flat_map(move |(name, c)| c.iter().map(move |c| (file, name, &c.contract))) - }) - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`). - pub fn contracts_with_files_and_version( - &self, - ) -> impl Iterator { - self.0.iter().flat_map(|(file, contracts)| { - contracts.iter().flat_map(move |(name, c)| { - c.iter().map(move |c| (file, name, &c.contract, &c.version)) - }) - }) - } - - /// Returns an iterator over all contracts and their source names. 
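All of the lookups above walk one shape, `file -> contract name -> versioned entries`: `find_first` is a flattened scan that stops at the first name match, while `find`/`get` index by file first. A reduced sketch with string stand-ins for the contract type:

```rust
use std::collections::BTreeMap;

// file -> contract name -> compiled versions (stand-in values)
type Versioned = BTreeMap<String, BTreeMap<String, Vec<String>>>;

/// First contract with a matching name, regardless of file: flatten the
/// per-file maps and return the earliest hit, as `find_first` does above.
fn find_first<'a>(map: &'a Versioned, name: &str) -> Option<&'a String> {
    map.values()
        .flat_map(|contracts| contracts.iter())
        .find_map(|(n, versions)| (n == name).then(|| versions.first()).flatten())
}

/// Exact lookup by file *and* name, as `find`/`get` do above.
fn find<'a>(map: &'a Versioned, file: &str, name: &str) -> Option<&'a String> {
    map.get(file)?.get(name)?.first()
}

fn main() {
    let mut map = Versioned::new();
    map.entry("src/Greeter.sol".into())
        .or_default()
        .entry("Greeter".into())
        .or_insert_with(|| vec!["0.8.23".into()]);
    assert_eq!(find_first(&map, "Greeter").map(String::as_str), Some("0.8.23"));
    assert_eq!(find(&map, "src/Greeter.sol", "Greeter"), find_first(&map, "Greeter"));
}
```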
- pub fn into_contracts(self) -> impl Iterator { - self.0.into_values().flat_map(|c| { - c.into_iter() - .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract))) - }) - } - - /// Returns an iterator over (`file`, `name`, `Contract`) - pub fn into_contracts_with_files(self) -> impl Iterator { - self.0.into_iter().flat_map(|(file, contracts)| { - contracts.into_iter().flat_map(move |(name, c)| { - let file = file.clone(); - c.into_iter().map(move |c| (file.clone(), name.clone(), c.contract)) - }) - }) - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) - pub fn into_contracts_with_files_and_version( - self, - ) -> impl Iterator { - self.0.into_iter().flat_map(|(file, contracts)| { - contracts.into_iter().flat_map(move |(name, c)| { - let file = file.clone(); - c.into_iter().map(move |c| (file.clone(), name.clone(), c.contract, c.version)) - }) - }) - } - - /// Sets the contract's file paths to `root` adjoined to `self.file`. - pub fn join_all(&mut self, root: &Path) -> &mut Self { - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(contract_path, contracts)| (root.join(contract_path), contracts)) - .collect(); - self - } - - /// Removes `base` from all contract paths - pub fn strip_prefix_all(&mut self, base: &Path) -> &mut Self { - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(contract_path, contracts)| { - ( - contract_path.strip_prefix(base).unwrap_or(&contract_path).to_path_buf(), - contracts, - ) - }) - .collect(); - self - } -} - -impl AsRef>> for VersionedContracts { - fn as_ref(&self) -> &FileToContractsMap> { - &self.0 - } -} - -impl AsMut>> for VersionedContracts { - fn as_mut(&mut self) -> &mut FileToContractsMap> { - &mut self.0 - } -} - -impl Deref for VersionedContracts { - type Target = FileToContractsMap>; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl IntoIterator for VersionedContracts { - type Item = (PathBuf, BTreeMap>); - type IntoIter = - std::collections::btree_map::IntoIter>>; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -/// A contract and the compiler version used to compile it -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct VersionedContract { - pub contract: Contract, - pub version: Version, - pub build_id: String, -} - -/// A mapping of `ArtifactId` and their `CompactContractBytecode` -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct ArtifactContracts(pub BTreeMap); - -impl Serialize for ArtifactContracts { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - self.0.serialize(serializer) - } -} - -impl<'de, T: Deserialize<'de>> Deserialize<'de> for ArtifactContracts { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - Ok(Self(BTreeMap::<_, _>::deserialize(deserializer)?)) - } -} - -impl Deref for ArtifactContracts { - type Target = BTreeMap; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for ArtifactContracts { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl> FromIterator<(ArtifactId, C)> for ArtifactContracts { - fn from_iter>(iter: T) -> Self { - Self(iter.into_iter().map(|(k, v)| (k, v.into())).collect()) - } -} - -impl IntoIterator for ArtifactContracts { - type Item = (ArtifactId, T); - type IntoIter = std::collections::btree_map::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} diff --git a/crates/compilers/src/compile/output/info.rs 
b/crates/compilers/src/compile/output/info.rs deleted file mode 100644 index 0947b288..00000000 --- a/crates/compilers/src/compile/output/info.rs +++ /dev/null @@ -1,155 +0,0 @@ -//! Commonly used identifiers for contracts in the compiled output. - -use std::{borrow::Cow, fmt, str::FromStr}; - -#[derive(Clone, Debug, PartialEq, Eq, thiserror::Error)] -#[error("{0}")] -pub struct ParseContractInfoError(String); - -/// Represents the common contract argument pattern for `:` where `:` is -/// optional. -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ContractInfo { - /// Location of the contract - pub path: Option, - /// Name of the contract - pub name: String, -} - -// === impl ContractInfo === - -impl ContractInfo { - /// Creates a new `ContractInfo` from the `info` str. - /// - /// This will attempt `ContractInfo::from_str`, if `info` matches the `:` format, - /// the `ContractInfo`'s `path` will be set. - /// - /// otherwise the `name` of the new object will be `info`. - /// - /// # Examples - /// - /// ``` - /// use foundry_compilers::info::ContractInfo; - /// - /// let info = ContractInfo::new("src/Greeter.sol:Greeter"); - /// assert_eq!( - /// info, - /// ContractInfo { path: Some("src/Greeter.sol".to_string()), name: "Greeter".to_string() } - /// ); - /// ``` - pub fn new(info: &str) -> Self { - info.parse().unwrap_or_else(|_| Self { path: None, name: info.to_string() }) - } -} - -impl fmt::Display for ContractInfo { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(ref path) = self.path { - write!(f, "{path}:{}", self.name) - } else { - write!(f, "{}", self.name) - } - } -} - -impl FromStr for ContractInfo { - type Err = ParseContractInfoError; - - fn from_str(s: &str) -> Result { - let err = || { - ParseContractInfoError( - "contract source info format must be `:` or ``" - .to_string(), - ) - }; - let mut iter = s.rsplit(':'); - let name = iter.next().ok_or_else(err)?.trim().to_string(); - let path = iter.next().map(str::to_string); - - if name.ends_with(".sol") || name.contains('/') { - return Err(err()); - } - - Ok(Self { path, name }) - } -} - -impl From for ContractInfo { - fn from(info: FullContractInfo) -> Self { - let FullContractInfo { path, name } = info; - Self { path: Some(path), name } - } -} - -/// The reference type for `ContractInfo` -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct ContractInfoRef<'a> { - pub path: Option>, - pub name: Cow<'a, str>, -} - -impl<'a> From for ContractInfoRef<'a> { - fn from(info: ContractInfo) -> Self { - ContractInfoRef { path: info.path.map(Into::into), name: info.name.into() } - } -} - -impl<'a> From<&'a ContractInfo> for ContractInfoRef<'a> { - fn from(info: &'a ContractInfo) -> Self { - ContractInfoRef { - path: info.path.as_deref().map(Into::into), - name: info.name.as_str().into(), - } - } -} -impl<'a> From for ContractInfoRef<'a> { - fn from(info: FullContractInfo) -> Self { - ContractInfoRef { path: Some(info.path.into()), name: info.name.into() } - } -} - -impl<'a> From<&'a FullContractInfo> for ContractInfoRef<'a> { - fn from(info: &'a FullContractInfo) -> Self { - ContractInfoRef { path: Some(info.path.as_str().into()), name: info.name.as_str().into() } - } -} - -/// Represents the common contract argument pattern `:` -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct FullContractInfo { - /// Location of the contract - pub path: String, - /// Name of the contract - pub name: String, -} - -impl fmt::Display for FullContractInfo { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { - write!(f, "{}:{}", self.path, self.name) - } -} - -impl FromStr for FullContractInfo { - type Err = ParseContractInfoError; - - fn from_str(s: &str) -> Result { - let (path, name) = s.split_once(':').ok_or_else(|| { - ParseContractInfoError("Expected `:`, got `{s}`".to_string()) - })?; - Ok(Self { path: path.to_string(), name: name.trim().to_string() }) - } -} - -impl TryFrom for FullContractInfo { - type Error = ParseContractInfoError; - - fn try_from(value: ContractInfo) -> Result { - let ContractInfo { path, name } = value; - Ok(Self { - path: path.ok_or_else(|| { - ParseContractInfoError("path to contract must be present".to_string()) - })?, - name, - }) - } -} diff --git a/crates/compilers/src/compile/output/mod.rs b/crates/compilers/src/compile/output/mod.rs deleted file mode 100644 index 06b5224d..00000000 --- a/crates/compilers/src/compile/output/mod.rs +++ /dev/null @@ -1,932 +0,0 @@ -//! The output of a compiled project -use contracts::{VersionedContract, VersionedContracts}; -use foundry_compilers_artifacts::{ - CompactContractBytecode, CompactContractRef, Contract, Severity, -}; -use foundry_compilers_core::error::{SolcError, SolcIoError}; -use info::ContractInfoRef; -use semver::Version; -use serde::{Deserialize, Serialize}; -use sources::{VersionedSourceFile, VersionedSourceFiles}; -use std::{ - collections::BTreeMap, - fmt, - ops::{Deref, DerefMut}, - path::{Path, PathBuf}, -}; -use yansi::Paint; - -use crate::{ - buildinfo::{BuildContext, RawBuildInfo}, - compilers::{multi::MultiCompiler, CompilationError, Compiler, CompilerOutput}, - Artifact, ArtifactId, ArtifactOutput, Artifacts, ConfigurableArtifacts, -}; - -pub mod contracts; -pub mod info; -pub mod sources; - -/// A mapping from build_id to [BuildContext]. -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(transparent)] -pub struct Builds(pub BTreeMap>); - -impl Default for Builds { - fn default() -> Self { - Self(Default::default()) - } -} - -impl Deref for Builds { - type Target = BTreeMap>; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for Builds { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl IntoIterator for Builds { - type Item = (String, BuildContext); - type IntoIter = std::collections::btree_map::IntoIter>; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -/// Contains a mixture of already compiled/cached artifacts and the input set of sources that still -/// need to be compiled. 
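The `path:name` convention parsed by `ContractInfo`/`FullContractInfo` above boils down to splitting on the last colon, with the path side optional. A hypothetical standalone helper, using `rsplit_once` (which keeps everything before the last colon as the path) instead of the `rsplit` iterator used above:

```rust
/// Split `path:name` on the *last* colon; a bare `name` has no path part.
fn parse_contract_info(s: &str) -> (Option<&str>, &str) {
    match s.rsplit_once(':') {
        Some((path, name)) => (Some(path), name.trim()),
        None => (None, s.trim()),
    }
}

fn main() {
    assert_eq!(
        parse_contract_info("src/Greeter.sol:Greeter"),
        (Some("src/Greeter.sol"), "Greeter")
    );
    assert_eq!(parse_contract_info("Greeter"), (None, "Greeter"));
}
```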
-#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct ProjectCompileOutput< - C: Compiler = MultiCompiler, - T: ArtifactOutput = ConfigurableArtifacts, -> { - /// contains the aggregated `CompilerOutput` - pub(crate) compiler_output: AggregatedCompilerOutput, - /// all artifact files from `output` that were freshly compiled and written - pub(crate) compiled_artifacts: Artifacts, - /// All artifacts that were read from cache - pub(crate) cached_artifacts: Artifacts, - /// errors that should be omitted - pub(crate) ignored_error_codes: Vec, - /// paths that should be omitted - pub(crate) ignored_file_paths: Vec, - /// set minimum level of severity that is treated as an error - pub(crate) compiler_severity_filter: Severity, - /// all build infos that were just compiled - pub(crate) builds: Builds, -} - -impl ProjectCompileOutput { - /// Converts all `\\` separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - self.compiler_output.slash_paths(); - self.compiled_artifacts.slash_paths(); - self.cached_artifacts.slash_paths(); - } - - /// Convenience function fo [`Self::slash_paths()`] - pub fn with_slashed_paths(mut self) -> Self { - self.slash_paths(); - self - } - - /// All artifacts together with their contract file name and name `:`. - /// - /// This returns a chained iterator of both cached and recompiled contract artifacts. - /// - /// Borrowed version of [`Self::into_artifacts`]. - pub fn artifact_ids(&self) -> impl Iterator + '_ { - let Self { cached_artifacts, compiled_artifacts, .. } = self; - cached_artifacts.artifacts::().chain(compiled_artifacts.artifacts::()) - } - - /// All artifacts together with their contract file name and name `:` - /// - /// This returns a chained iterator of both cached and recompiled contract artifacts - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::ConfigurableContractArtifact, ArtifactId, Project}; - /// use std::collections::btree_map::BTreeMap; - /// - /// let project = Project::builder().build(Default::default())?; - /// let contracts: BTreeMap = - /// project.compile()?.into_artifacts().collect(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn into_artifacts(self) -> impl Iterator { - let Self { cached_artifacts, compiled_artifacts, .. 
} = self; - cached_artifacts.into_artifacts::().chain(compiled_artifacts.into_artifacts::()) - } - - /// This returns a chained iterator of both cached and recompiled contract artifacts that yields - /// the contract name and the corresponding artifact - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::ConfigurableContractArtifact, Project}; - /// use std::collections::btree_map::BTreeMap; - /// - /// let project = Project::builder().build(Default::default())?; - /// let artifacts: BTreeMap = - /// project.compile()?.artifacts().collect(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn artifacts(&self) -> impl Iterator { - self.versioned_artifacts().map(|(name, (artifact, _))| (name, artifact)) - } - - /// This returns a chained iterator of both cached and recompiled contract artifacts that yields - /// the contract name and the corresponding artifact with its version - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::ConfigurableContractArtifact, Project}; - /// use semver::Version; - /// use std::collections::btree_map::BTreeMap; - /// - /// let project = Project::builder().build(Default::default())?; - /// let artifacts: BTreeMap = - /// project.compile()?.versioned_artifacts().collect(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn versioned_artifacts(&self) -> impl Iterator { - self.cached_artifacts - .artifact_files() - .chain(self.compiled_artifacts.artifact_files()) - .filter_map(|artifact| { - T::contract_name(&artifact.file) - .map(|name| (name, (&artifact.artifact, &artifact.version))) - }) - } - - /// All artifacts together with their contract file and name as tuple `(file, contract - /// name, artifact)` - /// - /// This returns a chained iterator of both cached and recompiled contract artifacts - /// - /// Borrowed version of [`Self::into_artifacts_with_files`]. - /// - /// **NOTE** the `file` will be returned as is, see also - /// [`Self::with_stripped_file_prefixes()`]. - pub fn artifacts_with_files( - &self, - ) -> impl Iterator + '_ { - let Self { cached_artifacts, compiled_artifacts, .. } = self; - cached_artifacts.artifacts_with_files().chain(compiled_artifacts.artifacts_with_files()) - } - - /// All artifacts together with their contract file and name as tuple `(file, contract - /// name, artifact)` - /// - /// This returns a chained iterator of both cached and recompiled contract artifacts - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::ConfigurableContractArtifact, Project}; - /// use std::{collections::btree_map::BTreeMap, path::PathBuf}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let contracts: Vec<(PathBuf, String, ConfigurableContractArtifact)> = - /// project.compile()?.into_artifacts_with_files().collect(); - /// # Ok::<_, Box>(()) - /// ``` - /// - /// **NOTE** the `file` will be returned as is, see also [`Self::with_stripped_file_prefixes()`] - pub fn into_artifacts_with_files(self) -> impl Iterator { - let Self { cached_artifacts, compiled_artifacts, .. } = self; - cached_artifacts - .into_artifacts_with_files() - .chain(compiled_artifacts.into_artifacts_with_files()) - } - - /// All artifacts together with their ID and the sources of the project. 
- /// - /// Note: this only returns the `SourceFiles` for freshly compiled contracts because, if not - /// included in the `Artifact` itself (see - /// [`foundry_compilers_artifacts::ConfigurableContractArtifact::source_file()`]), is only - /// available via the solc `CompilerOutput` - pub fn into_artifacts_with_sources( - self, - ) -> (BTreeMap, VersionedSourceFiles) { - let Self { cached_artifacts, compiled_artifacts, compiler_output, .. } = self; - - ( - cached_artifacts - .into_artifacts::() - .chain(compiled_artifacts.into_artifacts::()) - .collect(), - compiler_output.sources, - ) - } - - /// Strips the given prefix from all artifact file paths to make them relative to the given - /// `base` argument - /// - /// # Examples - /// - /// Make all artifact files relative to the project's root directory - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.with_stripped_file_prefixes(project.root()); - /// # Ok::<_, Box>(()) - pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self { - self.cached_artifacts = self.cached_artifacts.into_stripped_file_prefixes(base); - self.compiled_artifacts = self.compiled_artifacts.into_stripped_file_prefixes(base); - self.compiler_output.strip_prefix_all(base); - self - } - - /// Returns a reference to the (merged) solc compiler output. - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::contract::Contract, Project}; - /// use std::collections::btree_map::BTreeMap; - /// - /// let project = Project::builder().build(Default::default())?; - /// let contracts: BTreeMap = - /// project.compile()?.into_output().contracts_into_iter().collect(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn output(&self) -> &AggregatedCompilerOutput { - &self.compiler_output - } - - /// Returns a mutable reference to the (merged) solc compiler output. - pub fn output_mut(&mut self) -> &mut AggregatedCompilerOutput { - &mut self.compiler_output - } - - /// Consumes the output and returns the (merged) solc compiler output. - pub fn into_output(self) -> AggregatedCompilerOutput { - self.compiler_output - } - - /// Returns whether this type has a compiler output. - pub fn has_compiled_contracts(&self) -> bool { - self.compiler_output.is_empty() - } - - /// Returns whether this type does not contain compiled contracts. - pub fn is_unchanged(&self) -> bool { - self.compiler_output.is_unchanged() - } - - /// Returns the set of `Artifacts` that were cached and got reused during - /// [`crate::Project::compile()`] - pub fn cached_artifacts(&self) -> &Artifacts { - &self.cached_artifacts - } - - /// Returns the set of `Artifacts` that were compiled with `solc` in - /// [`crate::Project::compile()`] - pub fn compiled_artifacts(&self) -> &Artifacts { - &self.compiled_artifacts - } - - /// Sets the compiled artifacts for this output. 
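The two halves of the output are consulted the same way throughout: iteration chains the cached and freshly compiled sets, and point lookups can prefer the fresh set and fall back to the cache. A minimal sketch over two plain maps, with hypothetical stand-in types:

```rust
use std::collections::BTreeMap;

struct Output {
    compiled: BTreeMap<String, String>, // name -> artifact (stand-in types)
    cached: BTreeMap<String, String>,
}

impl Output {
    /// One pass over everything, cached and freshly compiled alike, the way
    /// `artifact_ids`/`into_artifacts` chain the two sets above.
    fn all(&self) -> impl Iterator<Item = (&String, &String)> {
        self.cached.iter().chain(self.compiled.iter())
    }

    /// Fresh artifacts win: check `compiled` first, then fall back to cache.
    fn find(&self, name: &str) -> Option<&String> {
        self.compiled.get(name).or_else(|| self.cached.get(name))
    }
}

fn main() {
    let out = Output {
        compiled: BTreeMap::from([("Greeter".into(), "fresh".into())]),
        cached: BTreeMap::from([
            ("Greeter".into(), "stale".into()),
            ("Token".into(), "cached".into()),
        ]),
    };
    assert_eq!(out.find("Greeter").unwrap(), "fresh"); // fresh wins over cached
    assert_eq!(out.all().count(), 3); // chained iteration sees both sets
}
```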
- pub fn set_compiled_artifacts(&mut self, new_compiled_artifacts: Artifacts) { - self.compiled_artifacts = new_compiled_artifacts; - } - - /// Returns a `BTreeMap` that maps the compiler version used during - /// [`crate::Project::compile()`] to a Vector of tuples containing the contract name and the - /// `Contract` - pub fn compiled_contracts_by_compiler_version( - &self, - ) -> BTreeMap> { - let mut contracts: BTreeMap<_, Vec<_>> = BTreeMap::new(); - let versioned_contracts = &self.compiler_output.contracts; - for (_, name, contract, version) in versioned_contracts.contracts_with_files_and_version() { - contracts - .entry(version.to_owned()) - .or_default() - .push((name.to_string(), contract.clone())); - } - contracts - } - - /// Removes the contract with matching path and name using the `:` pattern - /// where `path` is optional. - /// - /// If the `path` segment is `None`, then the first matching `Contract` is returned, see - /// [`Self::remove_first`]. - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, info::ContractInfo, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?; - /// let info = ContractInfo::new("src/Greeter.sol:Greeter"); - /// let contract = output.find_contract(&info).unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find_contract<'a>(&self, info: impl Into>) -> Option<&T::Artifact> { - let ContractInfoRef { path, name } = info.into(); - if let Some(path) = path { - self.find(path[..].as_ref(), &name) - } else { - self.find_first(&name) - } - } - - /// Finds the artifact with matching path and name - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?; - /// let contract = output.find("src/Greeter.sol".as_ref(), "Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find(&self, path: &Path, name: &str) -> Option<&T::Artifact> { - if let artifact @ Some(_) = self.compiled_artifacts.find(path, name) { - return artifact; - } - self.cached_artifacts.find(path, name) - } - - /// Finds the first contract with the given name - pub fn find_first(&self, name: &str) -> Option<&T::Artifact> { - if let artifact @ Some(_) = self.compiled_artifacts.find_first(name) { - return artifact; - } - self.cached_artifacts.find_first(name) - } - - /// Finds the artifact with matching path and name - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?; - /// let contract = output.find("src/Greeter.sol".as_ref(), "Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove(&mut self, path: &Path, name: &str) -> Option { - if let artifact @ Some(_) = self.compiled_artifacts.remove(path, name) { - return artifact; - } - self.cached_artifacts.remove(path, name) - } - - /// Removes the _first_ contract with the given name from the set - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let mut output = project.compile()?; - /// let contract = output.remove_first("Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove_first(&mut self, name: &str) -> Option { - if let artifact @ Some(_) = self.compiled_artifacts.remove_first(name) { - return 
artifact; - } - self.cached_artifacts.remove_first(name) - } - - /// Removes the contract with matching path and name using the `:` pattern - /// where `path` is optional. - /// - /// If the `path` segment is `None`, then the first matching `Contract` is returned, see - /// [Self::remove_first] - /// - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, info::ContractInfo, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let mut output = project.compile()?; - /// let info = ContractInfo::new("src/Greeter.sol:Greeter"); - /// let contract = output.remove_contract(&info).unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove_contract<'a>( - &mut self, - info: impl Into>, - ) -> Option { - let ContractInfoRef { path, name } = info.into(); - if let Some(path) = path { - self.remove(path[..].as_ref(), &name) - } else { - self.remove_first(&name) - } - } - - /// A helper functions that extracts the underlying [`CompactContractBytecode`] from the - /// [`foundry_compilers_artifacts::ConfigurableContractArtifact`] - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{ - /// artifacts::contract::CompactContractBytecode, contracts::ArtifactContracts, ArtifactId, - /// Project, - /// }; - /// use std::collections::btree_map::BTreeMap; - /// - /// let project = Project::builder().build(Default::default())?; - /// let contracts: ArtifactContracts = project.compile()?.into_contract_bytecodes().collect(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn into_contract_bytecodes( - self, - ) -> impl Iterator { - self.into_artifacts() - .map(|(artifact_id, artifact)| (artifact_id, artifact.into_contract_bytecode())) - } - - pub fn builds(&self) -> impl Iterator)> { - self.builds.iter() - } -} - -impl ProjectCompileOutput { - /// Returns whether any errors were emitted by the compiler. - pub fn has_compiler_errors(&self) -> bool { - self.compiler_output.has_error( - &self.ignored_error_codes, - &self.ignored_file_paths, - &self.compiler_severity_filter, - ) - } - - /// Returns whether any warnings were emitted by the compiler. - pub fn has_compiler_warnings(&self) -> bool { - self.compiler_output.has_warning(&self.ignored_error_codes, &self.ignored_file_paths) - } - - /// Panics if any errors were emitted by the compiler. - #[track_caller] - pub fn succeeded(self) -> Self { - self.assert_success(); - self - } - - /// Panics if any errors were emitted by the compiler. - #[track_caller] - pub fn assert_success(&self) { - assert!(!self.has_compiler_errors(), "\n{self}\n"); - } -} - -impl fmt::Display for ProjectCompileOutput { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.compiler_output.is_unchanged() { - f.write_str("Nothing to compile") - } else { - self.compiler_output - .diagnostics( - &self.ignored_error_codes, - &self.ignored_file_paths, - self.compiler_severity_filter, - ) - .fmt(f) - } - } -} - -/// The aggregated output of (multiple) compile jobs -/// -/// This is effectively a solc version aware `CompilerOutput` -#[derive(Clone, Debug, PartialEq, Eq, Serialize)] -pub struct AggregatedCompilerOutput { - /// all errors from all `CompilerOutput` - pub errors: Vec, - /// All source files combined with the solc version used to compile them - pub sources: VersionedSourceFiles, - /// All compiled contracts combined with the solc version used to compile them - pub contracts: VersionedContracts, - // All the `BuildInfo`s of solc invocations. 
- pub build_infos: Vec>, -} - -impl Default for AggregatedCompilerOutput { - fn default() -> Self { - Self { - errors: Vec::new(), - sources: Default::default(), - contracts: Default::default(), - build_infos: Default::default(), - } - } -} - -impl AggregatedCompilerOutput { - /// Converts all `\\` separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - self.sources.slash_paths(); - self.contracts.slash_paths(); - } - - pub fn diagnostics<'a>( - &'a self, - ignored_error_codes: &'a [u64], - ignored_file_paths: &'a [PathBuf], - compiler_severity_filter: Severity, - ) -> OutputDiagnostics<'a, C> { - OutputDiagnostics { - compiler_output: self, - ignored_error_codes, - ignored_file_paths, - compiler_severity_filter, - } - } - - pub fn is_empty(&self) -> bool { - self.contracts.is_empty() - } - - pub fn is_unchanged(&self) -> bool { - self.contracts.is_empty() && self.errors.is_empty() - } - - /// adds a new `CompilerOutput` to the aggregated output - pub fn extend( - &mut self, - version: Version, - build_info: RawBuildInfo, - output: CompilerOutput, - ) { - let build_id = build_info.id.clone(); - self.build_infos.push(build_info); - - let CompilerOutput { errors, sources, contracts } = output; - self.errors.extend(errors); - - for (path, source_file) in sources { - let sources = self.sources.as_mut().entry(path).or_default(); - sources.push(VersionedSourceFile { - source_file, - version: version.clone(), - build_id: build_id.clone(), - }); - } - - for (file_name, new_contracts) in contracts { - let contracts = self.contracts.as_mut().entry(file_name).or_default(); - for (contract_name, contract) in new_contracts { - let versioned = contracts.entry(contract_name).or_default(); - versioned.push(VersionedContract { - contract, - version: version.clone(), - build_id: build_id.clone(), - }); - } - } - } - - /// Creates all `BuildInfo` files in the given `build_info_dir` - /// - /// There can be multiple `BuildInfo`, since we support multiple versions. - /// - /// The created files have the md5 hash `{_format,solcVersion,solcLongVersion,input}` as their - /// file name - pub fn write_build_infos(&self, build_info_dir: &Path) -> Result<(), SolcError> { - if self.build_infos.is_empty() { - return Ok(()); - } - std::fs::create_dir_all(build_info_dir) - .map_err(|err| SolcIoError::new(err, build_info_dir))?; - for build_info in &self.build_infos { - trace!("writing build info file {}", build_info.id); - let file_name = format!("{}.json", build_info.id); - let file = build_info_dir.join(file_name); - std::fs::write(&file, &serde_json::to_string(build_info)?) 
- .map_err(|err| SolcIoError::new(err, file))?; - } - Ok(()) - } - - /// Finds the _first_ contract with the given name - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output(); - /// let contract = output.find_first("Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find_first(&self, contract: &str) -> Option> { - self.contracts.find_first(contract) - } - - /// Removes the _first_ contract with the given name from the set - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let mut output = project.compile()?.into_output(); - /// let contract = output.remove_first("Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove_first(&mut self, contract: &str) -> Option { - self.contracts.remove_first(contract) - } - - /// Removes the contract with matching path and name - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let mut output = project.compile()?.into_output(); - /// let contract = output.remove("src/Greeter.sol".as_ref(), "Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove(&mut self, path: &Path, contract: &str) -> Option { - self.contracts.remove(path, contract) - } - - /// Removes the contract with matching path and name using the `:` pattern - /// where `path` is optional. - /// - /// If the `path` segment is `None`, then the first matching `Contract` is returned, see - /// [Self::remove_first] - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, info::ContractInfo, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let mut output = project.compile()?.into_output(); - /// let info = ContractInfo::new("src/Greeter.sol:Greeter"); - /// let contract = output.remove_contract(&info).unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove_contract<'a>( - &mut self, - info: impl Into>, - ) -> Option { - let ContractInfoRef { path, name } = info.into(); - if let Some(path) = path { - self.remove(path[..].as_ref(), &name) - } else { - self.remove_first(&name) - } - } - - /// Iterate over all contracts and their names - pub fn contracts_iter(&self) -> impl Iterator { - self.contracts.contracts() - } - - /// Iterate over all contracts and their names - pub fn contracts_into_iter(self) -> impl Iterator { - self.contracts.into_contracts() - } - - /// Returns an iterator over (`file`, `name`, `Contract`) - pub fn contracts_with_files_iter( - &self, - ) -> impl Iterator { - self.contracts.contracts_with_files() - } - - /// Returns an iterator over (`file`, `name`, `Contract`) - pub fn contracts_with_files_into_iter( - self, - ) -> impl Iterator { - self.contracts.into_contracts_with_files() - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) - pub fn contracts_with_files_and_version_iter( - &self, - ) -> impl Iterator { - self.contracts.contracts_with_files_and_version() - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) - pub fn contracts_with_files_and_version_into_iter( - self, - ) -> impl Iterator { - self.contracts.into_contracts_with_files_and_version() - } - - /// Given the contract file's path and the contract's name, tries to 
return the contract's - /// bytecode, runtime bytecode, and ABI. - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output(); - /// let contract = output.get("src/Greeter.sol".as_ref(), "Greeter").unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn get(&self, path: &Path, contract: &str) -> Option> { - self.contracts.get(path, contract) - } - - /// Returns the output's source files and contracts separately, wrapped in helper types that - /// provide several helper methods - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output(); - /// let (sources, contracts) = output.split(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn split(self) -> (VersionedSourceFiles, VersionedContracts) { - (self.sources, self.contracts) - } - - /// Joins all file path with `root` - pub fn join_all(&mut self, root: &Path) -> &mut Self { - self.contracts.join_all(root); - self.sources.join_all(root); - self - } - - /// Strips the given prefix from all file paths to make them relative to the given - /// `base` argument. - /// - /// Convenience method for [Self::strip_prefix_all()] that consumes the type. - /// - /// # Examples - /// - /// Make all sources and contracts relative to the project's root directory - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output().with_stripped_file_prefixes(project.root()); - /// # Ok::<_, Box>(()) - /// ``` - pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self { - self.contracts.strip_prefix_all(base); - self.sources.strip_prefix_all(base); - self - } - - /// Removes `base` from all contract paths - pub fn strip_prefix_all(&mut self, base: &Path) -> &mut Self { - self.contracts.strip_prefix_all(base); - self.sources.strip_prefix_all(base); - self - } -} - -impl AggregatedCompilerOutput { - /// Whether the output contains a compiler error - /// - /// This adheres to the given `compiler_severity_filter` and also considers [CompilationError] - /// with the given [Severity] as errors. For example [Severity::Warning] will consider - /// [CompilationError]s with [Severity::Warning] and [Severity::Error] as errors. - pub fn has_error( - &self, - ignored_error_codes: &[u64], - ignored_file_paths: &[PathBuf], - compiler_severity_filter: &Severity, - ) -> bool { - self.errors.iter().any(|err| { - if err.is_error() { - // [Severity::Error] is always treated as an error - return true; - } - // check if the filter is set to something higher than the error's severity - if compiler_severity_filter.ge(&err.severity()) { - if compiler_severity_filter.is_warning() { - // skip ignored error codes and file path from warnings - return self.has_warning(ignored_error_codes, ignored_file_paths); - } - return true; - } - false - }) - } - - /// Checks if there are any compiler warnings that are not ignored by the specified error codes - /// and file paths. 
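The error/warning decision above folds three inputs together: hard errors always count, and warnings count only when the severity filter reaches down to them and the warning is not suppressed by an ignored code or path. A condensed sketch of that predicate, with severity as a plain enum and hypothetical field names:

```rust
use std::path::{Path, PathBuf};

#[derive(PartialEq, PartialOrd)]
enum Severity { Info, Warning, Error }

struct Diag { severity: Severity, code: Option<u64>, file: Option<PathBuf> }

/// Mirrors the shape of `has_error`/`should_ignore` above: errors always
/// fail the build; lesser diagnostics fail it only when the filter is lax
/// enough to include them and no ignore rule excuses them.
fn fails_build(d: &Diag, filter: &Severity, codes: &[u64], paths: &[PathBuf]) -> bool {
    if d.severity == Severity::Error {
        return true;
    }
    if *filter > d.severity {
        return false; // filter is stricter than this diagnostic's level
    }
    let code_ignored = d.code.map_or(false, |c| codes.contains(&c));
    let path_ignored = d
        .file
        .as_deref()
        .map_or(false, |f: &Path| paths.iter().any(|p| f.starts_with(p)));
    !(code_ignored || path_ignored)
}

fn main() {
    let warn = Diag {
        severity: Severity::Warning,
        code: Some(1878),
        file: Some("test/Foo.t.sol".into()),
    };
    // 1878 is in the ignored list, so this warning does not fail the build.
    assert!(!fails_build(&warn, &Severity::Warning, &[1878], &[]));
}
```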
- pub fn has_warning(&self, ignored_error_codes: &[u64], ignored_file_paths: &[PathBuf]) -> bool { - self.errors - .iter() - .any(|error| !self.should_ignore(ignored_error_codes, ignored_file_paths, error)) - } - - pub fn should_ignore( - &self, - ignored_error_codes: &[u64], - ignored_file_paths: &[PathBuf], - error: &C::CompilationError, - ) -> bool { - if !error.is_warning() { - return false; - } - - let mut ignore = false; - - if let Some(code) = error.error_code() { - ignore |= ignored_error_codes.contains(&code); - if let Some(loc) = error.source_location() { - let path = Path::new(&loc.file); - ignore |= - ignored_file_paths.iter().any(|ignored_path| path.starts_with(ignored_path)); - - // we ignore spdx and contract size warnings in test - // files. if we are looking at one of these warnings - // from a test file we skip - ignore |= self.is_test(path) && (code == 1878 || code == 5574); - } - } - - ignore - } - - /// Returns true if the contract is a expected to be a test - fn is_test(&self, contract_path: &Path) -> bool { - if contract_path.to_string_lossy().ends_with(".t.sol") { - return true; - } - - self.contracts.contracts_with_files().filter(|(path, _, _)| *path == contract_path).any( - |(_, _, contract)| { - contract.abi.as_ref().map_or(false, |abi| abi.functions.contains_key("IS_TEST")) - }, - ) - } -} - -/// Helper type to implement display for solc errors -#[derive(Clone, Debug)] -pub struct OutputDiagnostics<'a, C: Compiler> { - /// output of the compiled project - compiler_output: &'a AggregatedCompilerOutput, - /// the error codes to ignore - ignored_error_codes: &'a [u64], - /// the file paths to ignore - ignored_file_paths: &'a [PathBuf], - /// set minimum level of severity that is treated as an error - compiler_severity_filter: Severity, -} - -impl<'a, C: Compiler> OutputDiagnostics<'a, C> { - /// Returns true if there is at least one error of high severity - pub fn has_error(&self) -> bool { - self.compiler_output.has_error( - self.ignored_error_codes, - self.ignored_file_paths, - &self.compiler_severity_filter, - ) - } - - /// Returns true if there is at least one warning - pub fn has_warning(&self) -> bool { - self.compiler_output.has_warning(self.ignored_error_codes, self.ignored_file_paths) - } -} - -impl<'a, C: Compiler> fmt::Display for OutputDiagnostics<'a, C> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("Compiler run ")?; - if self.has_error() { - Paint::red("failed:") - } else if self.has_warning() { - Paint::yellow("successful with warnings:") - } else { - Paint::green("successful!") - } - .fmt(f)?; - - for err in &self.compiler_output.errors { - if !self.compiler_output.should_ignore( - self.ignored_error_codes, - self.ignored_file_paths, - err, - ) { - f.write_str("\n")?; - fmt::Display::fmt(&err, f)?; - } - } - - Ok(()) - } -} diff --git a/crates/compilers/src/compile/output/sources.rs b/crates/compilers/src/compile/output/sources.rs deleted file mode 100644 index e34fb464..00000000 --- a/crates/compilers/src/compile/output/sources.rs +++ /dev/null @@ -1,227 +0,0 @@ -use crate::SourceFile; -use foundry_compilers_core::utils::strip_prefix_owned; -use semver::Version; -use serde::{Deserialize, Serialize}; -use std::{ - collections::BTreeMap, - path::{Path, PathBuf}, -}; - -/// (source_file path -> `SourceFile` + solc version) -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[serde(transparent)] -pub struct VersionedSourceFiles(pub BTreeMap>); - -impl VersionedSourceFiles { - /// Converts all `\\` 
separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - #[cfg(windows)] - { - use path_slash::PathExt; - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(path, files)| (PathBuf::from(path.to_slash_lossy().as_ref()), files)) - .collect() - } - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns an iterator over all files - pub fn files(&self) -> impl Iterator { - self.0.keys() - } - - /// Returns an iterator over the source files' IDs and path. - pub fn into_ids(self) -> impl Iterator { - self.into_sources().map(|(path, source)| (source.id, path)) - } - - /// Returns an iterator over the source files' paths and IDs. - pub fn into_paths(self) -> impl Iterator { - self.into_ids().map(|(id, path)| (path, id)) - } - - /// Returns an iterator over the source files' IDs and path. - pub fn into_ids_with_version(self) -> impl Iterator { - self.into_sources_with_version().map(|(path, source, version)| (source.id, path, version)) - } - - /// Finds the _first_ source file with the given path. - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output(); - /// let source_file = output.sources.find_file("src/Greeter.sol".as_ref()).unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find_file(&self, path: &Path) -> Option<&SourceFile> { - self.sources().find(|&(p, _)| p == path).map(|(_, sf)| sf) - } - - /// Same as [Self::find_file] but also checks for version - pub fn find_file_and_version(&self, path: &Path, version: &Version) -> Option<&SourceFile> { - self.0.get(path).and_then(|contracts| { - contracts.iter().find_map(|source| { - if source.version == *version { - Some(&source.source_file) - } else { - None - } - }) - }) - } - - /// Finds the _first_ source file with the given id - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?.into_output(); - /// let source_file = output.sources.find_id(0).unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn find_id(&self, id: u32) -> Option<&SourceFile> { - self.sources().filter(|(_, source)| source.id == id).map(|(_, source)| source).next() - } - - /// Same as [Self::find_id] but also checks for version - pub fn find_id_and_version(&self, id: u32, version: &Version) -> Option<&SourceFile> { - self.sources_with_version() - .filter(|(_, source, v)| source.id == id && *v == version) - .map(|(_, source, _)| source) - .next() - } - - /// Removes the _first_ source_file with the given path from the set - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = Project::builder().build(Default::default())?; - /// let (mut sources, _) = project.compile()?.into_output().split(); - /// let source_file = sources.remove_by_path("src/Greeter.sol".as_ref()).unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove_by_path(&mut self, path: &Path) -> Option { - self.0.get_mut(path).and_then(|all_sources| { - if !all_sources.is_empty() { - Some(all_sources.remove(0).source_file) - } else { - None - } - }) - } - - /// Removes the _first_ source_file with the given id from the set - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{artifacts::*, Project}; - /// - /// let project = 
Project::builder().build(Default::default())?; - /// let (mut sources, _) = project.compile()?.into_output().split(); - /// let source_file = sources.remove_by_id(0).unwrap(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn remove_by_id(&mut self, id: u32) -> Option { - self.0 - .values_mut() - .filter_map(|sources| { - sources - .iter() - .position(|source| source.source_file.id == id) - .map(|pos| sources.remove(pos).source_file) - }) - .next() - } - - /// Returns an iterator over all contracts and their names. - pub fn sources(&self) -> impl Iterator { - self.0.iter().flat_map(|(path, sources)| { - sources.iter().map(move |source| (path, &source.source_file)) - }) - } - - /// Returns an iterator over (`file`, `SourceFile`, `Version`) - pub fn sources_with_version(&self) -> impl Iterator { - self.0.iter().flat_map(|(file, sources)| { - sources.iter().map(move |c| (file, &c.source_file, &c.version)) - }) - } - - /// Returns an iterator over all contracts and their source names. - pub fn into_sources(self) -> impl Iterator { - self.0.into_iter().flat_map(|(path, sources)| { - sources.into_iter().map(move |source| (path.clone(), source.source_file)) - }) - } - - /// Returns an iterator over all contracts and their source names. - pub fn into_sources_with_version(self) -> impl Iterator { - self.0.into_iter().flat_map(|(path, sources)| { - sources - .into_iter() - .map(move |source| (path.clone(), source.source_file, source.version)) - }) - } - - /// Sets the sources' file paths to `root` adjoined to `self.file`. - pub fn join_all(&mut self, root: &Path) -> &mut Self { - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(file_path, sources)| (root.join(file_path), sources)) - .collect(); - self - } - - /// Removes `base` from all source file paths - pub fn strip_prefix_all(&mut self, base: &Path) -> &mut Self { - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(file, sources)| (strip_prefix_owned(file, base), sources)) - .collect(); - self - } -} - -impl AsRef>> for VersionedSourceFiles { - fn as_ref(&self) -> &BTreeMap> { - &self.0 - } -} - -impl AsMut>> for VersionedSourceFiles { - fn as_mut(&mut self) -> &mut BTreeMap> { - &mut self.0 - } -} - -impl IntoIterator for VersionedSourceFiles { - type Item = (PathBuf, Vec); - type IntoIter = std::collections::btree_map::IntoIter>; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -/// A [SourceFile] and the compiler version used to compile it -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct VersionedSourceFile { - pub source_file: SourceFile, - pub version: Version, - pub build_id: String, -} diff --git a/crates/compilers/src/compile/project.rs b/crates/compilers/src/compile/project.rs deleted file mode 100644 index b76c47a5..00000000 --- a/crates/compilers/src/compile/project.rs +++ /dev/null @@ -1,786 +0,0 @@ -//! Manages compiling of a `Project` -//! -//! The compilation of a project is performed in several steps. -//! -//! First the project's dependency graph [`crate::Graph`] is constructed and all imported -//! dependencies are resolved. The graph holds all the relationships between the files and their -//! versions. From there the appropriate version set is derived -//! [`crate::Graph`] which need to be compiled with different -//! [`crate::compilers::solc::Solc`] versions. -//! -//! At this point we check if we need to compile a source file or whether we can reuse an _existing_ -//! `Artifact`. We don't to compile if: -//! - caching is enabled -//! 
- the file is **not** dirty -//! - the artifact for that file exists -//! -//! This concludes the preprocessing, and we now have either -//! - only `Source` files that need to be compiled -//! - only cached `Artifacts`, compilation can be skipped. This is considered an unchanged, -//! cached project -//! - a mix of both `Source` and `Artifacts`, where only the `Source` files need to be compiled -//! and the `Artifacts` can be reused. -//! -//! The final step is invoking `Solc` via the standard JSON format. -//! -//! ### Notes on [Import Path Resolution](https://docs.soliditylang.org/en/develop/path-resolution.html#path-resolution) -//! -//! In order to be able to support reproducible builds on all platforms, the Solidity compiler has -//! to abstract away the details of the filesystem where source files are stored. Paths used in -//! imports must work the same way everywhere while the command-line interface must be able to work -//! with platform-specific paths to provide a good user experience. This section aims to explain in -//! detail how Solidity reconciles these requirements. -//! -//! The compiler maintains an internal database (virtual filesystem or VFS for short) where each -//! source unit is assigned a unique source unit name which is an opaque and unstructured -//! identifier. When you use the import statement, you specify an import path that references a -//! source unit name. If the compiler does not find any source unit name matching the import path in -//! the VFS, it invokes the callback, which is responsible for obtaining the source code to be -//! placed under that name. -//! -//! This becomes relevant when dealing with resolved imports. -//! -//! #### Relative Imports -//! -//! ```solidity -//! import "./math/math.sol"; -//! import "contracts/tokens/token.sol"; -//! ``` -//! In the above `./math/math.sol` and `contracts/tokens/token.sol` are import paths while the -//! source unit names they translate to are `contracts/math/math.sol` and -//! `contracts/tokens/token.sol` respectively. -//! -//! #### Direct Imports -//! -//! An import that does not start with `./` or `../` is a direct import. -//! -//! ```solidity -//! import "/project/lib/util.sol"; // source unit name: /project/lib/util.sol -//! import "lib/util.sol"; // source unit name: lib/util.sol -//! import "@openzeppelin/address.sol"; // source unit name: @openzeppelin/address.sol -//! import "https://example.com/token.sol"; // source unit name: https://example.com/token.sol -//! ``` -//! -//! After applying any import remappings the import path simply becomes the source unit name. -//! -//! ##### Import Remapping -//! -//! ```solidity -//! import "github.com/ethereum/dapp-bin/library/math.sol"; // source unit name: dapp-bin/library/math.sol -//! ``` -//! -//! If compiled with `solc github.com/ethereum/dapp-bin/=dapp-bin/` the compiler will look for the -//! file in the VFS under `dapp-bin/library/math.sol`. If the file is not available there, the -//! source unit name will be passed to the Host Filesystem Loader, which will then look in -//! `/project/dapp-bin/library/math.sol`. -//! -//! -//! ### Caching and Change detection -//! -//! If caching is enabled in the [Project] a cache file will be created upon a successful solc -//! build. The [cache file](crate::cache::CompilerCache) stores metadata for all the files that were -//! provided to solc. -//! For every file the cache file contains a dedicated [cache entry](crate::cache::CacheEntry), -//! which represents the state of the file. 
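To make the remapping rule above concrete, here is a minimal sketch of how a remapping prefix rewrites an import path into a source unit name. It assumes the `Remapping` type re-exported by `foundry_compilers_artifacts` with public `name`/`path` string fields and a `FromStr` impl; treat it as an illustration of the rule, not of this crate's internals.

```rust
use foundry_compilers_artifacts::remappings::Remapping;

fn main() {
    // "@openzeppelin/=lib/openzeppelin-contracts/" maps an import prefix to a directory.
    let remapping: Remapping = "@openzeppelin/=lib/openzeppelin-contracts/".parse().unwrap();

    // Applying the remapping turns the import path into the source unit name
    // that solc looks up in its VFS.
    let import_path = "@openzeppelin/token/ERC20.sol";
    let source_unit_name = import_path.replacen(&remapping.name, &remapping.path, 1);
    assert_eq!(source_unit_name, "lib/openzeppelin-contracts/token/ERC20.sol");
}
```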
A Solidity file can contain several contracts; for every -//! contract a separate [artifact](crate::Artifact) is emitted. Therefore the entry also tracks all -//! artifacts emitted by a file. A Solidity file can also be compiled with several solc versions. -//! -//! For example in `A(<=0.8.10) imports C(>0.4.0)` and -//! `B(0.8.11) imports C(>0.4.0)`, both `A` and `B` import `C` but there's no solc version that's -//! compatible with both `A` and `B`, in which case two sets are compiled: [`A`, `C`] and [`B`, `C`]. -//! This is reflected in the cache entry which tracks the file's artifacts by version. -//! -//! The cache makes it possible to detect changes during recompilation, so that only the changed -//! (dirty) files need to be passed to solc. A file is considered dirty if: -//! - the file is new, not included in the existing cache -//! - the file was modified since the last compiler run, detected by comparing content hashes -//! - any of the imported files is dirty -//! - the file's artifacts don't exist or were deleted. -//! -//! Recompiling a project with cache enabled detects all files that meet these criteria and provides -//! solc with only these dirty files instead of the entire source set. - -use crate::{ - artifact_output::Artifacts, - buildinfo::RawBuildInfo, - cache::ArtifactsCache, - compilers::{Compiler, CompilerInput, CompilerOutput, Language}, - filter::SparseOutputFilter, - output::{AggregatedCompilerOutput, Builds}, - report, - resolver::GraphEdges, - ArtifactOutput, CompilerSettings, Graph, Project, ProjectCompileOutput, Sources, -}; -use foundry_compilers_core::error::Result; -use rayon::prelude::*; -use semver::Version; -use std::{collections::HashMap, path::PathBuf, time::Instant}; - -/// A set of different Solc installations with their version and the sources to be compiled -pub(crate) type VersionedSources<L> = HashMap<L, HashMap<Version, Sources>>; - -#[derive(Debug)] -pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { - /// Contains the relationship of the source files and their imports - edges: GraphEdges<C::ParsedSource>, - project: &'a Project<C, T>, - /// how to compile all the sources - sources: CompilerSources<C::Language>, -} - -impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { - /// Create a new `ProjectCompiler` to bootstrap the compilation process of the project's - /// sources. - pub fn new(project: &'a Project<C, T>) -> Result<Self> { - Self::with_sources(project, project.paths.read_input_files()?) - } - - /// Bootstraps the compilation process by resolving the dependency graph of all sources and the - /// appropriate `Solc` -> `Sources` set as well as the compile mode to use (parallel, - /// sequential) - /// - /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows - /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. - pub fn with_sources(project: &'a Project<C, T>, mut sources: Sources) -> Result<Self> { - if let Some(filter) = &project.sparse_output { - sources.retain(|f, _| filter.is_match(f)) - } - let graph = Graph::resolve_sources(&project.paths, sources)?; - let (sources, edges) = graph.into_sources_by_version( - project.offline, - &project.locked_versions, - &project.compiler, - )?; - - // If there are multiple different versions, and we can use multiple jobs we can compile - // them in parallel. 
- let jobs_cnt = || sources.values().map(|v| v.len()).sum::(); - let sources = CompilerSources { - jobs: (project.solc_jobs > 1 && jobs_cnt() > 1).then_some(project.solc_jobs), - sources, - }; - - Ok(Self { edges, project, sources }) - } - - /// Compiles all the sources of the `Project` in the appropriate mode - /// - /// If caching is enabled, the sources are filtered and only _dirty_ sources are recompiled. - /// - /// The output of the compile process can be a mix of reused artifacts and freshly compiled - /// `Contract`s - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile()?; - /// # Ok::<(), Box>(()) - /// ``` - pub fn compile(self) -> Result> { - let slash_paths = self.project.slash_paths; - - // drive the compiler statemachine to completion - let mut output = self.preprocess()?.compile()?.write_artifacts()?.write_cache()?; - - if slash_paths { - // ensures we always use `/` paths - output.slash_paths(); - } - - Ok(output) - } - - /// Does basic preprocessing - /// - sets proper source unit names - /// - check cache - fn preprocess(self) -> Result> { - trace!("preprocessing"); - let Self { edges, project, mut sources } = self; - - // convert paths on windows to ensure consistency with the `CompilerOutput` `solc` emits, - // which is unix style `/` - sources.slash_paths(); - - let mut cache = ArtifactsCache::new(project, edges)?; - // retain and compile only dirty sources and all their imports - sources.filter(&mut cache); - - Ok(PreprocessedState { sources, cache }) - } -} - -/// A series of states that comprise the [`ProjectCompiler::compile()`] state machine -/// -/// The main reason is to debug all states individually -#[derive(Debug)] -struct PreprocessedState<'a, T: ArtifactOutput, C: Compiler> { - /// Contains all the sources to compile. - sources: CompilerSources, - - /// Cache that holds `CacheEntry` objects if caching is enabled and the project is recompiled - cache: ArtifactsCache<'a, T, C>, -} - -impl<'a, T: ArtifactOutput, C: Compiler> PreprocessedState<'a, T, C> { - /// advance to the next state by compiling all sources - fn compile(self) -> Result> { - trace!("compiling"); - let PreprocessedState { sources, mut cache } = self; - - let mut output = sources.compile(&mut cache)?; - - // source paths get stripped before handing them over to solc, so solc never uses absolute - // paths, instead `--base-path ` is set. this way any metadata that's derived from - // data (paths) is relative to the project dir and should be independent of the current OS - // disk. 
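The `sources.filter(&mut cache)` step above is where the change detection described in the module docs happens. A minimal, self-contained sketch of the core idea, using std's `DefaultHasher` as a stand-in for the real content hash (an assumption made for brevity):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// A stand-in for the cache's content-based change detection: a file is dirty
// when its current content hash differs from the hash stored in its cache
// entry. (The real cache also propagates dirtiness through importers and
// checks that the entry's artifacts still exist on disk.)
fn content_hash(content: &str) -> u64 {
    let mut hasher = DefaultHasher::new();
    content.hash(&mut hasher);
    hasher.finish()
}

fn is_dirty(current_content: &str, cached_hash: u64) -> bool {
    content_hash(current_content) != cached_hash
}

fn main() {
    let cached = content_hash("contract A {}");
    assert!(!is_dirty("contract A {}", cached));
    assert!(is_dirty("contract A { function f() public {} }", cached));
}
```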
However internally we still want to keep absolute paths, so we join the - // contracts again - output.join_all(cache.project().root()); - - Ok(CompiledState { output, cache }) - } -} - -/// Represents the state after `solc` was successfully invoked -#[derive(Debug)] -struct CompiledState<'a, T: ArtifactOutput, C: Compiler> { - output: AggregatedCompilerOutput, - cache: ArtifactsCache<'a, T, C>, -} - -impl<'a, T: ArtifactOutput, C: Compiler> CompiledState<'a, T, C> { - /// advance to the next state by handling all artifacts - /// - /// Writes all output contracts to disk if enabled in the `Project` and if the build was - /// successful - #[instrument(skip_all, name = "write-artifacts")] - fn write_artifacts(self) -> Result> { - let CompiledState { output, cache } = self; - - let project = cache.project(); - let ctx = cache.output_ctx(); - // write all artifacts via the handler but only if the build succeeded and project wasn't - // configured with `no_artifacts == true` - let compiled_artifacts = if project.no_artifacts { - project.artifacts_handler().output_to_artifacts( - &output.contracts, - &output.sources, - ctx, - &project.paths, - ) - } else if output.has_error( - &project.ignored_error_codes, - &project.ignored_file_paths, - &project.compiler_severity_filter, - ) { - trace!("skip writing cache file due to solc errors: {:?}", output.errors); - project.artifacts_handler().output_to_artifacts( - &output.contracts, - &output.sources, - ctx, - &project.paths, - ) - } else { - trace!( - "handling artifact output for {} contracts and {} sources", - output.contracts.len(), - output.sources.len() - ); - // this emits the artifacts via the project's artifacts handler - let artifacts = project.artifacts_handler().on_output( - &output.contracts, - &output.sources, - &project.paths, - ctx, - )?; - - // emits all the build infos, if they exist - output.write_build_infos(project.build_info_path())?; - - artifacts - }; - - Ok(ArtifactsState { output, cache, compiled_artifacts }) - } -} - -/// Represents the state after all artifacts were written to disk -#[derive(Debug)] -struct ArtifactsState<'a, T: ArtifactOutput, C: Compiler> { - output: AggregatedCompilerOutput, - cache: ArtifactsCache<'a, T, C>, - compiled_artifacts: Artifacts, -} - -impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { - /// Writes the cache file - /// - /// this concludes the [`Project::compile()`] statemachine - fn write_cache(self) -> Result> { - let ArtifactsState { output, cache, compiled_artifacts } = self; - let project = cache.project(); - let ignored_error_codes = project.ignored_error_codes.clone(); - let ignored_file_paths = project.ignored_file_paths.clone(); - let compiler_severity_filter = project.compiler_severity_filter; - let has_error = - output.has_error(&ignored_error_codes, &ignored_file_paths, &compiler_severity_filter); - let skip_write_to_disk = project.no_artifacts || has_error; - trace!(has_error, project.no_artifacts, skip_write_to_disk, cache_path=?project.cache_path(),"prepare writing cache file"); - - let (cached_artifacts, cached_builds) = - cache.consume(&compiled_artifacts, &output.build_infos, !skip_write_to_disk)?; - - project.artifacts_handler().handle_cached_artifacts(&cached_artifacts)?; - - let builds = Builds( - output - .build_infos - .iter() - .map(|build_info| (build_info.id.clone(), build_info.build_context.clone())) - .chain(cached_builds) - .map(|(id, context)| (id, context.with_joined_paths(project.paths.root.as_path()))) - .collect(), - ); - - 
Ok(ProjectCompileOutput { - compiler_output: output, - compiled_artifacts, - cached_artifacts, - ignored_error_codes, - ignored_file_paths, - compiler_severity_filter, - builds, - }) - } -} - -/// Determines how the `solc <-> sources` pairs are executed. -#[derive(Debug, Clone)] -struct CompilerSources { - /// The sources to compile. - sources: VersionedSources, - /// The number of jobs to use for parallel compilation. - jobs: Option, -} - -impl CompilerSources { - /// Converts all `\\` separators to `/`. - /// - /// This effectively ensures that `solc` can find imported files like `/src/Cheats.sol` in the - /// VFS (the `CompilerInput` as json) under `src/Cheats.sol`. - fn slash_paths(&mut self) { - #[cfg(windows)] - { - use path_slash::PathBufExt; - - self.sources.values_mut().for_each(|versioned_sources| { - versioned_sources.values_mut().for_each(|sources| { - *sources = std::mem::take(sources) - .into_iter() - .map(|(path, source)| { - (PathBuf::from(path.to_slash_lossy().as_ref()), source) - }) - .collect() - }) - }); - } - } - - /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] - fn filter>( - &mut self, - cache: &mut ArtifactsCache<'_, T, C>, - ) { - cache.remove_dirty_sources(); - for versioned_sources in self.sources.values_mut() { - for (version, sources) in versioned_sources { - trace!("Filtering {} sources for {}", sources.len(), version); - cache.filter(sources, version); - trace!( - "Detected {} sources to compile {:?}", - sources.dirty().count(), - sources.dirty_files().collect::>() - ); - } - } - } - - /// Compiles all the files with `Solc` - fn compile, T: ArtifactOutput>( - self, - cache: &mut ArtifactsCache<'_, T, C>, - ) -> Result> { - let project = cache.project(); - let graph = cache.graph(); - - let jobs_cnt = self.jobs; - - let sparse_output = SparseOutputFilter::new(project.sparse_output.as_deref()); - - // Include additional paths collected during graph resolution. 
- let mut include_paths = project.paths.include_paths.clone(); - include_paths.extend(graph.include_paths().clone()); - - let mut jobs = Vec::new(); - for (language, versioned_sources) in self.sources { - for (version, sources) in versioned_sources { - if sources.is_empty() { - // nothing to compile - trace!("skip {} for empty sources set", version); - continue; - } - - // depending on the composition of the filtered sources, the output selection can be - // optimized - let mut opt_settings = project.settings.clone(); - let actually_dirty = - sparse_output.sparse_sources(&sources, &mut opt_settings, graph); - - if actually_dirty.is_empty() { - // nothing to compile for this particular language, all dirty files are in the - // other language set - trace!("skip {} run due to empty source set", version); - continue; - } - - trace!("calling {} with {} sources {:?}", version, sources.len(), sources.keys()); - - let settings = opt_settings - .with_base_path(&project.paths.root) - .with_allow_paths(&project.paths.allowed_paths) - .with_include_paths(&include_paths) - .with_remappings(&project.paths.remappings); - - let mut input = C::Input::build(sources, settings, language, version.clone()); - - input.strip_prefix(project.paths.root.as_path()); - - jobs.push((input, actually_dirty)); - } - } - - let results = if let Some(num_jobs) = jobs_cnt { - compile_parallel(&project.compiler, jobs, num_jobs) - } else { - compile_sequential(&project.compiler, jobs) - }?; - - let mut aggregated = AggregatedCompilerOutput::default(); - - for (input, mut output, actually_dirty) in results { - let version = input.version(); - - // Mark all files as seen by the compiler - for file in &actually_dirty { - cache.compiler_seen(file); - } - - let build_info = RawBuildInfo::new(&input, &output, project.build_info)?; - - output.retain_files( - actually_dirty - .iter() - .map(|f| f.strip_prefix(project.paths.root.as_path()).unwrap_or(f)), - ); - output.join_all(project.paths.root.as_path()); - - aggregated.extend(version.clone(), build_info, output); - } - - Ok(aggregated) - } -} - -type CompilationResult = Result, Vec)>>; - -/// Compiles the input set sequentially and returns a [Vec] of outputs. -fn compile_sequential( - compiler: &C, - jobs: Vec<(C::Input, Vec)>, -) -> CompilationResult { - jobs.into_iter() - .map(|(input, actually_dirty)| { - let start = Instant::now(); - report::compiler_spawn( - &input.compiler_name(), - input.version(), - actually_dirty.as_slice(), - ); - let output = compiler.compile(&input)?; - report::compiler_success(&input.compiler_name(), input.version(), &start.elapsed()); - - Ok((input, output, actually_dirty)) - }) - .collect() -} - -/// compiles the input set using `num_jobs` threads -fn compile_parallel( - compiler: &C, - jobs: Vec<(C::Input, Vec)>, - num_jobs: usize, -) -> CompilationResult { - // need to get the currently installed reporter before installing the pool, otherwise each new - // thread in the pool will get initialized with the default value of the `thread_local!`'s - // localkey. 
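The thread-local subtlety this comment describes is easy to reproduce in isolation. A minimal sketch, with an `Arc<String>` as a toy stand-in for the real reporter and rayon as the only dependency: capture the state before building the pool, then move it into the closure so every worker thread can reach it.

```rust
use rayon::prelude::*;
use std::sync::Arc;

fn main() {
    // Capture shared state *before* spawning the pool; pool threads would
    // otherwise only see the default value of a caller's thread-local.
    let scoped = Arc::new(String::from("reporter"));

    let pool = rayon::ThreadPoolBuilder::new().num_threads(2).build().unwrap();
    let results: Vec<usize> = pool.install(move || {
        (0..4usize)
            .into_par_iter()
            .map(move |job| {
                // Every worker can access the captured state here.
                let _reporter = scoped.clone();
                job * 2
            })
            .collect()
    });
    assert_eq!(results, vec![0, 2, 4, 6]);
}
```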
This way we keep access to the reporter in the rayon pool - let scoped_report = report::get_default(|reporter| reporter.clone()); - - // start a rayon threadpool that will execute all `Solc::compile()` processes - let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap(); - - pool.install(move || { - jobs.into_par_iter() - .map(move |(input, actually_dirty)| { - // set the reporter on this thread - let _guard = report::set_scoped(&scoped_report); - - let start = Instant::now(); - report::compiler_spawn( - &input.compiler_name(), - input.version(), - actually_dirty.as_slice(), - ); - compiler.compile(&input).map(move |output| { - report::compiler_success( - &input.compiler_name(), - input.version(), - &start.elapsed(), - ); - (input, output, actually_dirty) - }) - }) - .collect() - }) -} - -#[cfg(test)] -#[cfg(all(feature = "project-util", feature = "svm-solc"))] -mod tests { - use std::path::Path; - - use foundry_compilers_artifacts::output_selection::ContractOutputSelection; - - use crate::{ - compilers::multi::MultiCompiler, project_util::TempProject, ConfigurableArtifacts, - MinimalCombinedArtifacts, ProjectPathsConfig, - }; - - use super::*; - - fn init_tracing() { - let _ = tracing_subscriber::fmt() - .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) - .try_init() - .ok(); - } - - #[test] - fn can_preprocess() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let project = Project::builder() - .paths(ProjectPathsConfig::dapptools(&root).unwrap()) - .build(Default::default()) - .unwrap(); - - let compiler = ProjectCompiler::new(&project).unwrap(); - let prep = compiler.preprocess().unwrap(); - let cache = prep.cache.as_cached().unwrap(); - // ensure that we have exactly 3 empty entries which will be filled on compilation. 
- assert_eq!(cache.cache.files.len(), 3); - assert!(cache.cache.files.values().all(|v| v.artifacts.is_empty())); - - let compiled = prep.compile().unwrap(); - assert_eq!(compiled.output.contracts.files().count(), 3); - } - - #[test] - fn can_detect_cached_files() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); - - let compiled = project.compile().unwrap(); - compiled.assert_success(); - - let inner = project.project(); - let compiler = ProjectCompiler::new(inner).unwrap(); - let prep = compiler.preprocess().unwrap(); - assert!(prep.cache.as_cached().unwrap().dirty_sources.is_empty()) - } - - #[test] - fn can_recompile_with_optimized_output() { - let tmp = TempProject::::dapptools().unwrap(); - - tmp.add_source( - "A", - r#" - pragma solidity ^0.8.10; - import "./B.sol"; - contract A {} - "#, - ) - .unwrap(); - - tmp.add_source( - "B", - r#" - pragma solidity ^0.8.10; - contract B { - function hello() public {} - } - import "./C.sol"; - "#, - ) - .unwrap(); - - tmp.add_source( - "C", - r" - pragma solidity ^0.8.10; - contract C { - function hello() public {} - } - ", - ) - .unwrap(); - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present(); - - // modify A.sol - tmp.add_source( - "A", - r#" - pragma solidity ^0.8.10; - import "./B.sol"; - contract A { - function testExample() public {} - } - "#, - ) - .unwrap(); - - let compiler = ProjectCompiler::new(tmp.project()).unwrap(); - let state = compiler.preprocess().unwrap(); - let sources = &state.sources.sources; - - let cache = state.cache.as_cached().unwrap(); - - // 2 clean sources - assert_eq!(cache.cache.artifacts_len(), 2); - assert!(cache.cache.all_artifacts_exist()); - assert_eq!(cache.dirty_sources.len(), 1); - - let len = sources.values().map(|v| v.len()).sum::(); - // single solc - assert_eq!(len, 1); - - let filtered = &sources.values().next().unwrap().values().next().unwrap(); - - // 3 contracts total - assert_eq!(filtered.0.len(), 3); - // A is modified - assert_eq!(filtered.dirty().count(), 1); - assert!(filtered.dirty_files().next().unwrap().ends_with("A.sol")); - - let state = state.compile().unwrap(); - assert_eq!(state.output.sources.len(), 1); - for (f, source) in state.output.sources.sources() { - if f.ends_with("A.sol") { - assert!(source.ast.is_some()); - } else { - assert!(source.ast.is_none()); - } - } - - assert_eq!(state.output.contracts.len(), 1); - let (a, c) = state.output.contracts_iter().next().unwrap(); - assert_eq!(a, "A"); - assert!(c.abi.is_some() && c.evm.is_some()); - - let state = state.write_artifacts().unwrap(); - assert_eq!(state.compiled_artifacts.as_ref().len(), 1); - - let out = state.write_cache().unwrap(); - - let artifacts: Vec<_> = out.into_artifacts().collect(); - assert_eq!(artifacts.len(), 3); - for (_, artifact) in artifacts { - let c = artifact.into_contract_bytecode(); - assert!(c.abi.is_some() && c.bytecode.is_some() && c.deployed_bytecode.is_some()); - } - - tmp.artifacts_snapshot().unwrap().assert_artifacts_essentials_present(); - } - - #[test] - #[ignore] - fn can_compile_real_project() { - init_tracing(); - let paths = ProjectPathsConfig::builder() - .root("../../foundry-integration-tests/testdata/solmate") - .build() - .unwrap(); - let project = Project::builder().paths(paths).build(Default::default()).unwrap(); 
- let compiler = ProjectCompiler::new(&project).unwrap(); - let _out = compiler.compile().unwrap(); - } - - #[test] - fn extra_output_cached() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let mut project = TempProject::::new(paths.clone()).unwrap(); - - // Compile once without enabled extra output - project.compile().unwrap(); - - // Enable extra output of abi - project.project_mut().artifacts = - ConfigurableArtifacts::new([], [ContractOutputSelection::Abi]); - - // Ensure that abi appears after compilation and that we didn't recompile anything - let abi_path = project.project().paths.artifacts.join("Dapp.sol/Dapp.abi.json"); - assert!(!abi_path.exists()); - let output = project.compile().unwrap(); - assert!(output.compiler_output.is_empty()); - assert!(abi_path.exists()); - } - - #[test] - fn can_compile_leftovers_after_sparse() { - let mut tmp = TempProject::::dapptools().unwrap(); - - tmp.add_source( - "A", - r#" -pragma solidity ^0.8.10; -import "./B.sol"; -contract A {} -"#, - ) - .unwrap(); - - tmp.add_source( - "B", - r#" -pragma solidity ^0.8.10; -contract B {} -"#, - ) - .unwrap(); - - tmp.project_mut().sparse_output = Some(Box::new(|f: &Path| f.ends_with("A.sol"))); - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert_eq!(compiled.artifacts().count(), 1); - - tmp.project_mut().sparse_output = None; - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert_eq!(compiled.artifacts().count(), 2); - } -} diff --git a/crates/compilers/src/compilers/mod.rs b/crates/compilers/src/compilers/mod.rs deleted file mode 100644 index d839b0b1..00000000 --- a/crates/compilers/src/compilers/mod.rs +++ /dev/null @@ -1,302 +0,0 @@ -use crate::ProjectPathsConfig; -use core::fmt; -use foundry_compilers_artifacts::{ - error::SourceLocation, - output_selection::OutputSelection, - remappings::Remapping, - sources::{Source, Sources}, - Contract, FileToContractsMap, Severity, SourceFile, -}; -use foundry_compilers_core::error::Result; -use semver::{Version, VersionReq}; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use std::{ - borrow::Cow, - collections::{BTreeMap, BTreeSet, HashMap, HashSet}, - fmt::{Debug, Display}, - hash::Hash, - path::{Path, PathBuf}, - sync::{Mutex, OnceLock}, -}; - -pub mod multi; -pub mod solc; -pub mod vyper; -pub mod zksolc; -pub use vyper::*; - -/// A compiler version is either installed (available locally) or can be downloaded, from the remote -/// endpoint -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(untagged)] -pub enum CompilerVersion { - Installed(Version), - Remote(Version), -} - -impl CompilerVersion { - pub fn is_installed(&self) -> bool { - matches!(self, Self::Installed(_)) - } -} - -impl AsRef for CompilerVersion { - fn as_ref(&self) -> &Version { - match self { - Self::Installed(v) | Self::Remote(v) => v, - } - } -} - -impl From for Version { - fn from(s: CompilerVersion) -> Self { - match s { - CompilerVersion::Installed(v) | CompilerVersion::Remote(v) => v, - } - } -} - -impl fmt::Display for CompilerVersion { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.as_ref()) - } -} - -/// Compilation settings including evm_version, output_selection, etc. 
-pub trait CompilerSettings: - Default + Serialize + DeserializeOwned + Clone + Debug + Send + Sync + 'static -{ - /// Executes given fn with mutable reference to configured [OutputSelection]. - fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy); - - /// Returns true if artifacts compiled with given `other` config are compatible with this - /// config and if compilation can be skipped. - /// - /// Ensures that all settings fields are equal except for `output_selection` which is required - /// to be a subset of `other.output_selection`. - fn can_use_cached(&self, other: &Self) -> bool; - - /// Method which might be invoked to add remappings to the input. - fn with_remappings(self, _remappings: &[Remapping]) -> Self { - self - } - - /// Builder method to set the base path for the compiler. Primarily used by the solc - /// implementation to set `--base-path`. - fn with_base_path(self, _base_path: &Path) -> Self { - self - } - - /// Builder method to set the allowed paths for the compiler. Primarily used by the solc - /// implementation to set `--allow-paths`. - fn with_allow_paths(self, _allowed_paths: &BTreeSet<PathBuf>) -> Self { - self - } - - /// Builder method to set the include paths for the compiler. Primarily used by the solc - /// implementation to set `--include-path`. - fn with_include_paths(self, _include_paths: &BTreeSet<PathBuf>) -> Self { - self - } -} - -/// Input of a compiler, including sources and settings used for their compilation. -pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { - type Settings: CompilerSettings; - type Language: Language; - - /// Constructs one or multiple inputs from given sources set. Might return multiple inputs in - /// cases when sources need to be divided into sets per language (Yul + Solidity for example). - fn build( - sources: Sources, - settings: Self::Settings, - language: Self::Language, - version: Version, - ) -> Self; - - /// Returns language of the sources included in this input. - fn language(&self) -> Self::Language; - - /// Returns compiler version for which this input is intended. - fn version(&self) -> &Version; - - fn sources(&self) -> impl Iterator<Item = (&Path, &Source)>; - - /// Returns compiler name used by reporters to display output during compilation. - fn compiler_name(&self) -> Cow<'static, str>; - - /// Strips given prefix from all paths. - fn strip_prefix(&mut self, base: &Path); -} - -/// Parser of the source files which is used to identify imports and version requirements of the - /// given source. -/// -/// Used by the path resolver to resolve imports or determine compiler versions needed to compile -/// the given sources. -pub trait ParsedSource: Debug + Sized + Send + Clone { - type Language: Language; - - fn parse(content: &str, file: &Path) -> Result<Self>; - fn version_req(&self) -> Option<&VersionReq>; - - /// Invoked during import resolution. Should resolve imports for the given source, and populate - /// include_paths for compilers which support this config. - fn resolve_imports<C>( - &self, - paths: &ProjectPathsConfig<C>, - include_paths: &mut BTreeSet<PathBuf>, - ) -> Result<Vec<PathBuf>>; - fn language(&self) -> Self::Language; - - /// Used to configure [OutputSelection] for sparse builds. In certain cases, we might want to - /// include some of the file dependencies into the compiler output even if we might not be - /// directly interested in them. - /// - /// Example of such case is when we are compiling a Solidity file containing link references and - /// need them to be included in the output to deploy needed libraries. 
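The defaulted `with_*` builder methods above let compilers opt out of path-related flags they don't support. A toy illustration of the same pattern, shown next; the type and field names here are invented for the example:

```rust
use std::path::{Path, PathBuf};

// Settings types implement chainable builders that default to no-ops, so a
// compiler without a notion of, say, a base path can simply not override them.
#[derive(Clone, Debug, Default)]
struct ToySettings {
    base_path: Option<PathBuf>,
}

impl ToySettings {
    fn with_base_path(mut self, base: &Path) -> Self {
        self.base_path = Some(base.to_path_buf());
        self
    }
}

fn main() {
    let settings = ToySettings::default().with_base_path(Path::new("/project"));
    assert_eq!(settings.base_path.as_deref(), Some(Path::new("/project")));
}
```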
- /// - /// Receives iterator over imports of the current source. - /// - /// Returns iterator over paths to the files that should be compiled with full output selection. - fn compilation_dependencies<'a>( - &self, - _imported_nodes: impl Iterator, - ) -> impl Iterator - where - Self: 'a, - { - vec![].into_iter() - } -} - -/// Error returned by compiler. Might also represent a warning or informational message. -pub trait CompilationError: - Serialize + Send + Sync + Display + Debug + Clone + PartialEq + Eq + 'static -{ - fn is_warning(&self) -> bool; - fn is_error(&self) -> bool; - fn source_location(&self) -> Option; - fn severity(&self) -> Severity; - fn error_code(&self) -> Option; -} - -/// Output of the compiler, including contracts, sources and errors. Currently only generic over the -/// error but might be extended in the future. -#[derive(Debug, Serialize, Deserialize)] -pub struct CompilerOutput { - #[serde(default = "Vec::new", skip_serializing_if = "Vec::is_empty")] - pub errors: Vec, - #[serde(default)] - pub contracts: FileToContractsMap, - #[serde(default)] - pub sources: BTreeMap, -} - -impl CompilerOutput { - /// Retains only those files the given iterator yields - /// - /// In other words, removes all contracts for files not included in the iterator - pub fn retain_files(&mut self, files: I) - where - F: AsRef, - I: IntoIterator, - { - // Note: use `to_lowercase` here because solc not necessarily emits the exact file name, - // e.g. `src/utils/upgradeProxy.sol` is emitted as `src/utils/UpgradeProxy.sol` - let files: HashSet<_> = - files.into_iter().map(|s| s.as_ref().to_string_lossy().to_lowercase()).collect(); - self.contracts.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase())); - self.sources.retain(|f, _| files.contains(&f.to_string_lossy().to_lowercase())); - } - - pub fn merge(&mut self, other: Self) { - self.errors.extend(other.errors); - self.contracts.extend(other.contracts); - self.sources.extend(other.sources); - } - - pub fn join_all(&mut self, root: &Path) { - self.contracts = std::mem::take(&mut self.contracts) - .into_iter() - .map(|(path, contracts)| (root.join(path), contracts)) - .collect(); - self.sources = std::mem::take(&mut self.sources) - .into_iter() - .map(|(path, source)| (root.join(path), source)) - .collect(); - } - - pub fn map_err F>(self, op: O) -> CompilerOutput { - CompilerOutput { - errors: self.errors.into_iter().map(op).collect(), - contracts: self.contracts, - sources: self.sources, - } - } -} - -impl Default for CompilerOutput { - fn default() -> Self { - Self { errors: Vec::new(), contracts: BTreeMap::new(), sources: BTreeMap::new() } - } -} - -/// Keeps a set of languages recognized by the compiler. -pub trait Language: - Hash + Eq + Copy + Clone + Debug + Display + Send + Sync + Serialize + DeserializeOwned + 'static -{ - /// Extensions of source files recognized by the language set. - const FILE_EXTENSIONS: &'static [&'static str]; -} - -/// The main compiler abstraction trait. -/// -/// Currently mostly represents a wrapper around compiler binary aware of the version and able to -/// compile given input into [`CompilerOutput`] including artifacts and errors. -#[auto_impl::auto_impl(&, Box, Arc)] -pub trait Compiler: Send + Sync + Clone { - /// Input type for the compiler. Contains settings and sources to be compiled. - type Input: CompilerInput; - /// Error type returned by the compiler. - type CompilationError: CompilationError; - /// Source parser used for resolving imports and version requirements. 
- type ParsedSource: ParsedSource; - /// Compiler settings. - type Settings: CompilerSettings; - /// Enum of languages supported by the compiler. - type Language: Language; - - /// Main entrypoint for the compiler. Compiles given input into [CompilerOutput]. Takes - /// ownership over the input and returns back version with potential modifications made to it. - /// Returned input is always the one which was seen by the binary. - fn compile(&self, input: &Self::Input) -> Result>; - - /// Returns all versions available locally and remotely. Should return versions with stripped - /// metadata. - fn available_versions(&self, language: &Self::Language) -> Vec; -} - -pub(crate) fn cache_version( - path: PathBuf, - args: &[String], - f: impl FnOnce(&Path) -> Result, -) -> Result { - #[allow(clippy::complexity)] - static VERSION_CACHE: OnceLock, Version>>>> = - OnceLock::new(); - let mut lock = VERSION_CACHE - .get_or_init(|| Mutex::new(HashMap::new())) - .lock() - .unwrap_or_else(std::sync::PoisonError::into_inner); - - if let Some(version) = lock.get(&path).and_then(|versions| versions.get(args)) { - return Ok(version.clone()); - } - - let version = f(&path)?; - - lock.entry(path).or_default().insert(args.to_vec(), version.clone()); - - Ok(version) -} diff --git a/crates/compilers/src/compilers/multi.rs b/crates/compilers/src/compilers/multi.rs deleted file mode 100644 index 832cdf62..00000000 --- a/crates/compilers/src/compilers/multi.rs +++ /dev/null @@ -1,394 +0,0 @@ -use super::{ - solc::{SolcCompiler, SolcVersionedInput, SOLC_EXTENSIONS}, - vyper::{ - input::VyperVersionedInput, parser::VyperParsedSource, Vyper, VyperLanguage, - VYPER_EXTENSIONS, - }, - CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, - Language, ParsedSource, -}; -use crate::{ - artifacts::vyper::{VyperCompilationError, VyperSettings}, - resolver::parse::SolData, - solc::SolcSettings, -}; -use foundry_compilers_artifacts::{ - error::SourceLocation, - output_selection::OutputSelection, - remappings::Remapping, - sources::{Source, Sources}, - Error, Severity, SolcLanguage, -}; -use foundry_compilers_core::error::{Result, SolcError}; -use semver::Version; -use serde::{Deserialize, Serialize}; -use std::{ - borrow::Cow, - collections::BTreeSet, - fmt, - path::{Path, PathBuf}, -}; - -/// Compiler capable of compiling both Solidity and Vyper sources. -#[derive(Clone, Debug)] -pub struct MultiCompiler { - pub solc: Option, - pub vyper: Option, -} - -impl Default for MultiCompiler { - fn default() -> Self { - let vyper = Vyper::new("vyper").ok(); - - #[cfg(feature = "svm-solc")] - let solc = Some(SolcCompiler::AutoDetect); - #[cfg(not(feature = "svm-solc"))] - let solc = crate::solc::Solc::new("solc").map(SolcCompiler::Specific).ok(); - - Self { solc, vyper } - } -} - -impl MultiCompiler { - pub fn new(solc: Option, vyper_path: Option) -> Result { - let vyper = vyper_path.map(Vyper::new).transpose()?; - Ok(Self { solc, vyper }) - } -} - -/// Languages supported by the [MultiCompiler]. 
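The `cache_version` helper above memoizes `solc --version` lookups per `(path, args)` pair so the binary is only spawned once. The same `OnceLock<Mutex<HashMap<..>>>` shape, reduced to a toy std-only example:

```rust
use std::collections::HashMap;
use std::sync::{Mutex, OnceLock};

// Process-wide memoization: a lazily initialized, mutex-guarded map, queried
// before computing and updated after -- the same shape as `cache_version`.
fn cached_square(n: u64) -> u64 {
    static CACHE: OnceLock<Mutex<HashMap<u64, u64>>> = OnceLock::new();
    let mut map = CACHE.get_or_init(|| Mutex::new(HashMap::new())).lock().unwrap();
    *map.entry(n).or_insert_with(|| {
        // This closure only runs on a cache miss, like the first `--version` call.
        n * n
    })
}

fn main() {
    assert_eq!(cached_square(12), 144); // computed
    assert_eq!(cached_square(12), 144); // served from the cache
}
```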
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[serde(untagged)] -pub enum MultiCompilerLanguage { - Solc(SolcLanguage), - Vyper(VyperLanguage), -} - -impl From for MultiCompilerLanguage { - fn from(language: SolcLanguage) -> Self { - Self::Solc(language) - } -} - -impl From for MultiCompilerLanguage { - fn from(language: VyperLanguage) -> Self { - Self::Vyper(language) - } -} - -impl Language for MultiCompilerLanguage { - const FILE_EXTENSIONS: &'static [&'static str] = &["sol", "vy", "vyi", "yul"]; -} - -impl fmt::Display for MultiCompilerLanguage { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Solc(lang) => lang.fmt(f), - Self::Vyper(lang) => lang.fmt(f), - } - } -} - -/// Source parser for the [MultiCompiler]. Recognizes Solc and Vyper sources. -#[derive(Clone, Debug)] -pub enum MultiCompilerParsedSource { - Solc(SolData), - Vyper(VyperParsedSource), -} - -impl MultiCompilerParsedSource { - fn solc(&self) -> Option<&SolData> { - match self { - Self::Solc(parsed) => Some(parsed), - _ => None, - } - } - - fn vyper(&self) -> Option<&VyperParsedSource> { - match self { - Self::Vyper(parsed) => Some(parsed), - _ => None, - } - } -} - -/// Compilation error which may occur when compiling Solidity or Vyper sources. -#[derive(Clone, Debug, PartialEq, Eq, Serialize)] -#[serde(untagged)] -pub enum MultiCompilerError { - Solc(Error), - Vyper(VyperCompilationError), -} - -impl fmt::Display for MultiCompilerError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Solc(error) => error.fmt(f), - Self::Vyper(error) => error.fmt(f), - } - } -} - -/// Settings for the [MultiCompiler]. Includes settings for both Solc and Vyper compilers. -#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -pub struct MultiCompilerSettings { - pub solc: SolcSettings, - pub vyper: VyperSettings, -} - -impl CompilerSettings for MultiCompilerSettings { - fn can_use_cached(&self, other: &Self) -> bool { - self.solc.can_use_cached(&other.solc) && self.vyper.can_use_cached(&other.vyper) - } - - fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { - self.solc.update_output_selection(f); - self.vyper.update_output_selection(f); - } - - fn with_allow_paths(self, allowed_paths: &BTreeSet) -> Self { - Self { - solc: self.solc.with_allow_paths(allowed_paths), - vyper: self.vyper.with_allow_paths(allowed_paths), - } - } - - fn with_base_path(self, base_path: &Path) -> Self { - Self { - solc: self.solc.with_base_path(base_path), - vyper: self.vyper.with_base_path(base_path), - } - } - - fn with_include_paths(self, include_paths: &BTreeSet) -> Self { - Self { - solc: self.solc.with_include_paths(include_paths), - vyper: self.vyper.with_include_paths(include_paths), - } - } - - fn with_remappings(self, remappings: &[Remapping]) -> Self { - Self { - solc: self.solc.with_remappings(remappings), - vyper: self.vyper.with_remappings(remappings), - } - } -} - -impl From for SolcSettings { - fn from(settings: MultiCompilerSettings) -> Self { - settings.solc - } -} - -impl From for VyperSettings { - fn from(settings: MultiCompilerSettings) -> Self { - settings.vyper - } -} - -/// Input for the [MultiCompiler]. Either Solc or Vyper input. 
-#[derive(Clone, Debug, Serialize)] -#[serde(untagged)] -pub enum MultiCompilerInput { - Solc(SolcVersionedInput), - Vyper(VyperVersionedInput), -} - -impl CompilerInput for MultiCompilerInput { - type Language = MultiCompilerLanguage; - type Settings = MultiCompilerSettings; - - fn build( - sources: Sources, - settings: Self::Settings, - language: Self::Language, - version: Version, - ) -> Self { - match language { - MultiCompilerLanguage::Solc(language) => { - Self::Solc(SolcVersionedInput::build(sources, settings.solc, language, version)) - } - MultiCompilerLanguage::Vyper(language) => { - Self::Vyper(VyperVersionedInput::build(sources, settings.vyper, language, version)) - } - } - } - - fn compiler_name(&self) -> Cow<'static, str> { - match self { - Self::Solc(input) => input.compiler_name(), - Self::Vyper(input) => input.compiler_name(), - } - } - - fn language(&self) -> Self::Language { - match self { - Self::Solc(input) => MultiCompilerLanguage::Solc(input.language()), - Self::Vyper(input) => MultiCompilerLanguage::Vyper(input.language()), - } - } - - fn strip_prefix(&mut self, base: &Path) { - match self { - Self::Solc(input) => input.strip_prefix(base), - Self::Vyper(input) => input.strip_prefix(base), - } - } - - fn version(&self) -> &Version { - match self { - Self::Solc(input) => input.version(), - Self::Vyper(input) => input.version(), - } - } - - fn sources(&self) -> impl Iterator { - let ret: Box> = match self { - Self::Solc(input) => Box::new(input.sources()), - Self::Vyper(input) => Box::new(input.sources()), - }; - - ret - } -} - -impl Compiler for MultiCompiler { - type Input = MultiCompilerInput; - type CompilationError = MultiCompilerError; - type ParsedSource = MultiCompilerParsedSource; - type Settings = MultiCompilerSettings; - type Language = MultiCompilerLanguage; - - fn compile(&self, input: &Self::Input) -> Result> { - match input { - MultiCompilerInput::Solc(input) => { - if let Some(solc) = &self.solc { - Compiler::compile(solc, input).map(|res| res.map_err(MultiCompilerError::Solc)) - } else { - Err(SolcError::msg("solc compiler is not available")) - } - } - MultiCompilerInput::Vyper(input) => { - if let Some(vyper) = &self.vyper { - Compiler::compile(vyper, input) - .map(|res| res.map_err(MultiCompilerError::Vyper)) - } else { - Err(SolcError::msg("vyper compiler is not available")) - } - } - } - } - - fn available_versions(&self, language: &Self::Language) -> Vec { - match language { - MultiCompilerLanguage::Solc(language) => { - self.solc.as_ref().map(|s| s.available_versions(language)).unwrap_or_default() - } - MultiCompilerLanguage::Vyper(language) => { - self.vyper.as_ref().map(|v| v.available_versions(language)).unwrap_or_default() - } - } - } -} - -impl ParsedSource for MultiCompilerParsedSource { - type Language = MultiCompilerLanguage; - - fn parse(content: &str, file: &std::path::Path) -> Result { - let Some(extension) = file.extension().and_then(|e| e.to_str()) else { - return Err(SolcError::msg("failed to resolve file extension")); - }; - - if SOLC_EXTENSIONS.contains(&extension) { - ::parse(content, file).map(Self::Solc) - } else if VYPER_EXTENSIONS.contains(&extension) { - VyperParsedSource::parse(content, file).map(Self::Vyper) - } else { - Err(SolcError::msg("unexpected file extension")) - } - } - - fn version_req(&self) -> Option<&semver::VersionReq> { - match self { - Self::Solc(parsed) => parsed.version_req(), - Self::Vyper(parsed) => parsed.version_req(), - } - } - - fn resolve_imports( - &self, - paths: &crate::ProjectPathsConfig, - 
include_paths: &mut BTreeSet, - ) -> Result> { - match self { - Self::Solc(parsed) => parsed.resolve_imports(paths, include_paths), - Self::Vyper(parsed) => parsed.resolve_imports(paths, include_paths), - } - } - - fn language(&self) -> Self::Language { - match self { - Self::Solc(parsed) => MultiCompilerLanguage::Solc(parsed.language()), - Self::Vyper(parsed) => MultiCompilerLanguage::Vyper(parsed.language()), - } - } - - fn compilation_dependencies<'a>( - &self, - imported_nodes: impl Iterator, - ) -> impl Iterator - where - Self: 'a, - { - match self { - Self::Solc(parsed) => parsed - .compilation_dependencies( - imported_nodes.filter_map(|(path, node)| node.solc().map(|node| (path, node))), - ) - .collect::>(), - Self::Vyper(parsed) => parsed - .compilation_dependencies( - imported_nodes.filter_map(|(path, node)| node.vyper().map(|node| (path, node))), - ) - .collect::>(), - } - .into_iter() - } -} - -impl CompilationError for MultiCompilerError { - fn is_warning(&self) -> bool { - match self { - Self::Solc(error) => error.is_warning(), - Self::Vyper(error) => error.is_warning(), - } - } - fn is_error(&self) -> bool { - match self { - Self::Solc(error) => error.is_error(), - Self::Vyper(error) => error.is_error(), - } - } - - fn source_location(&self) -> Option { - match self { - Self::Solc(error) => error.source_location(), - Self::Vyper(error) => error.source_location(), - } - } - - fn severity(&self) -> Severity { - match self { - Self::Solc(error) => error.severity(), - Self::Vyper(error) => error.severity(), - } - } - - fn error_code(&self) -> Option { - match self { - Self::Solc(error) => error.error_code(), - Self::Vyper(error) => error.error_code(), - } - } -} diff --git a/crates/compilers/src/compilers/solc/compiler.rs b/crates/compilers/src/compilers/solc/compiler.rs deleted file mode 100644 index aefcc629..00000000 --- a/crates/compilers/src/compilers/solc/compiler.rs +++ /dev/null @@ -1,783 +0,0 @@ -use crate::resolver::parse::SolData; -use foundry_compilers_artifacts::{sources::Source, CompilerOutput, SolcInput}; -use foundry_compilers_core::{ - error::{Result, SolcError}, - utils::{self, SUPPORTS_BASE_PATH, SUPPORTS_INCLUDE_PATH}, -}; -use itertools::Itertools; -use semver::{Version, VersionReq}; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use std::{ - collections::BTreeSet, - path::{Path, PathBuf}, - process::{Command, Output, Stdio}, - str::FromStr, -}; - -/// Extensions acceptable by solc compiler. -pub const SOLC_EXTENSIONS: &[&str] = &["sol", "yul"]; - -/// take the lock in tests, we use this to enforce that -/// a test does not run while a compiler version is being installed -/// -/// This ensures that only one thread installs a missing `solc` exe. -/// Instead of taking this lock in `Solc::blocking_install`, the lock should be taken before -/// installation is detected. -#[cfg(feature = "svm-solc")] -#[cfg(any(test, feature = "test-utils"))] -#[macro_export] -macro_rules! take_solc_installer_lock { - ($lock:ident) => { - let lock_path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join(".lock"); - let lock_file = std::fs::OpenOptions::new() - .read(true) - .write(true) - .create(true) - .truncate(false) - .open(lock_path) - .unwrap(); - let mut lock = fd_lock::RwLock::new(lock_file); - let $lock = lock.write().unwrap(); - }; -} - -/// A list of upstream Solc releases, used to check which version -/// we should download. 
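The installer lock taken by the `take_solc_installer_lock!` macro above is an advisory file lock, so it also excludes other processes (for example, parallel test binaries) from racing a `solc` install. A standalone sketch of the same `fd-lock` usage, assuming a current fd-lock release with the `RwLock::write` API the macro itself relies on:

```rust
use std::fs::OpenOptions;

fn main() -> std::io::Result<()> {
    // Same pattern as the macro: open (or create) a lock file and hold an
    // exclusive write lock for the duration of the critical section.
    let file = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .truncate(false)
        .open(".lock")?;
    let mut lock = fd_lock::RwLock::new(file);
    let _guard = lock.write()?; // other processes block here until drop
    // ... exclusive section: e.g. install a missing solc binary ...
    Ok(())
}
```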
-/// The boolean value marks whether there was an error accessing the release list -#[cfg(feature = "svm-solc")] -pub static RELEASES: once_cell::sync::Lazy<(svm::Releases, Vec, bool)> = - once_cell::sync::Lazy::new(|| { - match serde_json::from_str::(svm_builds::RELEASE_LIST_JSON) { - Ok(releases) => { - let sorted_versions = releases.clone().into_versions(); - (releases, sorted_versions, true) - } - Err(err) => { - error!("{:?}", err); - Default::default() - } - } - }); - -/// Abstraction over `solc` command line utility -/// -/// Supports sync and async functions. -/// -/// By default the solc path is configured as follows, with descending priority: -/// 1. `SOLC_PATH` environment variable -/// 2. [svm](https://github.com/roynalnaruto/svm-rs)'s `global_version` (set via `svm use -/// `), stored at `/.global_version` -/// 3. `solc` otherwise -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] -pub struct Solc { - /// Path to the `solc` executable - pub solc: PathBuf, - /// Compiler version. - pub version: Version, - /// Value for --base-path arg. - pub base_path: Option, - /// Value for --allow-paths arg. - pub allow_paths: BTreeSet, - /// Value for --include-paths arg. - pub include_paths: BTreeSet, - /// Additional arbitrary arguments. - pub extra_args: Vec, -} - -impl Solc { - /// A new instance which points to `solc`. Invokes `solc --version` to determine the version. - /// - /// Returns error if `solc` is not found in the system or if the version cannot be retrieved. - pub fn new(path: impl Into) -> Result { - let path = path.into(); - let version = Self::version(path.clone())?; - Ok(Self::new_with_version(path, version)) - } - - /// A new instance which points to `solc` with additional cli arguments. Invokes `solc - /// --version` to determine the version. - /// - /// Returns error if `solc` is not found in the system or if the version cannot be retrieved. - pub fn new_with_args( - path: impl Into, - extra_args: impl IntoIterator>, - ) -> Result { - let args = extra_args.into_iter().map(Into::into).collect::>(); - let path = path.into(); - let version = Self::version_with_args(path.clone(), &args)?; - - let mut solc = Self::new_with_version(path, version); - solc.extra_args = args; - - Ok(solc) - } - - /// A new instance which points to `solc` with the given version - pub fn new_with_version(path: impl Into, version: Version) -> Self { - Self { - solc: path.into(), - version, - base_path: None, - allow_paths: Default::default(), - include_paths: Default::default(), - extra_args: Default::default(), - } - } - - /// Parses the given source looking for the `pragma` definition and - /// returns the corresponding SemVer version requirement. - pub fn source_version_req(source: &Source) -> Result { - let version = - utils::find_version_pragma(&source.content).ok_or(SolcError::PragmaNotFound)?; - Ok(SolData::parse_version_req(version.as_str())?) - } - - /// Given a Solidity source, it detects the latest compiler version which can be used - /// to build it, and returns it. - /// - /// If the required compiler version is not installed, it also proceeds to install it. - #[cfg(feature = "svm-solc")] - pub fn detect_version(source: &Source) -> Result { - // detects the required solc version - let sol_version = Self::source_version_req(source)?; - Self::ensure_installed(&sol_version) - } - - /// Given a Solidity version requirement, it detects the latest compiler version which can be - /// used to build it, and returns it. 
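The selection rule used by `ensure_installed` (and by `find_matching_installation` further down) reduces to: scan a sorted version list from newest to oldest and take the first match. A minimal sketch with `semver`:

```rust
use semver::{Version, VersionReq};

// Assuming `sorted` is ascending, scanning from the back yields the newest
// version that satisfies the requirement -- the rule applied to both locally
// installed and remotely available versions.
fn best_match(sorted: &[Version], req: &VersionReq) -> Option<Version> {
    sorted.iter().rev().find(|v| req.matches(v)).cloned()
}

fn main() {
    let sorted = [Version::new(0, 8, 9), Version::new(0, 8, 19), Version::new(0, 8, 24)];
    let req: VersionReq = "^0.8.10".parse().unwrap();
    assert_eq!(best_match(&sorted, &req), Some(Version::new(0, 8, 24)));
}
```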
- /// - /// If the required compiler version is not installed, it also proceeds to install it. - #[cfg(feature = "svm-solc")] - pub fn ensure_installed(sol_version: &VersionReq) -> Result { - #[cfg(test)] - take_solc_installer_lock!(_lock); - - // load the local / remote versions - let versions = Self::installed_versions(); - - let local_versions = Self::find_matching_installation(&versions, sol_version); - let remote_versions = Self::find_matching_installation(&RELEASES.1, sol_version); - - // if there's a better upstream version than the one we have, install it - Ok(match (local_versions, remote_versions) { - (Some(local), None) => local, - (Some(local), Some(remote)) => { - if remote > local { - Self::blocking_install(&remote)?; - remote - } else { - local - } - } - (None, Some(version)) => { - Self::blocking_install(&version)?; - version - } - // do nothing otherwise - _ => return Err(SolcError::VersionNotFound), - }) - } - - /// Assuming the `versions` array is sorted, it returns the first element which satisfies - /// the provided [`VersionReq`] - pub fn find_matching_installation( - versions: &[Version], - required_version: &VersionReq, - ) -> Option { - // iterate in reverse to find the last match - versions.iter().rev().find(|version| required_version.matches(version)).cloned() - } - - /// Returns the path for a [svm](https://github.com/roynalnaruto/svm-rs) installed version. - /// - /// # Examples - /// - /// ```no_run - /// use foundry_compilers::solc::Solc; - /// use semver::Version; - /// - /// let solc = Solc::find_svm_installed_version(&Version::new(0, 8, 9))?; - /// assert_eq!(solc, Some(Solc::new("~/.svm/0.8.9/solc-0.8.9")?)); - /// - /// Ok::<_, Box>(()) - /// ``` - pub fn find_svm_installed_version(version: &Version) -> Result> { - let version = format!("{}.{}.{}", version.major, version.minor, version.patch); - let solc = Self::svm_home() - .ok_or_else(|| SolcError::msg("svm home dir not found"))? - .join(&version) - .join(format!("solc-{version}")); - - if !solc.is_file() { - return Ok(None); - } - Self::new(&solc).map(Some) - } - - /// Returns the directory in which [svm](https://github.com/roynalnaruto/svm-rs) stores all versions - /// - /// This will be: - /// - `~/.svm` on unix, if it exists - /// - $XDG_DATA_HOME (~/.local/share/svm) if the svm folder does not exist. - pub fn svm_home() -> Option { - if let Some(home_dir) = home::home_dir() { - let home_dot_svm = home_dir.join(".svm"); - if home_dot_svm.exists() { - return Some(home_dot_svm); - } - } - dirs::data_dir().map(|dir| dir.join("svm")) - } - - /// Returns the `semver::Version` [svm](https://github.com/roynalnaruto/svm-rs)'s `.global_version` is currently set to. 
- /// `global_version` is configured with `svm use <version>` - /// - /// This will read the version string (e.g. "0.8.9") that the `~/.svm/.global_version` file - /// contains - pub fn svm_global_version() -> Option<Version> { - let home = Self::svm_home()?; - let version = std::fs::read_to_string(home.join(".global_version")).ok()?; - Version::parse(&version).ok() - } - - /// Returns the list of all solc instances installed at `SVM_HOME` - pub fn installed_versions() -> Vec<Version> { - Self::svm_home() - .map(|home| utils::installed_versions(&home).unwrap_or_default()) - .unwrap_or_default() - } - - /// Returns the list of all versions that are available to download - #[cfg(feature = "svm-solc")] - pub fn released_versions() -> Vec<Version> { - RELEASES.1.clone().into_iter().collect() - } - - /// Installs the provided version of Solc on the machine under the svm dir and returns the - /// [Solc] instance pointing to the installation. - /// - /// # Examples - /// - /// ```no_run - /// use foundry_compilers::{solc::Solc, utils::ISTANBUL_SOLC}; - /// - /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { - /// let solc = Solc::install(&ISTANBUL_SOLC).await?; - /// # Ok(()) - /// # } - /// ``` - #[cfg(feature = "svm-solc")] - pub async fn install(version: &Version) -> std::result::Result { - trace!("installing solc version \"{}\"", version); - crate::report::solc_installation_start(version); - match svm::install(version).await { - Ok(path) => { - crate::report::solc_installation_success(version); - Ok(Self::new_with_version(path, version.clone())) - } - Err(err) => { - crate::report::solc_installation_error(version, &err.to_string()); - Err(err) - } - } - } - - /// Blocking version of `Self::install` - #[cfg(feature = "svm-solc")] - pub fn blocking_install(version: &Version) -> std::result::Result { - use foundry_compilers_core::utils::RuntimeOrHandle; - - #[cfg(test)] - crate::take_solc_installer_lock!(_lock); - - let version = Version::new(version.major, version.minor, version.patch); - - trace!("blocking installing solc version \"{}\"", version); - crate::report::solc_installation_start(&version); - // The async version `svm::install` is used instead of `svm::blocking_install` - // because the underlying `reqwest::blocking::Client` does not behave well - // inside of a Tokio runtime. See: https://github.com/seanmonstar/reqwest/issues/1017 - match RuntimeOrHandle::new().block_on(svm::install(&version)) { - Ok(path) => { - crate::report::solc_installation_success(&version); - Ok(Self::new_with_version(path, version.clone())) - } - Err(err) => { - crate::report::solc_installation_error(&version, &err.to_string()); - Err(err) - } - } - } - - /// Verify that the checksum for this version of solc is correct. 
We check against the SHA256 - /// checksum from the build information published by [binaries.soliditylang.org](https://binaries.soliditylang.org/) - #[cfg(feature = "svm-solc")] - pub fn verify_checksum(&self) -> Result<()> { - let version = self.version_short(); - let mut version_path = svm::version_path(version.to_string().as_str()); - version_path.push(format!("solc-{}", version.to_string().as_str())); - trace!(target:"solc", "reading solc binary for checksum {:?}", version_path); - let content = - std::fs::read(&version_path).map_err(|err| SolcError::io(err, version_path.clone()))?; - - if !RELEASES.2 { - // we skip checksum verification because the underlying request to fetch release info - // failed so we have nothing to compare against - return Ok(()); - } - - #[cfg(windows)] - { - // Prior to 0.7.2, binaries are released as exe files which are hard to verify: - // - const V0_7_2: Version = Version::new(0, 7, 2); - if version < V0_7_2 { - return Ok(()); - } - } - - use sha2::Digest; - let mut hasher = sha2::Sha256::new(); - hasher.update(content); - let checksum_calc = &hasher.finalize()[..]; - - let checksum_found = &RELEASES - .0 - .get_checksum(&version) - .ok_or_else(|| SolcError::ChecksumNotFound { version: version.clone() })?; - - if checksum_calc == checksum_found { - Ok(()) - } else { - use alloy_primitives::hex; - let expected = hex::encode(checksum_found); - let detected = hex::encode(checksum_calc); - warn!(target: "solc", "checksum mismatch for {:?}, expected {}, but found {} for file {:?}", version, expected, detected, version_path); - Err(SolcError::ChecksumMismatch { version, expected, detected, file: version_path }) - } - } - - /// Convenience function for compiling all sources under the given path - pub fn compile_source(&self, path: &Path) -> Result { - let mut res: CompilerOutput = Default::default(); - for input in - SolcInput::resolve_and_build(Source::read_sol_yul_from(path)?, Default::default()) - { - let input = input.sanitized(&self.version); - let output = self.compile(&input)?; - res.merge(output) - } - - Ok(res) - } - - /// Same as [`Self::compile()`], but only returns those files which are included in the - /// `CompilerInput`. - /// - /// In other words, this removes those files from the `CompilerOutput` that are __not__ included - /// in the provided `CompilerInput`. - /// - /// # Examples - pub fn compile_exact(&self, input: &SolcInput) -> Result { - let mut out = self.compile(input)?; - out.retain_files(input.sources.keys().map(|p| p.as_path())); - Ok(out) - } - - /// Compiles with `--standard-json` and deserializes the output as [`CompilerOutput`]. - /// - /// # Examples - /// - /// ```no_run - /// use foundry_compilers::{ - /// artifacts::{SolcInput, Source}, - /// compilers::{Compiler, CompilerInput}, - /// solc::Solc, - /// }; - /// - /// let solc = Solc::new("solc")?; - /// let input = SolcInput::resolve_and_build( - /// Source::read_sol_yul_from("./contracts".as_ref()).unwrap(), - /// Default::default(), - /// ); - /// let output = solc.compile(&input)?; - /// # Ok::<_, Box>(()) - /// ``` - pub fn compile(&self, input: &T) -> Result { - self.compile_as(input) - } - - /// Compiles with `--standard-json` and deserializes the output as the given `D`. - pub fn compile_as(&self, input: &T) -> Result { - let output = self.compile_output(input)?; - - // Only run UTF-8 validation once. - let output = std::str::from_utf8(&output).map_err(|_| SolcError::InvalidUtf8)?; - - Ok(serde_json::from_str(output)?) 
-    }
-
-    /// Compiles with `--standard-json` and returns the raw `stdout` output.
-    #[instrument(name = "compile", level = "debug", skip_all)]
-    pub fn compile_output<T: Serialize>(&self, input: &T) -> Result<Vec<u8>> {
-        let mut cmd = self.configure_cmd();
-
-        trace!(input=%serde_json::to_string(input).unwrap_or_else(|e| e.to_string()));
-        debug!(?cmd, "compiling");
-
-        let mut child = cmd.spawn().map_err(self.map_io_err())?;
-        debug!("spawned");
-
-        let stdin = child.stdin.as_mut().unwrap();
-        serde_json::to_writer(stdin, input)?;
-        debug!("wrote JSON input to stdin");
-
-        let output = child.wait_with_output().map_err(self.map_io_err())?;
-        debug!(%output.status, output.stderr = ?String::from_utf8_lossy(&output.stderr), "finished");
-
-        compile_output(output)
-    }
-
-    /// Returns the version of this `solc` instance as a SemVer [`Version`], stripping the
-    /// pre-release and build metadata.
-    pub fn version_short(&self) -> Version {
-        Version::new(self.version.major, self.version.minor, self.version.patch)
-    }
-
-    /// Invokes `solc --version` and parses the output as a SemVer [`Version`].
-    #[instrument(level = "debug", skip_all)]
-    pub fn version(solc: impl Into<PathBuf>) -> Result<Version> {
-        Self::version_with_args(solc, &[])
-    }
-
-    /// Invokes `solc --version` and parses the output as a SemVer [`Version`].
-    #[instrument(level = "debug", skip_all)]
-    pub fn version_with_args(solc: impl Into<PathBuf>, args: &[String]) -> Result<Version> {
-        crate::cache_version(solc.into(), args, |solc| {
-            let mut cmd = Command::new(solc);
-            cmd.args(args)
-                .arg("--version")
-                .stdin(Stdio::piped())
-                .stderr(Stdio::piped())
-                .stdout(Stdio::piped());
-            debug!(?cmd, "getting Solc version");
-            let output = cmd.output().map_err(|e| SolcError::io(e, solc))?;
-            trace!(?output);
-            let version = version_from_output(output)?;
-            debug!(%version);
-            Ok(version)
-        })
-    }
-
-    fn map_io_err(&self) -> impl FnOnce(std::io::Error) -> SolcError + '_ {
-        move |err| SolcError::io(err, &self.solc)
-    }
-
-    /// Configures the [Command] object depending on the settings and the solc version used.
-    /// Some features are only supported by newer versions of solc, so we have to disable them for
-    /// older ones.
-    pub fn configure_cmd(&self) -> Command {
-        let mut cmd = Command::new(&self.solc);
-        cmd.stdin(Stdio::piped()).stderr(Stdio::piped()).stdout(Stdio::piped());
-        cmd.args(&self.extra_args);
-
-        if !self.allow_paths.is_empty() {
-            cmd.arg("--allow-paths");
-            cmd.arg(self.allow_paths.iter().map(|p| p.display()).join(","));
-        }
-        if let Some(base_path) = &self.base_path {
-            if SUPPORTS_BASE_PATH.matches(&self.version) {
-                if SUPPORTS_INCLUDE_PATH.matches(&self.version) {
-                    // `--base-path` and `--include-path` conflict if set to the same path, so
-                    // as a precaution, we ensure here that the `--base-path` is not also used
-                    // for `--include-path`
-                    for path in
-                        self.include_paths.iter().filter(|p| p.as_path() != base_path.as_path())
-                    {
-                        cmd.arg("--include-path").arg(path);
-                    }
-                }
-
-                cmd.arg("--base-path").arg(base_path);
-            }
-
-            cmd.current_dir(base_path);
-        }
-
-        cmd.arg("--standard-json");
-
-        cmd
-    }
-
-    /// Either finds an installed Solc version or installs it if it's not found.
-    #[cfg(feature = "svm-solc")]
-    pub fn find_or_install(version: &Version) -> Result<Self> {
-        let solc = if let Some(solc) = Self::find_svm_installed_version(version)? {
-            solc
-        } else {
-            Self::blocking_install(version)?
-        };
-
-        Ok(solc)
-    }
-}
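-
-// A minimal usage sketch (assuming the `svm-solc` feature is enabled): find or
-// install the requested compiler, then compile all sources under a directory.
-//
-//     let solc = Solc::find_or_install(&semver::Version::new(0, 8, 18))?;
-//     let output = solc.compile_source(std::path::Path::new("./contracts"))?;
-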
-#[cfg(feature = "async")]
-impl Solc {
-    /// Convenience function for compiling all sources under the given path
-    pub async fn async_compile_source(&self, path: &Path) -> Result<CompilerOutput> {
-        self.async_compile(&SolcInput::resolve_and_build(
-            Source::async_read_all_from(path, SOLC_EXTENSIONS).await?,
-            Default::default(),
-        ))
-        .await
-    }
-
-    /// Run `solc --standard-json` and return the `solc`'s output as
-    /// `CompilerOutput`
-    pub async fn async_compile<T: Serialize>(&self, input: &T) -> Result<CompilerOutput> {
-        self.async_compile_as(input).await
-    }
-
-    /// Run `solc --standard-json` and return the `solc`'s output as the given json
-    /// output
-    pub async fn async_compile_as<T: Serialize, D: DeserializeOwned>(
-        &self,
-        input: &T,
-    ) -> Result<D> {
-        let output = self.async_compile_output(input).await?;
-        Ok(serde_json::from_slice(&output)?)
-    }
-
-    pub async fn async_compile_output<T: Serialize>(&self, input: &T) -> Result<Vec<u8>> {
-        use tokio::{io::AsyncWriteExt, process::Command};
-
-        let mut cmd: Command = self.configure_cmd().into();
-        let mut child = cmd.spawn().map_err(self.map_io_err())?;
-        let stdin = child.stdin.as_mut().unwrap();
-
-        let content = serde_json::to_vec(input)?;
-
-        stdin.write_all(&content).await.map_err(self.map_io_err())?;
-        stdin.flush().await.map_err(self.map_io_err())?;
-
-        compile_output(child.wait_with_output().await.map_err(self.map_io_err())?)
-    }
-
-    pub async fn async_version(solc: &Path) -> Result<Version> {
-        let mut cmd = tokio::process::Command::new(solc);
-        cmd.arg("--version").stdin(Stdio::piped()).stderr(Stdio::piped()).stdout(Stdio::piped());
-        debug!(?cmd, "getting version");
-        let output = cmd.output().await.map_err(|e| SolcError::io(e, solc))?;
-        let version = version_from_output(output)?;
-        debug!(%version);
-        Ok(version)
-    }
-
-    /// Compiles all `CompilerInput`s with their associated `Solc`.
-    ///
-    /// This will buffer up to `n` `solc` processes and then return the `CompilerOutput`s in the
-    /// order in which they complete. No more than `n` futures will be buffered at any point in
-    /// time, and fewer than `n` may also be buffered depending on the state of each future.
-    pub async fn compile_many<I>(jobs: I, n: usize) -> crate::many::CompiledMany
-    where
-        I: IntoIterator<Item = (Solc, SolcInput)>,
-    {
-        use futures_util::stream::StreamExt;
-
-        let outputs = futures_util::stream::iter(
-            jobs.into_iter()
-                .map(|(solc, input)| async { (solc.async_compile(&input).await, solc, input) }),
-        )
-        .buffer_unordered(n)
-        .collect::<Vec<_>>()
-        .await;
-
-        crate::many::CompiledMany::new(outputs)
-    }
-}
-
-fn compile_output(output: Output) -> Result<Vec<u8>> {
-    if output.status.success() {
-        Ok(output.stdout)
-    } else {
-        Err(SolcError::solc_output(&output))
-    }
-}
-
-fn version_from_output(output: Output) -> Result<Version> {
-    if output.status.success() {
-        let stdout = String::from_utf8_lossy(&output.stdout);
-        let version = stdout
-            .lines()
-            .filter(|l| !l.trim().is_empty())
-            .last()
-            .ok_or_else(|| SolcError::msg("Version not found in Solc output"))?;
-        // NOTE: semver doesn't like the `+` in `g++` in the build metadata, which is
-        // invalid semver
-        Ok(Version::from_str(&version.trim_start_matches("Version: ").replace(".g++", ".gcc"))?)
- } else { - Err(SolcError::solc_output(&output)) - } -} - -impl AsRef for Solc { - fn as_ref(&self) -> &Path { - &self.solc - } -} - -#[cfg(test)] -#[cfg(feature = "svm-solc")] -mod tests { - use super::*; - use crate::{resolver::parse::SolData, Artifact}; - - #[test] - fn test_version_parse() { - let req = SolData::parse_version_req(">=0.6.2 <0.8.21").unwrap(); - let semver_req: VersionReq = ">=0.6.2,<0.8.21".parse().unwrap(); - assert_eq!(req, semver_req); - } - - fn solc() -> Solc { - if let Some(solc) = Solc::find_svm_installed_version(&Version::new(0, 8, 18)).unwrap() { - solc - } else { - Solc::blocking_install(&Version::new(0, 8, 18)).unwrap() - } - } - - #[test] - fn solc_version_works() { - Solc::version(solc().solc).unwrap(); - } - - #[test] - fn can_parse_version_metadata() { - let _version = Version::from_str("0.6.6+commit.6c089d02.Linux.gcc").unwrap(); - } - - #[cfg(feature = "async")] - #[tokio::test(flavor = "multi_thread")] - async fn async_solc_version_works() { - Solc::async_version(&solc().solc).await.unwrap(); - } - - #[test] - fn solc_compile_works() { - let input = include_str!("../../../../../test-data/in/compiler-in-1.json"); - let input: SolcInput = serde_json::from_str(input).unwrap(); - let out = solc().compile(&input).unwrap(); - let other = solc().compile(&serde_json::json!(input)).unwrap(); - assert_eq!(out, other); - } - - #[test] - fn solc_metadata_works() { - let input = include_str!("../../../../../test-data/in/compiler-in-1.json"); - let mut input: SolcInput = serde_json::from_str(input).unwrap(); - input.settings.push_output_selection("metadata"); - let out = solc().compile(&input).unwrap(); - for (_, c) in out.split().1.contracts_iter() { - assert!(c.metadata.is_some()); - } - } - - #[test] - fn can_compile_with_remapped_links() { - let input: SolcInput = serde_json::from_str(include_str!( - "../../../../../test-data/library-remapping-in.json" - )) - .unwrap(); - let out = solc().compile(&input).unwrap(); - let (_, mut contracts) = out.split(); - let contract = contracts.remove("LinkTest").unwrap(); - let bytecode = &contract.get_bytecode().unwrap().object; - assert!(!bytecode.is_unlinked()); - } - - #[test] - fn can_compile_with_remapped_links_temp_dir() { - let input: SolcInput = serde_json::from_str(include_str!( - "../../../../../test-data/library-remapping-in-2.json" - )) - .unwrap(); - let out = solc().compile(&input).unwrap(); - let (_, mut contracts) = out.split(); - let contract = contracts.remove("LinkTest").unwrap(); - let bytecode = &contract.get_bytecode().unwrap().object; - assert!(!bytecode.is_unlinked()); - } - - #[cfg(feature = "async")] - #[tokio::test(flavor = "multi_thread")] - async fn async_solc_compile_works() { - let input = include_str!("../../../../../test-data/in/compiler-in-1.json"); - let input: SolcInput = serde_json::from_str(input).unwrap(); - let out = solc().async_compile(&input).await.unwrap(); - let other = solc().async_compile(&serde_json::json!(input)).await.unwrap(); - assert_eq!(out, other); - } - - #[cfg(feature = "async")] - #[tokio::test(flavor = "multi_thread")] - async fn async_solc_compile_works2() { - let input = include_str!("../../../../../test-data/in/compiler-in-2.json"); - let input: SolcInput = serde_json::from_str(input).unwrap(); - let out = solc().async_compile(&input).await.unwrap(); - let other = solc().async_compile(&serde_json::json!(input)).await.unwrap(); - assert_eq!(out, other); - let sync_out = solc().compile(&input).unwrap(); - assert_eq!(out, sync_out); - } - - #[test] - fn 
test_version_req() { - let versions = ["=0.1.2", "^0.5.6", ">=0.7.1", ">0.8.0"]; - - versions.iter().for_each(|version| { - let version_req = SolData::parse_version_req(version).unwrap(); - assert_eq!(version_req, VersionReq::from_str(version).unwrap()); - }); - - // Solidity defines version ranges with a space, whereas the semver package - // requires them to be separated with a comma - let version_range = ">=0.8.0 <0.9.0"; - let version_req = SolData::parse_version_req(version_range).unwrap(); - assert_eq!(version_req, VersionReq::from_str(">=0.8.0,<0.9.0").unwrap()); - } - - #[test] - #[cfg(feature = "full")] - fn test_find_installed_version_path() { - // This test does not take the lock by default, so we need to manually add it here. - take_solc_installer_lock!(_lock); - let version = Version::new(0, 8, 6); - if utils::installed_versions(svm::data_dir()) - .map(|versions| !versions.contains(&version)) - .unwrap_or_default() - { - Solc::blocking_install(&version).unwrap(); - } - drop(_lock); - let res = Solc::find_svm_installed_version(&version).unwrap().unwrap(); - let expected = svm::data_dir().join(version.to_string()).join(format!("solc-{version}")); - assert_eq!(res.solc, expected); - } - - #[test] - #[cfg(feature = "svm-solc")] - fn can_install_solc_in_tokio_rt() { - let version = Version::from_str("0.8.6").unwrap(); - let rt = tokio::runtime::Runtime::new().unwrap(); - let result = rt.block_on(async { Solc::blocking_install(&version) }); - assert!(result.is_ok()); - } - - #[test] - fn does_not_find_not_installed_version() { - let ver = Version::new(1, 1, 1); - let res = Solc::find_svm_installed_version(&ver).unwrap(); - assert!(res.is_none()); - } -} diff --git a/crates/compilers/src/compilers/solc/mod.rs b/crates/compilers/src/compilers/solc/mod.rs deleted file mode 100644 index 0bdb6f42..00000000 --- a/crates/compilers/src/compilers/solc/mod.rs +++ /dev/null @@ -1,367 +0,0 @@ -use super::{ - CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, - Language, ParsedSource, -}; -use crate::resolver::parse::SolData; -pub use foundry_compilers_artifacts::SolcLanguage; -use foundry_compilers_artifacts::{ - error::SourceLocation, - output_selection::OutputSelection, - remappings::Remapping, - sources::{Source, Sources}, - Error, Settings, Severity, SolcInput, -}; -use foundry_compilers_core::error::Result; -use itertools::Itertools; -use semver::Version; -use serde::{Deserialize, Serialize}; -use std::{ - borrow::Cow, - collections::BTreeSet, - ops::{Deref, DerefMut}, - path::{Path, PathBuf}, -}; -mod compiler; -pub use compiler::{Solc, SOLC_EXTENSIONS}; - -#[derive(Clone, Debug)] -#[cfg_attr(feature = "svm-solc", derive(Default))] -pub enum SolcCompiler { - #[default] - #[cfg(feature = "svm-solc")] - AutoDetect, - - Specific(Solc), -} - -impl Language for SolcLanguage { - const FILE_EXTENSIONS: &'static [&'static str] = SOLC_EXTENSIONS; -} - -impl Compiler for SolcCompiler { - type Input = SolcVersionedInput; - type CompilationError = Error; - type ParsedSource = SolData; - type Settings = SolcSettings; - type Language = SolcLanguage; - - fn compile(&self, input: &Self::Input) -> Result> { - let mut solc = match self { - Self::Specific(solc) => solc.clone(), - - #[cfg(feature = "svm-solc")] - Self::AutoDetect => Solc::find_or_install(&input.version)?, - }; - solc.base_path.clone_from(&input.cli_settings.base_path); - solc.allow_paths.clone_from(&input.cli_settings.allow_paths); - solc.include_paths.clone_from(&input.cli_settings.include_paths); 
- solc.extra_args.extend_from_slice(&input.cli_settings.extra_args); - - let solc_output = solc.compile(&input.input)?; - - let output = CompilerOutput { - errors: solc_output.errors, - contracts: solc_output.contracts, - sources: solc_output.sources, - }; - - Ok(output) - } - - fn available_versions(&self, _language: &Self::Language) -> Vec { - match self { - Self::Specific(solc) => vec![CompilerVersion::Installed(Version::new( - solc.version.major, - solc.version.minor, - solc.version.patch, - ))], - - #[cfg(feature = "svm-solc")] - Self::AutoDetect => { - let mut all_versions = Solc::installed_versions() - .into_iter() - .map(CompilerVersion::Installed) - .collect::>(); - let mut uniques = all_versions - .iter() - .map(|v| { - let v = v.as_ref(); - (v.major, v.minor, v.patch) - }) - .collect::>(); - all_versions.extend( - Solc::released_versions() - .into_iter() - .filter(|v| uniques.insert((v.major, v.minor, v.patch))) - .map(CompilerVersion::Remote), - ); - all_versions.sort_unstable(); - all_versions - } - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SolcVersionedInput { - pub version: Version, - #[serde(flatten)] - pub input: SolcInput, - #[serde(flatten)] - cli_settings: CliSettings, -} - -impl CompilerInput for SolcVersionedInput { - type Settings = SolcSettings; - type Language = SolcLanguage; - - /// Creates a new [CompilerInput]s with default settings and the given sources - /// - /// A [CompilerInput] expects a language setting, supported by solc are solidity or yul. - /// In case the `sources` is a mix of solidity and yul files, 2 CompilerInputs are returned - fn build( - sources: Sources, - settings: Self::Settings, - language: Self::Language, - version: Version, - ) -> Self { - let SolcSettings { settings, cli_settings } = settings; - let input = SolcInput::new(language, sources, settings).sanitized(&version); - - Self { version, input, cli_settings } - } - - fn language(&self) -> Self::Language { - self.input.language - } - - fn version(&self) -> &Version { - &self.version - } - - fn sources(&self) -> impl Iterator { - self.input.sources.iter().map(|(path, source)| (path.as_path(), source)) - } - - fn compiler_name(&self) -> Cow<'static, str> { - "Solc".into() - } - - fn strip_prefix(&mut self, base: &Path) { - self.input.strip_prefix(base); - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct CliSettings { - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub extra_args: Vec, - #[serde(default, skip_serializing_if = "BTreeSet::is_empty")] - pub allow_paths: BTreeSet, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub base_path: Option, - #[serde(default, skip_serializing_if = "BTreeSet::is_empty")] - pub include_paths: BTreeSet, -} - -#[derive(Debug, Clone, Serialize, Deserialize, Default, PartialEq, Eq)] -pub struct SolcSettings { - /// JSON settings expected by Solc - #[serde(flatten)] - pub settings: Settings, - /// Additional CLI args configuration - #[serde(flatten)] - pub cli_settings: CliSettings, -} - -impl Deref for SolcSettings { - type Target = Settings; - - fn deref(&self) -> &Self::Target { - &self.settings - } -} - -impl DerefMut for SolcSettings { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.settings - } -} - -impl CompilerSettings for SolcSettings { - fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { - f(&mut self.settings.output_selection) - 
} - - fn can_use_cached(&self, other: &Self) -> bool { - let Self { - settings: - Settings { - stop_after, - remappings, - optimizer, - model_checker, - metadata, - output_selection, - evm_version, - via_ir, - debug, - libraries, - eof_version, - }, - .. - } = self; - - *stop_after == other.settings.stop_after - && *remappings == other.settings.remappings - && *optimizer == other.settings.optimizer - && *model_checker == other.settings.model_checker - && *metadata == other.settings.metadata - && *evm_version == other.settings.evm_version - && *via_ir == other.settings.via_ir - && *debug == other.settings.debug - && *libraries == other.settings.libraries - && *eof_version == other.settings.eof_version - && output_selection.is_subset_of(&other.settings.output_selection) - } - - fn with_remappings(mut self, remappings: &[Remapping]) -> Self { - self.settings.remappings = remappings.to_vec(); - - self - } - - fn with_allow_paths(mut self, allowed_paths: &BTreeSet) -> Self { - self.cli_settings.allow_paths.clone_from(allowed_paths); - self - } - - fn with_base_path(mut self, base_path: &Path) -> Self { - self.cli_settings.base_path = Some(base_path.to_path_buf()); - self - } - - fn with_include_paths(mut self, include_paths: &BTreeSet) -> Self { - self.cli_settings.include_paths.clone_from(include_paths); - self - } -} - -impl ParsedSource for SolData { - type Language = SolcLanguage; - - fn parse(content: &str, file: &std::path::Path) -> Result { - Ok(Self::parse(content, file)) - } - - fn version_req(&self) -> Option<&semver::VersionReq> { - self.version_req.as_ref() - } - - fn resolve_imports( - &self, - _paths: &crate::ProjectPathsConfig, - _include_paths: &mut BTreeSet, - ) -> Result> { - Ok(self.imports.iter().map(|i| i.data().path().to_path_buf()).collect_vec()) - } - - fn language(&self) -> Self::Language { - if self.is_yul { - SolcLanguage::Yul - } else { - SolcLanguage::Solidity - } - } - - fn compilation_dependencies<'a>( - &self, - imported_nodes: impl Iterator, - ) -> impl Iterator - where - Self: 'a, - { - imported_nodes.filter_map(|(path, node)| (!node.libraries.is_empty()).then_some(path)) - } -} - -impl CompilationError for Error { - fn is_warning(&self) -> bool { - self.severity.is_warning() - } - fn is_error(&self) -> bool { - self.severity.is_error() - } - - fn source_location(&self) -> Option { - self.source_location.clone() - } - - fn severity(&self) -> Severity { - self.severity - } - - fn error_code(&self) -> Option { - self.error_code - } -} - -#[cfg(test)] -mod tests { - use foundry_compilers_artifacts::{CompilerOutput, SolcLanguage}; - use semver::Version; - - use crate::{ - buildinfo::RawBuildInfo, - compilers::{ - solc::{SolcCompiler, SolcVersionedInput}, - CompilerInput, - }, - AggregatedCompilerOutput, - }; - - #[test] - fn can_parse_declaration_error() { - let s = r#"{ - "errors": [ - { - "component": "general", - "errorCode": "7576", - "formattedMessage": "DeclarationError: Undeclared identifier. Did you mean \"revert\"?\n --> /Users/src/utils/UpgradeProxy.sol:35:17:\n |\n35 | refert(\"Transparent ERC1967 proxies do not have upgradeable implementations\");\n | ^^^^^^\n\n", - "message": "Undeclared identifier. 
Did you mean \"revert\"?", - "severity": "error", - "sourceLocation": { - "end": 1623, - "file": "/Users/src/utils/UpgradeProxy.sol", - "start": 1617 - }, - "type": "DeclarationError" - } - ], - "sources": { } -}"#; - - let out: CompilerOutput = serde_json::from_str(s).unwrap(); - assert_eq!(out.errors.len(), 1); - - let out_converted = crate::compilers::CompilerOutput { - errors: out.errors, - contracts: Default::default(), - sources: Default::default(), - }; - - let v: Version = "0.8.12".parse().unwrap(); - let input = SolcVersionedInput::build( - Default::default(), - Default::default(), - SolcLanguage::Solidity, - v.clone(), - ); - let build_info = RawBuildInfo::new(&input, &out_converted, true).unwrap(); - let mut aggregated = AggregatedCompilerOutput::::default(); - aggregated.extend(v, build_info, out_converted); - assert!(!aggregated.is_unchanged()); - } -} diff --git a/crates/compilers/src/compilers/vyper/error.rs b/crates/compilers/src/compilers/vyper/error.rs deleted file mode 100644 index 91644b06..00000000 --- a/crates/compilers/src/compilers/vyper/error.rs +++ /dev/null @@ -1,24 +0,0 @@ -use crate::{artifacts::vyper::VyperCompilationError, compilers::CompilationError}; -use foundry_compilers_artifacts::{error::SourceLocation, Severity}; - -impl CompilationError for VyperCompilationError { - fn is_warning(&self) -> bool { - self.severity.is_warning() - } - - fn is_error(&self) -> bool { - self.severity.is_error() - } - - fn source_location(&self) -> Option { - None - } - - fn severity(&self) -> Severity { - self.severity - } - - fn error_code(&self) -> Option { - None - } -} diff --git a/crates/compilers/src/compilers/vyper/input.rs b/crates/compilers/src/compilers/vyper/input.rs deleted file mode 100644 index 0401e6a1..00000000 --- a/crates/compilers/src/compilers/vyper/input.rs +++ /dev/null @@ -1,55 +0,0 @@ -use super::VyperLanguage; -use crate::{ - artifacts::vyper::{VyperInput, VyperSettings}, - compilers::CompilerInput, -}; -use foundry_compilers_artifacts::sources::{Source, Sources}; -use semver::Version; -use serde::Serialize; -use std::{borrow::Cow, path::Path}; - -#[derive(Clone, Debug, Serialize)] -pub struct VyperVersionedInput { - #[serde(flatten)] - pub input: VyperInput, - #[serde(skip)] - pub version: Version, -} - -impl CompilerInput for VyperVersionedInput { - type Settings = VyperSettings; - type Language = VyperLanguage; - - fn build( - sources: Sources, - settings: Self::Settings, - _language: Self::Language, - version: Version, - ) -> Self { - Self { input: VyperInput::new(sources, settings), version } - } - - fn compiler_name(&self) -> Cow<'static, str> { - "Vyper".into() - } - - fn strip_prefix(&mut self, base: &Path) { - self.input.strip_prefix(base); - } - - fn language(&self) -> Self::Language { - VyperLanguage - } - - fn version(&self) -> &Version { - &self.version - } - - fn sources(&self) -> impl Iterator { - self.input - .sources - .iter() - .chain(self.input.interfaces.iter()) - .map(|(path, source)| (path.as_path(), source)) - } -} diff --git a/crates/compilers/src/compilers/vyper/mod.rs b/crates/compilers/src/compilers/vyper/mod.rs deleted file mode 100644 index 1b351124..00000000 --- a/crates/compilers/src/compilers/vyper/mod.rs +++ /dev/null @@ -1,212 +0,0 @@ -use self::{input::VyperVersionedInput, parser::VyperParsedSource}; -use super::{Compiler, CompilerOutput, Language}; -pub use crate::artifacts::vyper::{VyperCompilationError, VyperInput, VyperOutput, VyperSettings}; -use core::fmt; -use foundry_compilers_artifacts::sources::Source; 
-use foundry_compilers_core::error::{Result, SolcError};
-use semver::Version;
-use serde::{de::DeserializeOwned, Serialize};
-use std::{
-    path::{Path, PathBuf},
-    process::{Command, Stdio},
-    str::FromStr,
-};
-
-pub mod error;
-pub mod input;
-mod output;
-pub mod parser;
-pub mod settings;
-
-/// File extensions that are recognized as Vyper source files.
-pub const VYPER_EXTENSIONS: &[&str] = &["vy", "vyi"];
-
-/// Extension of Vyper interface file.
-pub const VYPER_INTERFACE_EXTENSION: &str = "vyi";
-
-/// Vyper language, used as [Compiler::Language] for the Vyper compiler.
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-#[non_exhaustive]
-pub struct VyperLanguage;
-
-impl serde::Serialize for VyperLanguage {
-    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        serializer.serialize_str("vyper")
-    }
-}
-
-impl<'de> serde::Deserialize<'de> for VyperLanguage {
-    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        let res = String::deserialize(deserializer)?;
-        if res != "vyper" {
-            Err(serde::de::Error::custom(format!("Invalid Vyper language: {res}")))
-        } else {
-            Ok(Self)
-        }
-    }
-}
-
-impl Language for VyperLanguage {
-    const FILE_EXTENSIONS: &'static [&'static str] = VYPER_EXTENSIONS;
-}
-
-impl fmt::Display for VyperLanguage {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "Vyper")
-    }
-}
-
-/// Vyper compiler. Wrapper around the `vyper` binary.
-#[derive(Clone, Debug)]
-pub struct Vyper {
-    pub path: PathBuf,
-    pub version: Version,
-}
-
-impl Vyper {
-    /// Creates a new instance of the Vyper compiler. Uses the `vyper` binary in the system `PATH`.
-    pub fn new(path: impl Into<PathBuf>) -> Result<Self> {
-        let path = path.into();
-        let version = Self::version(path.clone())?;
-        Ok(Self { path, version })
-    }
-
-    /// Convenience function for compiling all sources under the given path
-    pub fn compile_source(&self, path: &Path) -> Result<VyperOutput> {
-        let input =
-            VyperInput::new(Source::read_all_from(path, VYPER_EXTENSIONS)?, Default::default());
-        self.compile(&input)
-    }
-
-    /// Same as [`Self::compile()`], but only returns those files which are included in the
-    /// `CompilerInput`.
-    ///
-    /// In other words, this removes those files from the `VyperOutput` that are __not__
-    /// included in the provided `CompilerInput`.
-    pub fn compile_exact(&self, input: &VyperInput) -> Result<VyperOutput> {
-        let mut out = self.compile(input)?;
-        out.retain_files(input.sources.keys().map(|p| p.as_path()));
-        Ok(out)
-    }
-
-    /// Compiles with `--standard-json` and deserializes the output as [`VyperOutput`].
-    ///
-    /// # Examples
-    ///
-    /// ```no_run
-    /// use foundry_compilers::{
-    ///     artifacts::{
-    ///         vyper::{VyperInput, VyperSettings},
-    ///         Source,
-    ///     },
-    ///     Vyper,
-    /// };
-    /// use std::path::Path;
-    ///
-    /// let vyper = Vyper::new("vyper")?;
-    /// let path = Path::new("path/to/sources");
-    /// let sources = Source::read_all_from(path, &["vy", "vyi"])?;
-    /// let input = VyperInput::new(sources, VyperSettings::default());
-    /// let output = vyper.compile(&input)?;
-    /// # Ok::<_, Box<dyn std::error::Error>>(())
-    /// ```
-    pub fn compile<T: Serialize>(&self, input: &T) -> Result<VyperOutput> {
-        self.compile_as(input)
-    }
-
-    /// Compiles with `--standard-json` and deserializes the output as the given `D`.
-    pub fn compile_as<T: Serialize, D: DeserializeOwned>(&self, input: &T) -> Result<D> {
-        let output = self.compile_output(input)?;
-
-        // Only run UTF-8 validation once.
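-        // The validated `&str` is reused below for both the trace log and
-        // `serde_json::from_str`, so validation doesn't have to happen twice.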
- let output = std::str::from_utf8(&output).map_err(|_| SolcError::InvalidUtf8)?; - - trace!("vyper compiler output: {}", output); - - Ok(serde_json::from_str(output)?) - } - - /// Compiles with `--standard-json` and returns the raw `stdout` output. - #[instrument(name = "compile", level = "debug", skip_all)] - pub fn compile_output(&self, input: &T) -> Result> { - let mut cmd = Command::new(&self.path); - cmd.arg("--standard-json") - .stdin(Stdio::piped()) - .stderr(Stdio::piped()) - .stdout(Stdio::piped()); - - trace!(input=%serde_json::to_string(input).unwrap_or_else(|e| e.to_string())); - debug!(?cmd, "compiling"); - - let mut child = cmd.spawn().map_err(self.map_io_err())?; - debug!("spawned"); - - let stdin = child.stdin.as_mut().unwrap(); - serde_json::to_writer(stdin, input)?; - debug!("wrote JSON input to stdin"); - - let output = child.wait_with_output().map_err(self.map_io_err())?; - debug!(%output.status, output.stderr = ?String::from_utf8_lossy(&output.stderr), "finished"); - - if output.status.success() { - Ok(output.stdout) - } else { - Err(SolcError::solc_output(&output)) - } - } - - /// Invokes `vyper --version` and parses the output as a SemVer [`Version`]. - #[instrument(level = "debug", skip_all)] - pub fn version(vyper: impl Into) -> Result { - crate::cache_version(vyper.into(), &[], |vyper| { - let mut cmd = Command::new(vyper); - cmd.arg("--version") - .stdin(Stdio::piped()) - .stderr(Stdio::piped()) - .stdout(Stdio::piped()); - debug!(?cmd, "getting Vyper version"); - let output = cmd.output().map_err(|e| SolcError::io(e, vyper))?; - trace!(?output); - if output.status.success() { - let stdout = String::from_utf8_lossy(&output.stdout); - Ok(Version::from_str( - &stdout.trim().replace("rc", "-rc").replace("b", "-b").replace("a", "-a"), - )?) 
- } else { - Err(SolcError::solc_output(&output)) - } - }) - } - - fn map_io_err(&self) -> impl FnOnce(std::io::Error) -> SolcError + '_ { - move |err| SolcError::io(err, &self.path) - } -} - -impl Compiler for Vyper { - type Settings = VyperSettings; - type CompilationError = VyperCompilationError; - type ParsedSource = VyperParsedSource; - type Input = VyperVersionedInput; - type Language = VyperLanguage; - - fn compile(&self, input: &Self::Input) -> Result> { - self.compile(input).map(Into::into) - } - - fn available_versions(&self, _language: &Self::Language) -> Vec { - vec![super::CompilerVersion::Installed(Version::new( - self.version.major, - self.version.minor, - self.version.patch, - ))] - } -} diff --git a/crates/compilers/src/compilers/vyper/output.rs b/crates/compilers/src/compilers/vyper/output.rs deleted file mode 100644 index c4f739dd..00000000 --- a/crates/compilers/src/compilers/vyper/output.rs +++ /dev/null @@ -1,15 +0,0 @@ -use crate::artifacts::vyper::{VyperCompilationError, VyperOutput}; - -impl From for super::CompilerOutput { - fn from(output: VyperOutput) -> Self { - Self { - errors: output.errors, - contracts: output - .contracts - .into_iter() - .map(|(k, v)| (k, v.into_iter().map(|(k, v)| (k, v.into())).collect())) - .collect(), - sources: output.sources.into_iter().map(|(k, v)| (k, v.into())).collect(), - } - } -} diff --git a/crates/compilers/src/compilers/vyper/parser.rs b/crates/compilers/src/compilers/vyper/parser.rs deleted file mode 100644 index e98f5ea2..00000000 --- a/crates/compilers/src/compilers/vyper/parser.rs +++ /dev/null @@ -1,224 +0,0 @@ -use super::VyperLanguage; -use crate::{ - compilers::{vyper::VYPER_EXTENSIONS, ParsedSource}, - ProjectPathsConfig, -}; -use foundry_compilers_core::{ - error::{Result, SolcError}, - utils::{capture_outer_and_inner, RE_VYPER_VERSION}, -}; -use semver::VersionReq; -use std::{ - collections::BTreeSet, - path::{Path, PathBuf}, -}; -use winnow::{ - ascii::space1, - combinator::{alt, opt, preceded}, - token::{take_till, take_while}, - PResult, Parser, -}; - -#[derive(Clone, Debug, PartialEq)] -pub struct VyperImport { - pub level: usize, - pub path: Option, - pub final_part: Option, -} - -#[derive(Clone, Debug)] -pub struct VyperParsedSource { - path: PathBuf, - version_req: Option, - imports: Vec, -} - -impl ParsedSource for VyperParsedSource { - type Language = VyperLanguage; - - fn parse(content: &str, file: &Path) -> Result { - let version_req = capture_outer_and_inner(content, &RE_VYPER_VERSION, &["version"]) - .first() - .and_then(|(cap, _)| VersionReq::parse(cap.as_str()).ok()); - - let imports = parse_imports(content); - - let path = file.to_path_buf(); - - Ok(Self { path, version_req, imports }) - } - - fn version_req(&self) -> Option<&VersionReq> { - self.version_req.as_ref() - } - - fn resolve_imports( - &self, - paths: &ProjectPathsConfig, - include_paths: &mut BTreeSet, - ) -> Result> { - let mut imports = Vec::new(); - 'outer: for import in &self.imports { - // skip built-in imports - if import.level == 0 - && import - .path - .as_ref() - .map(|path| path.starts_with("vyper.") || path.starts_with("ethereum.ercs")) - .unwrap_or_default() - { - continue; - } - - // Potential locations of imported source. - let mut candidate_dirs = Vec::new(); - - // For relative imports, vyper always checks only directory containing contract which - // includes given import. 
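-            // e.g. `from . import utils` (level 1) searches the contract's own
-            // directory, `from .. import utils` (level 2) the directory above it,
-            // and so on (`utils` here is just a hypothetical module name).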
- if import.level > 0 { - let mut candidate_dir = Some(self.path.as_path()); - - for _ in 0..import.level { - candidate_dir = candidate_dir.and_then(|dir| dir.parent()); - } - - let candidate_dir = candidate_dir.ok_or_else(|| { - SolcError::msg(format!( - "Could not go {} levels up for import at {}", - import.level, - self.path.display() - )) - })?; - - candidate_dirs.push(candidate_dir); - } else { - // For absolute imports, Vyper firstly checks current directory, and then root. - if let Some(parent) = self.path.parent() { - candidate_dirs.push(parent); - } - candidate_dirs.push(paths.root.as_path()); - } - - candidate_dirs.extend(paths.libraries.iter().map(PathBuf::as_path)); - - let import_path = { - let mut path = PathBuf::new(); - - if let Some(import_path) = &import.path { - path = path.join(import_path.replace('.', "/")); - } - - if let Some(part) = &import.final_part { - path = path.join(part); - } - - path - }; - - for candidate_dir in candidate_dirs { - let candidate = candidate_dir.join(&import_path); - for extension in VYPER_EXTENSIONS { - let candidate = candidate.clone().with_extension(extension); - trace!("trying {}", candidate.display()); - if candidate.exists() { - imports.push(candidate); - include_paths.insert(candidate_dir.to_path_buf()); - continue 'outer; - } - } - } - - return Err(SolcError::msg(format!( - "failed to resolve import {}{} at {}", - ".".repeat(import.level), - import_path.display(), - self.path.display() - ))); - } - Ok(imports) - } - - fn language(&self) -> Self::Language { - VyperLanguage - } -} - -/// Parses given source trying to find all import directives. -fn parse_imports(content: &str) -> Vec { - let mut imports = Vec::new(); - - for mut line in content.split('\n') { - if let Ok(parts) = parse_import(&mut line) { - imports.push(parts); - } - } - - imports -} - -/// Parses given input, trying to find (import|from) part1.part2.part3 (import part4)? -fn parse_import(input: &mut &str) -> PResult { - ( - preceded( - (alt(["from", "import"]), space1), - (take_while(0.., |c| c == '.'), take_till(0.., [' '])), - ), - opt(preceded((space1, "import", space1), take_till(0.., [' ']))), - ) - .parse_next(input) - .map(|((dots, path), last)| VyperImport { - level: dots.len(), - path: (!path.is_empty()).then(|| path.to_string()), - final_part: last.map(|p| p.to_string()), - }) -} - -#[cfg(test)] -mod tests { - use super::{parse_import, VyperImport}; - use winnow::Parser; - - #[test] - fn can_parse_import() { - assert_eq!( - parse_import.parse("import one.two.three").unwrap(), - VyperImport { level: 0, path: Some("one.two.three".to_string()), final_part: None } - ); - assert_eq!( - parse_import.parse("from one.two.three import four").unwrap(), - VyperImport { - level: 0, - path: Some("one.two.three".to_string()), - final_part: Some("four".to_string()), - } - ); - assert_eq!( - parse_import.parse("from one import two").unwrap(), - VyperImport { - level: 0, - path: Some("one".to_string()), - final_part: Some("two".to_string()), - } - ); - assert_eq!( - parse_import.parse("import one").unwrap(), - VyperImport { level: 0, path: Some("one".to_string()), final_part: None } - ); - assert_eq!( - parse_import.parse("from . import one").unwrap(), - VyperImport { level: 1, path: None, final_part: Some("one".to_string()) } - ); - assert_eq!( - parse_import.parse("from ... 
import two").unwrap(), - VyperImport { level: 3, path: None, final_part: Some("two".to_string()) } - ); - assert_eq!( - parse_import.parse("from ...one.two import three").unwrap(), - VyperImport { - level: 3, - path: Some("one.two".to_string()), - final_part: Some("three".to_string()) - } - ); - } -} diff --git a/crates/compilers/src/compilers/vyper/settings.rs b/crates/compilers/src/compilers/vyper/settings.rs deleted file mode 100644 index 21ae4526..00000000 --- a/crates/compilers/src/compilers/vyper/settings.rs +++ /dev/null @@ -1,33 +0,0 @@ -use std::{collections::BTreeSet, path::PathBuf}; - -pub use crate::artifacts::vyper::VyperSettings; -use crate::compilers::CompilerSettings; -use foundry_compilers_artifacts::output_selection::OutputSelection; - -impl CompilerSettings for VyperSettings { - fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection)) { - f(&mut self.output_selection) - } - - fn can_use_cached(&self, other: &Self) -> bool { - let Self { - evm_version, - optimize, - bytecode_metadata, - output_selection, - search_paths, - experimental_codegen, - } = self; - evm_version == &other.evm_version - && optimize == &other.optimize - && bytecode_metadata == &other.bytecode_metadata - && output_selection.is_subset_of(&other.output_selection) - && search_paths == &other.search_paths - && experimental_codegen == &other.experimental_codegen - } - - fn with_include_paths(mut self, include_paths: &BTreeSet) -> Self { - self.search_paths = Some(include_paths.clone()); - self - } -} diff --git a/crates/compilers/src/config.rs b/crates/compilers/src/config.rs deleted file mode 100644 index c8f64a7d..00000000 --- a/crates/compilers/src/config.rs +++ /dev/null @@ -1,1146 +0,0 @@ -use crate::{ - cache::SOLIDITY_FILES_CACHE_FILENAME, - compilers::{multi::MultiCompilerLanguage, Language}, - flatten::{collect_ordered_deps, combine_version_pragmas}, - resolver::{parse::SolData, SolImportAlias}, - Graph, -}; -use foundry_compilers_artifacts::{ - output_selection::ContractOutputSelection, - remappings::Remapping, - sources::{Source, Sources}, - Libraries, Settings, SolcLanguage, -}; -use foundry_compilers_core::{ - error::{Result, SolcError, SolcIoError}, - utils::{self, strip_prefix_owned}, -}; - -use serde::{Deserialize, Serialize}; -use std::{ - collections::BTreeSet, - fmt::{self, Formatter}, - fs, - marker::PhantomData, - path::{Component, Path, PathBuf}, -}; - -/// Where to find all files or where to write them -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ProjectPathsConfig { - /// Project root - pub root: PathBuf, - /// Path to the cache, if any - pub cache: PathBuf, - /// Where to store build artifacts - pub artifacts: PathBuf, - /// Where to store the build info files - pub build_infos: PathBuf, - /// Where to find sources - pub sources: PathBuf, - /// Where to find tests - pub tests: PathBuf, - /// Where to find scripts - pub scripts: PathBuf, - /// Where to look for libraries - pub libraries: Vec, - /// The compiler remappings - pub remappings: Vec, - /// Paths to use for solc's `--include-path` - pub include_paths: BTreeSet, - /// The paths which will be allowed for library inclusion - pub allowed_paths: BTreeSet, - - pub _l: PhantomData, -} - -impl ProjectPathsConfig { - pub fn builder() -> ProjectPathsConfigBuilder { - ProjectPathsConfigBuilder::default() - } - - /// Attempts to autodetect the artifacts directory based on the given root path - /// - /// Dapptools layout takes precedence over hardhat style. 
-    /// This will return:
-    /// - `<root>/out` if it exists or `<root>/artifacts` does not exist,
-    /// - `<root>/artifacts` if it exists and `<root>/out` does not exist.
-    pub fn find_artifacts_dir(root: &Path) -> PathBuf {
-        utils::find_fave_or_alt_path(root, "out", "artifacts")
-    }
-
-    /// Attempts to autodetect the source directory based on the given root path
-    ///
-    /// Dapptools layout takes precedence over hardhat style.
-    /// This will return:
-    /// - `<root>/src` if it exists or `<root>/contracts` does not exist,
-    /// - `<root>/contracts` if it exists and `<root>/src` does not exist.
-    pub fn find_source_dir(root: &Path) -> PathBuf {
-        utils::find_fave_or_alt_path(root, "src", "contracts")
-    }
-
-    /// Attempts to autodetect the lib directory based on the given root path
-    ///
-    /// Dapptools layout takes precedence over hardhat style.
-    /// This will return:
-    /// - `<root>/lib` if it exists or `<root>/node_modules` does not exist,
-    /// - `<root>/node_modules` if it exists and `<root>/lib` does not exist.
-    pub fn find_libs(root: &Path) -> Vec<PathBuf> {
-        vec![utils::find_fave_or_alt_path(root, "lib", "node_modules")]
-    }
-}
-
-impl ProjectPathsConfig {
-    /// Flattens the target solidity file into a single string suitable for verification.
-    ///
-    /// This method uses a dependency graph to resolve imported files and substitute
-    /// import directives with the contents of target files. It will strip the pragma
-    /// version directives and SPDX license identifiers from all imported files.
-    ///
-    /// NB: the SPDX license identifier will be removed from the imported file
-    /// only if it is found at the beginning of the file.
-    pub fn flatten(&self, target: &Path) -> Result<String> {
-        trace!("flattening file");
-        let mut input_files = self.input_files();
-
-        // we need to ensure that the target is part of the input set, otherwise it's not
-        // part of the graph if it's not imported by any input file
-        let flatten_target = target.to_path_buf();
-        if !input_files.contains(&flatten_target) {
-            input_files.push(flatten_target.clone());
-        }
-
-        let sources = Source::read_all_files(input_files)?;
-        let graph = Graph::<SolData>::resolve_sources(self, sources)?;
-        let ordered_deps = collect_ordered_deps(&flatten_target, self, &graph)?;
-
-        #[cfg(windows)]
-        let ordered_deps = {
-            use path_slash::PathBufExt;
-
-            let mut deps = ordered_deps;
-            for p in &mut deps {
-                *p = PathBuf::from(p.to_slash_lossy().to_string());
-            }
-            deps
-        };
-
-        let mut sources = Vec::new();
-        let mut experimental_pragma = None;
-        let mut version_pragmas = Vec::new();
-
-        let mut result = String::new();
-
-        for path in ordered_deps.iter() {
-            let node_id = graph.files().get(path).ok_or_else(|| {
-                SolcError::msg(format!("cannot resolve file at {}", path.display()))
-            })?;
-            let node = graph.node(*node_id);
-            let content = node.content();
-
-            // First we strip all licenses and version pragmas.
-            // We keep the target file's pragma and license, placing them at the beginning
-            // of the result.
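-            // (The flattened output therefore begins with the target's license and a
-            // single combined version pragma, followed by the cleaned source of each
-            // dependency in order.)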
- let mut ranges_to_remove = Vec::new(); - - if let Some(license) = &node.data.license { - ranges_to_remove.push(license.loc()); - if *path == flatten_target { - result.push_str(&content[license.loc()]); - result.push('\n'); - } - } - if let Some(version) = &node.data.version { - let content = &content[version.loc()]; - ranges_to_remove.push(version.loc()); - version_pragmas.push(content); - } - if let Some(experimental) = &node.data.experimental { - ranges_to_remove.push(experimental.loc()); - if experimental_pragma.is_none() { - experimental_pragma = Some(content[experimental.loc()].to_owned()); - } - } - for import in &node.data.imports { - ranges_to_remove.push(import.loc()); - } - ranges_to_remove.sort_by_key(|loc| loc.start); - - let mut content = content.as_bytes().to_vec(); - let mut offset = 0_isize; - - for range in ranges_to_remove { - let repl_range = utils::range_by_offset(&range, offset); - offset -= repl_range.len() as isize; - content.splice(repl_range, std::iter::empty()); - } - - let mut content = String::from_utf8(content).map_err(|err| { - SolcError::msg(format!("failed to convert extended bytes to string: {err}")) - })?; - - // Iterate over all aliased imports, and replace alias with real name via regexes - for alias in node.data.imports.iter().flat_map(|i| i.data().aliases()) { - let (alias, target) = match alias { - SolImportAlias::Contract(alias, target) => (alias.clone(), target.clone()), - _ => continue, - }; - let name_regex = utils::create_contract_or_lib_name_regex(&alias); - let target_len = target.len() as isize; - let mut replace_offset = 0; - for cap in name_regex.captures_iter(&content.clone()) { - if cap.name("ignore").is_some() { - continue; - } - if let Some(name_match) = - ["n1", "n2", "n3"].iter().find_map(|name| cap.name(name)) - { - let name_match_range = - utils::range_by_offset(&name_match.range(), replace_offset); - replace_offset += target_len - (name_match_range.len() as isize); - content.replace_range(name_match_range, &target); - } - } - } - - let content = format!( - "// {}\n{}", - path.strip_prefix(&self.root).unwrap_or(path).display(), - content - ); - - sources.push(content); - } - - if let Some(version) = combine_version_pragmas(version_pragmas) { - result.push_str(&version); - result.push('\n'); - } - if let Some(experimental) = experimental_pragma { - result.push_str(&experimental); - result.push('\n'); - } - - for source in sources { - result.push_str("\n\n"); - result.push_str(&source); - } - - Ok(format!("{}\n", utils::RE_THREE_OR_MORE_NEWLINES.replace_all(&result, "\n\n").trim())) - } -} - -impl ProjectPathsConfig { - /// Creates a new hardhat style config instance which points to the canonicalized root path - pub fn hardhat(root: &Path) -> Result { - PathStyle::HardHat.paths(root) - } - - /// Creates a new dapptools style config instance which points to the canonicalized root path - pub fn dapptools(root: &Path) -> Result { - PathStyle::Dapptools.paths(root) - } - - /// Creates a new config with the current directory as the root - pub fn current_hardhat() -> Result { - Self::hardhat(&std::env::current_dir().map_err(|err| SolcError::io(err, "."))?) - } - - /// Creates a new config with the current directory as the root - pub fn current_dapptools() -> Result { - Self::dapptools(&std::env::current_dir().map_err(|err| SolcError::io(err, "."))?) 
-    }
-
-    /// Returns a new [ProjectPaths] instance that contains all directories configured for this
-    /// project
-    pub fn paths(&self) -> ProjectPaths {
-        ProjectPaths {
-            artifacts: self.artifacts.clone(),
-            build_infos: self.build_infos.clone(),
-            sources: self.sources.clone(),
-            tests: self.tests.clone(),
-            scripts: self.scripts.clone(),
-            libraries: self.libraries.iter().cloned().collect(),
-        }
-    }
-
-    /// Same as [`paths`][ProjectPathsConfig::paths] but strips the `root` from all paths.
-    ///
-    /// See: [`ProjectPaths::strip_prefix_all`]
-    pub fn paths_relative(&self) -> ProjectPaths {
-        let mut paths = self.paths();
-        paths.strip_prefix_all(&self.root);
-        paths
-    }
-
-    /// Creates all configured dirs and files
-    pub fn create_all(&self) -> std::result::Result<(), SolcIoError> {
-        if let Some(parent) = self.cache.parent() {
-            fs::create_dir_all(parent).map_err(|err| SolcIoError::new(err, parent))?;
-        }
-        fs::create_dir_all(&self.artifacts)
-            .map_err(|err| SolcIoError::new(err, &self.artifacts))?;
-        fs::create_dir_all(&self.sources).map_err(|err| SolcIoError::new(err, &self.sources))?;
-        fs::create_dir_all(&self.tests).map_err(|err| SolcIoError::new(err, &self.tests))?;
-        fs::create_dir_all(&self.scripts).map_err(|err| SolcIoError::new(err, &self.scripts))?;
-        for lib in &self.libraries {
-            fs::create_dir_all(lib).map_err(|err| SolcIoError::new(err, lib))?;
-        }
-        Ok(())
-    }
-
-    /// Converts all `\\` separators in _all_ paths to `/`
-    pub fn slash_paths(&mut self) {
-        #[cfg(windows)]
-        {
-            use path_slash::PathBufExt;
-
-            let slashed = |p: &mut PathBuf| {
-                *p = p.to_slash_lossy().as_ref().into();
-            };
-            slashed(&mut self.root);
-            slashed(&mut self.cache);
-            slashed(&mut self.artifacts);
-            slashed(&mut self.build_infos);
-            slashed(&mut self.sources);
-            slashed(&mut self.tests);
-            slashed(&mut self.scripts);
-
-            self.libraries.iter_mut().for_each(slashed);
-            self.remappings.iter_mut().for_each(Remapping::slash_path);
-
-            self.include_paths = std::mem::take(&mut self.include_paths)
-                .into_iter()
-                .map(|mut p| {
-                    slashed(&mut p);
-                    p
-                })
-                .collect();
-            self.allowed_paths = std::mem::take(&mut self.allowed_paths)
-                .into_iter()
-                .map(|mut p| {
-                    slashed(&mut p);
-                    p
-                })
-                .collect();
-        }
-    }
-
-    /// Returns true if the `file` belongs to a `library`, see [`Self::find_library_ancestor()`]
-    pub fn has_library_ancestor(&self, file: &Path) -> bool {
-        self.find_library_ancestor(file).is_some()
-    }
-
-    /// Returns the library the file belongs to
-    ///
-    /// Returns the first library that is an ancestor of the given `file`.
- /// - /// **Note:** this does not resolve remappings [`Self::resolve_import()`], instead this merely - /// checks if a `library` is a parent of `file` - /// - /// # Examples - /// - /// ```no_run - /// use foundry_compilers::ProjectPathsConfig; - /// use std::path::Path; - /// - /// let config: ProjectPathsConfig = ProjectPathsConfig::builder().lib("lib").build()?; - /// assert_eq!( - /// config.find_library_ancestor("lib/src/Greeter.sol".as_ref()), - /// Some(Path::new("lib")) - /// ); - /// Ok::<_, Box>(()) - /// ``` - pub fn find_library_ancestor(&self, file: &Path) -> Option<&Path> { - for lib in &self.libraries { - if lib.is_relative() - && file.is_absolute() - && file.starts_with(&self.root) - && file.starts_with(self.root.join(lib)) - || file.is_relative() - && lib.is_absolute() - && lib.starts_with(&self.root) - && self.root.join(file).starts_with(lib) - { - return Some(lib); - } - if file.starts_with(lib) { - return Some(lib); - } - } - - None - } - - /// Attempts to resolve an `import` from the given working directory. - /// - /// The `cwd` path is the parent dir of the file that includes the `import` - /// - /// This will also populate the `include_paths` with any nested library root paths that should - /// be provided to solc via `--include-path` because it uses absolute imports. - pub fn resolve_import_and_include_paths( - &self, - cwd: &Path, - import: &Path, - include_paths: &mut BTreeSet, - ) -> Result { - let component = import - .components() - .next() - .ok_or_else(|| SolcError::msg(format!("Empty import path {}", import.display())))?; - - if component == Component::CurDir || component == Component::ParentDir { - // if the import is relative we assume it's already part of the processed input - // file set - utils::normalize_solidity_import_path(cwd, import).map_err(|err| { - SolcError::msg(format!("failed to resolve relative import \"{err:?}\"")) - }) - } else { - // resolve library file - let resolved = self.resolve_library_import(cwd.as_ref(), import.as_ref()); - - if resolved.is_none() { - // absolute paths in solidity are a thing for example `import - // "src/interfaces/IConfig.sol"` which could either point to `cwd + - // src/interfaces/IConfig.sol`, or make use of a remapping (`src/=....`) - if let Some(lib) = self.find_library_ancestor(cwd) { - if let Some((include_path, import)) = - utils::resolve_absolute_library(lib, cwd, import) - { - // track the path for this absolute import inside a nested library - include_paths.insert(include_path); - return Ok(import); - } - } - // also try to resolve absolute imports from the project paths - for path in [&self.root, &self.sources, &self.tests, &self.scripts] { - if cwd.starts_with(path) { - if let Ok(import) = utils::normalize_solidity_import_path(path, import) { - return Ok(import); - } - } - } - } - - resolved.ok_or_else(|| { - SolcError::msg(format!( - "failed to resolve library import \"{:?}\"", - import.display() - )) - }) - } - } - - /// Attempts to resolve an `import` from the given working directory. 
-    ///
-    /// The `cwd` path is the parent dir of the file that includes the `import`
-    pub fn resolve_import(&self, cwd: &Path, import: &Path) -> Result<PathBuf> {
-        self.resolve_import_and_include_paths(cwd, import, &mut Default::default())
-    }
-
-    /// Attempts to find the path to the real solidity file that's imported via the given `import`
-    /// path by applying the configured remappings and checking the library dirs
-    ///
-    /// # Examples
-    ///
-    /// Following `@aave` dependency in the `lib` folder `node_modules`
-    ///
-    /// ```text
-    /// <root>/node_modules/@aave
-    /// ├── aave-token
-    /// │   ├── contracts
-    /// │   │   ├── open-zeppelin
-    /// │   │   ├── token
-    /// ├── governance-v2
-    ///     ├── contracts
-    ///         ├── interfaces
-    /// ```
-    ///
-    /// has this remapping: `@aave/=@aave/` (name:path) so contracts can be imported as
-    ///
-    /// ```solidity
-    /// import "@aave/governance-v2/contracts/governance/Executor.sol";
-    /// ```
-    ///
-    /// So that `Executor.sol` can be found by checking each `lib` folder (`node_modules`) with
-    /// applied remappings. Applying a remapping works by checking if the import path of an import
-    /// statement starts with the name of a remapping and replacing it with the remapping's `path`.
-    ///
-    /// There are some caveats, though: dapptools-style remappings usually include the `src` folder,
-    /// `ds-test/=lib/ds-test/src/`, so that imports look like `import "ds-test/test.sol";` (note the
-    /// missing `src` in the import path).
-    ///
-    /// For hardhat/npm style that's not always the case, most notably for [openzeppelin-contracts](https://github.com/OpenZeppelin/openzeppelin-contracts) if installed via npm.
-    /// The remapping is detected as `'@openzeppelin/=node_modules/@openzeppelin/contracts/'`, which
-    /// includes the source directory `contracts`; however, it's common to see import paths like:
-    ///
-    /// `import "@openzeppelin/contracts/token/ERC20/IERC20.sol";`
-    ///
-    /// instead of
-    ///
-    /// `import "@openzeppelin/token/ERC20/IERC20.sol";`
-    ///
-    /// There is no strict rule behind this, but because
-    /// [`foundry_compilers_artifacts::remappings::Remapping::find_many`] returns
-    /// `'@openzeppelin/=node_modules/@openzeppelin/contracts/'` we should handle the case if the
-    /// remapping path ends with `contracts` and the import path starts with `<name>/contracts`.
-    /// Otherwise we can end up with a resolved path that has a duplicate `contracts` segment:
-    /// `@openzeppelin/contracts/contracts/token/ERC20/IERC20.sol`. We check for this edge case
-    /// here so that both styles work out of the box.
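-    ///
-    /// For example (hypothetical layout): with the remapping
-    /// `@openzeppelin/=node_modules/@openzeppelin/contracts/`, the import path
-    /// `@openzeppelin/contracts/token/ERC20/IERC20.sol` first maps to
-    /// `node_modules/@openzeppelin/contracts/contracts/token/ERC20/IERC20.sol`; if that
-    /// file does not exist, the duplicate `contracts/` segment is dropped and
-    /// `node_modules/@openzeppelin/contracts/token/ERC20/IERC20.sol` is resolved instead.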
-    pub fn resolve_library_import(&self, cwd: &Path, import: &Path) -> Option<PathBuf> {
-        // if the import path starts with the name of the remapping then we get the resolved path
-        // by removing the name and adding the remainder to the path of the remapping
-        let cwd = cwd.strip_prefix(&self.root).unwrap_or(cwd);
-        if let Some(path) = self
-            .remappings
-            .iter()
-            .filter(|r| {
-                // only check remappings that are either global or for `cwd`
-                if let Some(ctx) = r.context.as_ref() {
-                    cwd.starts_with(ctx)
-                } else {
-                    true
-                }
-            })
-            .find_map(|r| {
-                import.strip_prefix(&r.name).ok().map(|stripped_import| {
-                    let lib_path = Path::new(&r.path).join(stripped_import);
-
-                    // we handle the edge case where the path of a remapping ends with "contracts"
-                    // (`<name>/=.../contracts`) and the stripped import also starts with
-                    // `contracts`
-                    if let Ok(adjusted_import) = stripped_import.strip_prefix("contracts/") {
-                        if r.path.ends_with("contracts/") && !lib_path.exists() {
-                            return Path::new(&r.path).join(adjusted_import);
-                        }
-                    }
-                    lib_path
-                })
-            })
-        {
-            Some(self.root.join(path))
-        } else {
-            utils::resolve_library(&self.libraries, import)
-        }
-    }
-
-    pub fn with_language<Lang>(self) -> ProjectPathsConfig<Lang> {
-        let Self {
-            root,
-            cache,
-            artifacts,
-            build_infos,
-            sources,
-            tests,
-            scripts,
-            libraries,
-            remappings,
-            include_paths,
-            allowed_paths,
-            _l,
-        } = self;
-
-        ProjectPathsConfig {
-            root,
-            cache,
-            artifacts,
-            build_infos,
-            sources,
-            tests,
-            scripts,
-            libraries,
-            remappings,
-            include_paths,
-            allowed_paths,
-            _l: PhantomData,
-        }
-    }
-
-    pub fn apply_lib_remappings(&self, mut libraries: Libraries) -> Libraries {
-        libraries.libs = libraries.libs
-            .into_iter()
-            .map(|(file, target)| {
-                let file = self.resolve_import(&self.root, &file).unwrap_or_else(|err| {
-                    warn!(target: "libs", "Failed to resolve library `{}` for linking: {:?}", file.display(), err);
-                    file
-                });
-                (file, target)
-            })
-            .collect();
-        libraries
-    }
-}
-
-impl<L: Language> ProjectPathsConfig<L> {
-    /// Returns all sources found under the project's configured `sources` path
-    pub fn read_sources(&self) -> Result<Sources> {
-        trace!("reading all sources from \"{}\"", self.sources.display());
-        Ok(Source::read_all_from(&self.sources, L::FILE_EXTENSIONS)?)
-    }
-
-    /// Returns all sources found under the project's configured `test` path
-    pub fn read_tests(&self) -> Result<Sources> {
-        trace!("reading all tests from \"{}\"", self.tests.display());
-        Ok(Source::read_all_from(&self.tests, L::FILE_EXTENSIONS)?)
-    }
-
-    /// Returns all sources found under the project's configured `script` path
-    pub fn read_scripts(&self) -> Result<Sources> {
-        trace!("reading all scripts from \"{}\"", self.scripts.display());
-        Ok(Source::read_all_from(&self.scripts, L::FILE_EXTENSIONS)?)
-    }
-
-    /// Returns true if there is at least one solidity file in this config.
-    ///
-    /// See also `Self::input_files()`.
-    pub fn has_input_files(&self) -> bool {
-        self.input_files_iter().next().is_some()
-    }
-
-    /// Returns an iterator that yields all solidity file paths for `Self::sources`, `Self::tests`
-    /// and `Self::scripts`
-    pub fn input_files_iter(&self) -> impl Iterator<Item = PathBuf> + '_ {
-        utils::source_files_iter(&self.sources, L::FILE_EXTENSIONS)
-            .chain(utils::source_files_iter(&self.tests, L::FILE_EXTENSIONS))
-            .chain(utils::source_files_iter(&self.scripts, L::FILE_EXTENSIONS))
-    }
-
-    /// Returns the combined set of solidity file paths for `Self::sources`, `Self::tests` and
-    /// `Self::scripts`
-    pub fn input_files(&self) -> Vec<PathBuf> {
-        self.input_files_iter().collect()
-    }
-
-    /// Returns the combined set of `Self::read_sources` + `Self::read_tests` + `Self::read_scripts`
-    pub fn read_input_files(&self) -> Result<Sources> {
-        Ok(Source::read_all_files(self.input_files())?)
-    }
-}
-
-impl<L> fmt::Display for ProjectPathsConfig<L> {
-    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        writeln!(f, "root: {}", self.root.display())?;
-        writeln!(f, "contracts: {}", self.sources.display())?;
-        writeln!(f, "artifacts: {}", self.artifacts.display())?;
-        writeln!(f, "tests: {}", self.tests.display())?;
-        writeln!(f, "scripts: {}", self.scripts.display())?;
-        writeln!(f, "libs:")?;
-        for lib in &self.libraries {
-            writeln!(f, "    {}", lib.display())?;
-        }
-        writeln!(f, "remappings:")?;
-        for remapping in &self.remappings {
-            writeln!(f, "    {remapping}")?;
-        }
-        Ok(())
-    }
-}
-
-/// This is a subset of [ProjectPathsConfig] that contains all relevant folders in the project
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
-pub struct ProjectPaths {
-    pub artifacts: PathBuf,
-    pub build_infos: PathBuf,
-    pub sources: PathBuf,
-    pub tests: PathBuf,
-    pub scripts: PathBuf,
-    pub libraries: BTreeSet<PathBuf>,
-}
-
-impl ProjectPaths {
-    /// Joins the folders' location with `root`
-    pub fn join_all(&mut self, root: &Path) -> &mut Self {
-        self.artifacts = root.join(&self.artifacts);
-        self.build_infos = root.join(&self.build_infos);
-        self.sources = root.join(&self.sources);
-        self.tests = root.join(&self.tests);
-        self.scripts = root.join(&self.scripts);
-        let libraries = std::mem::take(&mut self.libraries);
-        self.libraries.extend(libraries.into_iter().map(|p| root.join(p)));
-        self
-    }
-
-    /// Removes `base` from all folders
-    pub fn strip_prefix_all(&mut self, base: &Path) -> &mut Self {
-        if let Ok(stripped) = self.artifacts.strip_prefix(base) {
-            self.artifacts = stripped.to_path_buf();
-        }
-        if let Ok(stripped) = self.build_infos.strip_prefix(base) {
-            self.build_infos = stripped.to_path_buf();
-        }
-        if let Ok(stripped) = self.sources.strip_prefix(base) {
-            self.sources = stripped.to_path_buf();
-        }
-        if let Ok(stripped) = self.tests.strip_prefix(base) {
-            self.tests = stripped.to_path_buf();
-        }
-        if let Ok(stripped) = self.scripts.strip_prefix(base) {
-            self.scripts = stripped.to_path_buf();
-        }
-        self.libraries = std::mem::take(&mut self.libraries)
-            .into_iter()
-            .map(|path| strip_prefix_owned(path, base))
-            .collect();
-        self
-    }
-}
-
-impl Default for ProjectPaths {
-    fn default() -> Self {
-        Self {
-            artifacts: "out".into(),
-            build_infos: ["out", "build-info"].iter().collect::<PathBuf>(),
-            sources: "src".into(),
-            tests: "test".into(),
-            scripts: "script".into(),
-            libraries: Default::default(),
-        }
-    }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum PathStyle {
-    HardHat,
-    Dapptools,
-}
-
-impl PathStyle {
-    /// Convert into a `ProjectPathsConfig` given the root path and based on the style
-    pub fn paths<L>(&self, root: &Path) -> Result<ProjectPathsConfig<L>> {
-        let root = utils::canonicalize(root)?;
-
-        Ok(match self {
-            Self::Dapptools => ProjectPathsConfig::builder()
-                .sources(root.join("src"))
-                .artifacts(root.join("out"))
-                .build_infos(root.join("out").join("build-info"))
-                .lib(root.join("lib"))
-                .remappings(Remapping::find_many(&root.join("lib")))
-                .root(root)
-                .build()?,
-            Self::HardHat => ProjectPathsConfig::builder()
-                .sources(root.join("contracts"))
-                .artifacts(root.join("artifacts"))
-                .build_infos(root.join("artifacts").join("build-info"))
-                .lib(root.join("node_modules"))
-                .root(root)
-                .build()?,
-        })
-    }
-}
-
-#[derive(Clone, Debug, Default)]
-pub struct ProjectPathsConfigBuilder {
-    root: Option<PathBuf>,
-    cache: Option<PathBuf>,
-    artifacts: Option<PathBuf>,
-    build_infos: Option<PathBuf>,
-    sources: Option<PathBuf>,
-    tests: Option<PathBuf>,
-    scripts: Option<PathBuf>,
-    libraries: Option<Vec<PathBuf>>,
-    remappings: Option<Vec<Remapping>>,
-    include_paths: BTreeSet<PathBuf>,
-    allowed_paths: BTreeSet<PathBuf>,
-}
-
-impl ProjectPathsConfigBuilder {
-    pub fn root(mut self, root: impl Into<PathBuf>) -> Self {
-        self.root = Some(utils::canonicalized(root));
-        self
-    }
-
-    pub fn cache(mut self, cache: impl Into<PathBuf>) -> Self {
-        self.cache = Some(utils::canonicalized(cache));
-        self
-    }
-
-    pub fn artifacts(mut self, artifacts: impl Into<PathBuf>) -> Self {
-        self.artifacts = Some(utils::canonicalized(artifacts));
-        self
-    }
-
-    pub fn build_infos(mut self, build_infos: impl Into<PathBuf>) -> Self {
-        self.build_infos = Some(utils::canonicalized(build_infos));
-        self
-    }
-
-    pub fn sources(mut self, sources: impl Into<PathBuf>) -> Self {
-        self.sources = Some(utils::canonicalized(sources));
-        self
-    }
-
-    pub fn tests(mut self, tests: impl Into<PathBuf>) -> Self {
-        self.tests = Some(utils::canonicalized(tests));
-        self
-    }
-
-    pub fn scripts(mut self, scripts: impl Into<PathBuf>) -> Self {
-        self.scripts = Some(utils::canonicalized(scripts));
-        self
-    }
-
-    /// Specifically disallow additional libraries
-    pub fn no_libs(mut self) -> Self {
-        self.libraries = Some(Vec::new());
-        self
-    }
-
-    pub fn lib(mut self, lib: impl Into<PathBuf>) -> Self {
-        self.libraries.get_or_insert_with(Vec::new).push(utils::canonicalized(lib));
-        self
-    }
-
-    pub fn libs(mut self, libs: impl IntoIterator<Item = impl Into<PathBuf>>) -> Self {
-        let libraries = self.libraries.get_or_insert_with(Vec::new);
-        for lib in libs.into_iter() {
-            libraries.push(utils::canonicalized(lib));
-        }
-        self
-    }
-
-    pub fn remapping(mut self, remapping: Remapping) -> Self {
-        self.remappings.get_or_insert_with(Vec::new).push(remapping);
-        self
-    }
-
-    pub fn remappings(mut self, remappings: impl IntoIterator<Item = Remapping>) -> Self {
-        let our_remappings = self.remappings.get_or_insert_with(Vec::new);
-        for remapping in remappings.into_iter() {
-            our_remappings.push(remapping);
-        }
-        self
-    }
-
-    /// Adds an allowed-path to the solc executable
-    pub fn allowed_path<P: Into<PathBuf>>(mut self, path: P) -> Self {
-        self.allowed_paths.insert(path.into());
-        self
-    }
-
-    /// Adds multiple allowed-paths to the solc executable
-    pub fn allowed_paths<I, S>(mut self, args: I) -> Self
-    where
-        I: IntoIterator<Item = S>,
-        S: Into<PathBuf>,
-    {
-        for arg in args {
-            self = self.allowed_path(arg);
-        }
-        self
-    }
-
-    /// Adds an `--include-path` to the solc executable
-    pub fn include_path<P: Into<PathBuf>>(mut self, path: P) -> Self {
-        self.include_paths.insert(path.into());
-        self
-    }
-
-    /// Adds multiple include-paths to the solc executable
-    pub fn include_paths<I, S>(mut self, args: I) -> Self
-    where
-        I: IntoIterator<Item = S>,
-        S: Into<PathBuf>,
-    {
-        for arg in args {
-            self = self.include_path(arg);
-        }
-        self
-    }
ProjectPathsConfig { - let root = utils::canonicalized(root); - - let libraries = self.libraries.unwrap_or_else(|| ProjectPathsConfig::find_libs(&root)); - let artifacts = - self.artifacts.unwrap_or_else(|| ProjectPathsConfig::find_artifacts_dir(&root)); - - let mut allowed_paths = self.allowed_paths; - // allow every contract under root by default - allowed_paths.insert(root.clone()); - - ProjectPathsConfig { - cache: self - .cache - .unwrap_or_else(|| root.join("cache").join(SOLIDITY_FILES_CACHE_FILENAME)), - build_infos: self.build_infos.unwrap_or_else(|| artifacts.join("build-info")), - artifacts, - sources: self.sources.unwrap_or_else(|| ProjectPathsConfig::find_source_dir(&root)), - tests: self.tests.unwrap_or_else(|| root.join("test")), - scripts: self.scripts.unwrap_or_else(|| root.join("script")), - remappings: self.remappings.unwrap_or_else(|| { - libraries.iter().flat_map(|p| Remapping::find_many(p)).collect() - }), - libraries, - root, - include_paths: self.include_paths, - allowed_paths, - _l: PhantomData, - } - } - - pub fn build(self) -> std::result::Result, SolcIoError> { - let root = self - .root - .clone() - .map(Ok) - .unwrap_or_else(std::env::current_dir) - .map_err(|err| SolcIoError::new(err, "."))?; - Ok(self.build_with_root(root)) - } -} - -/// The config to use when compiling the contracts -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct SolcConfig { - /// How the file was compiled - pub settings: Settings, -} - -impl SolcConfig { - /// Creates a new [`SolcConfig`] builder. - /// - /// # Examples - /// - /// Autodetect solc version and default settings - /// - /// ``` - /// use foundry_compilers::SolcConfig; - /// - /// let config = SolcConfig::builder().build(); - /// ``` - pub fn builder() -> SolcConfigBuilder { - SolcConfigBuilder::default() - } -} - -impl From for Settings { - fn from(config: SolcConfig) -> Self { - config.settings - } -} - -#[derive(Default)] -pub struct SolcConfigBuilder { - settings: Option, - - /// additionally selected outputs that should be included in the `Contract` that solc creates. 
- output_selection: Vec, - - /// whether to include the AST in the output - ast: bool, -} - -impl SolcConfigBuilder { - pub fn settings(mut self, settings: Settings) -> Self { - self.settings = Some(settings); - self - } - - /// Adds another `ContractOutputSelection` to the set - #[must_use] - pub fn additional_output(mut self, output: impl Into) -> Self { - self.output_selection.push(output.into()); - self - } - - /// Adds multiple `ContractOutputSelection` to the set - #[must_use] - pub fn additional_outputs(mut self, outputs: I) -> Self - where - I: IntoIterator, - S: Into, - { - for out in outputs { - self = self.additional_output(out); - } - self - } - - pub fn ast(mut self, yes: bool) -> Self { - self.ast = yes; - self - } - - /// Creates the solc settings - pub fn build(self) -> Settings { - let Self { settings, output_selection, ast } = self; - let mut settings = settings.unwrap_or_default(); - settings.push_all(output_selection); - if ast { - settings = settings.with_ast(); - } - settings - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_autodetect_dirs() { - let root = utils::tempdir("root").unwrap(); - let out = root.path().join("out"); - let artifacts = root.path().join("artifacts"); - let build_infos = artifacts.join("build-info"); - let contracts = root.path().join("contracts"); - let src = root.path().join("src"); - let lib = root.path().join("lib"); - let node_modules = root.path().join("node_modules"); - - let root = root.path(); - assert_eq!(ProjectPathsConfig::find_source_dir(root), src,); - std::fs::create_dir_all(&contracts).unwrap(); - assert_eq!(ProjectPathsConfig::find_source_dir(root), contracts,); - assert_eq!( - ProjectPathsConfig::builder().build_with_root::<()>(root).sources, - utils::canonicalized(contracts), - ); - std::fs::create_dir_all(&src).unwrap(); - assert_eq!(ProjectPathsConfig::find_source_dir(root), src,); - assert_eq!( - ProjectPathsConfig::builder().build_with_root::<()>(root).sources, - utils::canonicalized(src), - ); - - assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,); - std::fs::create_dir_all(&artifacts).unwrap(); - assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), artifacts,); - assert_eq!( - ProjectPathsConfig::builder().build_with_root::<()>(root).artifacts, - utils::canonicalized(artifacts), - ); - std::fs::create_dir_all(&build_infos).unwrap(); - assert_eq!( - ProjectPathsConfig::builder().build_with_root::<()>(root).build_infos, - utils::canonicalized(build_infos) - ); - - std::fs::create_dir_all(&out).unwrap(); - assert_eq!(ProjectPathsConfig::find_artifacts_dir(root), out,); - assert_eq!( - ProjectPathsConfig::builder().build_with_root::<()>(root).artifacts, - utils::canonicalized(out), - ); - - assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],); - std::fs::create_dir_all(&node_modules).unwrap(); - assert_eq!(ProjectPathsConfig::find_libs(root), vec![node_modules.clone()],); - assert_eq!( - ProjectPathsConfig::builder().build_with_root::<()>(root).libraries, - vec![utils::canonicalized(node_modules)], - ); - std::fs::create_dir_all(&lib).unwrap(); - assert_eq!(ProjectPathsConfig::find_libs(root), vec![lib.clone()],); - assert_eq!( - ProjectPathsConfig::builder().build_with_root::<()>(root).libraries, - vec![utils::canonicalized(lib)], - ); - } - - #[test] - fn can_have_sane_build_info_default() { - let root = utils::tempdir("root").unwrap(); - let root = root.path(); - let artifacts = root.join("forge-artifacts"); - - // Set the artifacts directory without setting the - // 
build info directory - let paths = ProjectPathsConfig::builder().artifacts(&artifacts).build_with_root::<()>(root); - - // The artifacts should be set correctly based on the configured value - assert_eq!(paths.artifacts, utils::canonicalized(artifacts)); - - // The build infos should by default in the artifacts directory - assert_eq!(paths.build_infos, utils::canonicalized(paths.artifacts.join("build-info"))); - } - - #[test] - #[cfg_attr(windows, ignore = "Windows remappings #2347")] - fn can_find_library_ancestor() { - let mut config = ProjectPathsConfig::builder().lib("lib").build::<()>().unwrap(); - config.root = "/root/".into(); - - assert_eq!( - config.find_library_ancestor("lib/src/Greeter.sol".as_ref()).unwrap(), - Path::new("lib") - ); - - assert_eq!( - config.find_library_ancestor("/root/lib/src/Greeter.sol".as_ref()).unwrap(), - Path::new("lib") - ); - - config.libraries.push("/root/test/".into()); - - assert_eq!( - config.find_library_ancestor("test/src/Greeter.sol".as_ref()).unwrap(), - Path::new("/root/test/") - ); - - assert_eq!( - config.find_library_ancestor("/root/test/src/Greeter.sol".as_ref()).unwrap(), - Path::new("/root/test/") - ); - } - - #[test] - fn can_resolve_import() { - let dir = tempfile::tempdir().unwrap(); - let config = ProjectPathsConfig::builder().root(dir.path()).build::<()>().unwrap(); - config.create_all().unwrap(); - - fs::write(config.sources.join("A.sol"), r"pragma solidity ^0.8.0; contract A {}").unwrap(); - - // relative import - assert_eq!( - config - .resolve_import_and_include_paths( - &config.sources, - Path::new("./A.sol"), - &mut Default::default(), - ) - .unwrap(), - config.sources.join("A.sol") - ); - - // direct import - assert_eq!( - config - .resolve_import_and_include_paths( - &config.sources, - Path::new("src/A.sol"), - &mut Default::default(), - ) - .unwrap(), - config.sources.join("A.sol") - ); - } - - #[test] - fn can_resolve_remapped_import() { - let dir = tempfile::tempdir().unwrap(); - let mut config = ProjectPathsConfig::builder().root(dir.path()).build::<()>().unwrap(); - config.create_all().unwrap(); - - let dependency = config.root.join("dependency"); - fs::create_dir(&dependency).unwrap(); - fs::write(dependency.join("A.sol"), r"pragma solidity ^0.8.0; contract A {}").unwrap(); - - config.remappings.push(Remapping { - context: None, - name: "@dependency/".into(), - path: "dependency/".into(), - }); - - assert_eq!( - config - .resolve_import_and_include_paths( - &config.sources, - Path::new("@dependency/A.sol"), - &mut Default::default(), - ) - .unwrap(), - dependency.join("A.sol") - ); - } -} diff --git a/crates/compilers/src/filter.rs b/crates/compilers/src/filter.rs deleted file mode 100644 index 7ab20d80..00000000 --- a/crates/compilers/src/filter.rs +++ /dev/null @@ -1,180 +0,0 @@ -//! Types to apply filter to input types - -use crate::{ - compilers::{multi::MultiCompilerParsedSource, CompilerSettings, ParsedSource}, - resolver::{parse::SolData, GraphEdges}, - Sources, -}; -use foundry_compilers_artifacts::output_selection::OutputSelection; -use std::{ - collections::HashSet, - fmt, - path::{Path, PathBuf}, -}; - -/// A predicate property that determines whether a file satisfies a certain condition -pub trait FileFilter: dyn_clone::DynClone + Send + Sync { - /// The predicate function that should return if the given `file` should be included. 
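// Because `FileFilter` is blanket-implemented for `Fn(&Path) -> bool` closures
// (see the impl that follows), a plain closure can serve as a filter. A
// standalone sketch of the same predicate idea, with a hypothetical `.s.sol`
// naming convention swapped in for the file list:
use std::path::Path;

fn main() {
    let only_tests = |p: &Path| p.to_str().map_or(false, |s| s.ends_with(".t.sol"));
    let files = [Path::new("src/A.sol"), Path::new("test/A.t.sol")];
    let kept: Vec<_> = files.iter().copied().filter(|p| only_tests(p)).collect();
    assert_eq!(kept, [Path::new("test/A.t.sol")]);
}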
- fn is_match(&self, file: &Path) -> bool; -} - -dyn_clone::clone_trait_object!(FileFilter); - -impl bool + Clone + Send + Sync> FileFilter for F { - fn is_match(&self, file: &Path) -> bool { - (self)(file) - } -} - -/// An [FileFilter] that matches all solidity files that end with `.t.sol` -#[derive(Clone, Default)] -pub struct TestFileFilter { - _priv: (), -} - -impl fmt::Debug for TestFileFilter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TestFileFilter").finish() - } -} - -impl fmt::Display for TestFileFilter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("TestFileFilter") - } -} - -impl FileFilter for TestFileFilter { - fn is_match(&self, file: &Path) -> bool { - file.file_name().and_then(|s| s.to_str()).map(|s| s.ends_with(".t.sol")).unwrap_or_default() - } -} - -pub trait MaybeSolData { - fn sol_data(&self) -> Option<&SolData>; -} - -impl MaybeSolData for SolData { - fn sol_data(&self) -> Option<&SolData> { - Some(self) - } -} - -impl MaybeSolData for MultiCompilerParsedSource { - fn sol_data(&self) -> Option<&SolData> { - match self { - Self::Solc(data) => Some(data), - _ => None, - } - } -} - -/// A type that can apply a filter to a set of preprocessed [Sources] in order to set sparse -/// output for specific files -#[derive(Default)] -pub enum SparseOutputFilter<'a> { - /// Sets the configured [OutputSelection] for dirty files only. - /// - /// In other words, we request the output of solc only for files that have been detected as - /// _dirty_. - #[default] - Optimized, - /// Apply an additional filter to [Sources] to - Custom(&'a dyn FileFilter), -} - -impl<'a> SparseOutputFilter<'a> { - pub fn new(filter: Option<&'a dyn FileFilter>) -> Self { - if let Some(f) = filter { - SparseOutputFilter::Custom(f) - } else { - SparseOutputFilter::Optimized - } - } - - /// While solc needs all the files to compile the actual _dirty_ files, we can tell solc to - /// output everything for those dirty files as currently configured in the settings, but output - /// nothing for the other files that are _not_ dirty. - /// - /// This will modify the [OutputSelection] of the [CompilerSettings] so that we explicitly - /// select the files' output based on their state. - /// - /// This also takes the project's graph as input, this allows us to check if the files the - /// filter matches depend on libraries that need to be linked - pub fn sparse_sources( - &self, - sources: &Sources, - settings: &mut S, - graph: &GraphEdges, - ) -> Vec { - let mut full_compilation: HashSet = sources - .dirty_files() - .flat_map(|file| { - // If we have a custom filter and file does not match, we skip it. - if let Self::Custom(f) = self { - if !f.is_match(file) { - return vec![]; - } - } - - // Collect compilation dependencies for sources needing compilation. - let mut required_sources = vec![file.clone()]; - if let Some(data) = graph.get_parsed_source(file) { - let imports = graph.imports(file).into_iter().filter_map(|import| { - graph.get_parsed_source(import).map(|data| (import.as_path(), data)) - }); - for import in data.compilation_dependencies(imports) { - let import = import.to_path_buf(); - - #[cfg(windows)] - let import = { - use path_slash::PathBufExt; - - PathBuf::from(import.to_slash_lossy().to_string()) - }; - - required_sources.push(import); - } - } - - required_sources - }) - .collect(); - - // Remove clean sources, those will be read from cache. 
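// A condensed, std-only sketch of the selection swap performed by
// `sparse_sources`: dirty files keep the full (default) output selection,
// clean files get an empty one so solc emits nothing for them. `Selection`
// here is a stand-in for the real per-file `OutputSelection`.
use std::collections::{BTreeMap, HashSet};
use std::path::PathBuf;

type Selection = Vec<String>; // stand-in type for illustration

fn sparse_selection(
    all_files: &[PathBuf],
    dirty: &HashSet<PathBuf>,
    default_selection: &Selection,
) -> BTreeMap<String, Selection> {
    all_files
        .iter()
        .map(|file| {
            let output =
                if dirty.contains(file) { default_selection.clone() } else { Selection::new() };
            (file.display().to_string(), output)
        })
        .collect()
}

fn main() {
    let (a, b) = (PathBuf::from("src/A.sol"), PathBuf::from("src/B.sol"));
    let dirty: HashSet<_> = [a.clone()].into_iter().collect();
    let selection = sparse_selection(&[a, b], &dirty, &vec!["abi".to_string()]);
    assert_eq!(selection["src/A.sol"], ["abi".to_string()]); // dirty: full output
    assert!(selection["src/B.sol"].is_empty()); // clean: read from cache
}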
- full_compilation.retain(|file| sources.0.get(file).map_or(false, |s| s.is_dirty())); - - settings.update_output_selection(|selection| { - trace!( - "optimizing output selection for {} sources", - sources.len() - full_compilation.len() - ); - let default_selection = selection - .as_mut() - .remove("*") - .unwrap_or_else(OutputSelection::default_file_output_selection); - - // set output selections - for file in sources.0.keys() { - let key = file.display().to_string(); - let output = if full_compilation.contains(file) { - default_selection.clone() - } else { - OutputSelection::empty_file_output_select() - }; - selection.as_mut().insert(key, output); - } - }); - - full_compilation.into_iter().collect() - } -} - -impl<'a> fmt::Debug for SparseOutputFilter<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - SparseOutputFilter::Optimized => f.write_str("Optimized"), - SparseOutputFilter::Custom(_) => f.write_str("Custom"), - } - } -} diff --git a/crates/compilers/src/flatten.rs b/crates/compilers/src/flatten.rs deleted file mode 100644 index e1fdcc4b..00000000 --- a/crates/compilers/src/flatten.rs +++ /dev/null @@ -1,889 +0,0 @@ -use crate::{ - compilers::{Compiler, ParsedSource}, - filter::MaybeSolData, - resolver::parse::SolData, - CompilerSettings, Graph, Project, ProjectPathsConfig, -}; -use foundry_compilers_artifacts::{ - ast::{visitor::Visitor, *}, - output_selection::OutputSelection, - solc::ExternalInlineAssemblyReference, - sources::{Source, Sources}, - ContractDefinitionPart, SourceUnit, SourceUnitPart, -}; -use foundry_compilers_core::{ - error::{Result, SolcError}, - utils, -}; -use itertools::Itertools; -use std::{ - collections::{HashMap, HashSet}, - hash::Hash, - path::{Path, PathBuf}, -}; -use visitor::Walk; - -/// Alternative of `SourceLocation` which includes path of the file. -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -struct ItemLocation { - path: PathBuf, - start: usize, - end: usize, -} - -impl ItemLocation { - fn try_from_source_loc(src: &SourceLocation, path: PathBuf) -> Option { - let start = src.start?; - let end = start + src.length?; - - Some(Self { path, start, end }) - } - - fn length(&self) -> usize { - self.end - self.start - } -} - -/// Visitor exploring AST and collecting all references to declarations via `Identifier` and -/// `IdentifierPath` nodes. -/// -/// It also collects `MemberAccess` parts. So, if we have `X.Y` expression, loc and AST ID will be -/// saved for Y only. -/// -/// That way, even if we have a long `MemberAccess` expression (a.b.c.d) then the first member (a) -/// will be collected as either `Identifier` or `IdentifierPath`, and all subsequent parts (b, c, d) -/// will be collected as `MemberAccess` parts. 
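// A worked example of the `MemberAccess` span arithmetic described above:
// only the final member name of `a.b.c` is recorded, and it occupies the last
// `member_name.len()` bytes of the expression's source range. The offsets
// below are hypothetical.
fn member_name_span(src_start: usize, src_length: usize, member_name: &str) -> (usize, usize) {
    let start = src_start + src_length - member_name.len();
    (start, start + member_name.len())
}

fn main() {
    // `foo.bar` starting at byte 100 spans 7 bytes; `bar` sits at 104..107.
    assert_eq!(member_name_span(100, 7, "bar"), (104, 107));
}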
-struct ReferencesCollector {
-    path: PathBuf,
-    references: HashMap<isize, HashSet<ItemLocation>>,
-}
-
-impl ReferencesCollector {
-    fn process_referenced_declaration(&mut self, id: isize, src: &SourceLocation) {
-        if let Some(loc) = ItemLocation::try_from_source_loc(src, self.path.clone()) {
-            self.references.entry(id).or_default().insert(loc);
-        }
-    }
-}
-
-impl Visitor for ReferencesCollector {
-    fn visit_identifier(&mut self, identifier: &Identifier) {
-        if let Some(id) = identifier.referenced_declaration {
-            self.process_referenced_declaration(id, &identifier.src);
-        }
-    }
-
-    fn visit_identifier_path(&mut self, path: &IdentifierPath) {
-        self.process_referenced_declaration(path.referenced_declaration, &path.src);
-    }
-
-    fn visit_member_access(&mut self, access: &MemberAccess) {
-        if let Some(referenced_declaration) = access.referenced_declaration {
-            if let (Some(src_start), Some(src_length)) = (access.src.start, access.src.length) {
-                let name_length = access.member_name.len();
-                // The accessed member name occupies the last `member_name.len()` bytes of the
-                // expression's source range.
-                let start = src_start + src_length - name_length;
-                let end = start + name_length;
-
-                self.references.entry(referenced_declaration).or_default().insert(ItemLocation {
-                    start,
-                    end,
-                    path: self.path.to_path_buf(),
-                });
-            }
-        }
-    }
-
-    fn visit_external_assembly_reference(&mut self, reference: &ExternalInlineAssemblyReference) {
-        let mut src = reference.src.clone();
-
-        // If a suffix is used in the assembly reference (e.g. `value.slot`), it is included in
-        // `src`. However, we are only interested in the referenced name, so we strip the
-        // `.suffix` part.
-        if let Some(suffix) = &reference.suffix {
-            if let Some(len) = src.length.as_mut() {
-                let suffix_len = suffix.to_string().len();
-                *len -= suffix_len + 1;
-            }
-        }
-
-        self.process_referenced_declaration(reference.declaration as isize, &src);
-    }
-}
-
-/// Updates to be applied to the sources.
-/// source_path -> (start, end, new_value)
-type Updates = HashMap<PathBuf, HashSet<(usize, usize, String)>>;
-
-pub struct FlatteningResult<'a> {
-    /// Updated sources in the order they should be written to the output file.
-    sources: Vec<String>,
-    /// Pragmas that should be present in the target file.
-    pragmas: Vec<String>,
-    /// License identifier that should be present in the target file.
- license: Option<&'a str>, -} - -impl<'a> FlatteningResult<'a> { - fn new( - flattener: &Flattener, - mut updates: Updates, - pragmas: Vec, - license: Option<&'a str>, - ) -> Self { - let mut sources = Vec::new(); - - for path in &flattener.ordered_sources { - let mut content = flattener.sources.get(path).unwrap().content.as_bytes().to_vec(); - let mut offset: isize = 0; - if let Some(updates) = updates.remove(path) { - let mut updates = updates.iter().collect::>(); - updates.sort_by_key(|(start, _, _)| *start); - for (start, end, new_value) in updates { - let start = (*start as isize + offset) as usize; - let end = (*end as isize + offset) as usize; - - content.splice(start..end, new_value.bytes()); - offset += new_value.len() as isize - (end - start) as isize; - } - } - let content = format!( - "// {}\n{}", - path.strip_prefix(&flattener.project_root).unwrap_or(path).display(), - String::from_utf8(content).unwrap() - ); - sources.push(content); - } - - Self { sources, pragmas, license } - } - - fn get_flattened_target(&self) -> String { - let mut result = String::new(); - - if let Some(license) = &self.license { - result.push_str(&format!("// {license}\n")); - } - for pragma in &self.pragmas { - result.push_str(&format!("{pragma}\n")); - } - for source in &self.sources { - result.push_str(&format!("\n\n{source}")); - } - - format!("{}\n", utils::RE_THREE_OR_MORE_NEWLINES.replace_all(&result, "\n\n").trim()) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum FlattenerError { - #[error("Failed to compile {0}")] - Compilation(SolcError), - #[error(transparent)] - Other(SolcError), -} - -impl> From for FlattenerError { - fn from(err: T) -> Self { - Self::Other(err.into()) - } -} - -/// Context for flattening. Stores all sources and ASTs that are in scope of the flattening target. -pub struct Flattener { - /// Target file to flatten. - target: PathBuf, - /// Sources including only target and it dependencies (imports of any depth). - sources: Sources, - /// Vec of (path, ast) pairs. - asts: Vec<(PathBuf, SourceUnit)>, - /// Sources in the order they should be written to the output file. - ordered_sources: Vec, - /// Project root directory. - project_root: PathBuf, -} - -impl Flattener { - /// Compiles the target file and prepares AST and analysis data for flattening. - pub fn new( - mut project: Project, - target: &Path, - ) -> std::result::Result - where - C::ParsedSource: MaybeSolData, - { - // Configure project to compile the target file and only request AST for target file. 
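// A self-contained sketch of the update-application loop in
// `FlatteningResult::new` above: replacements are applied in ascending start
// order while a running `offset` tracks how earlier edits shifted the text.
fn apply_updates(source: &str, mut updates: Vec<(usize, usize, String)>) -> String {
    updates.sort_by_key(|(start, _, _)| *start);
    let mut content = source.as_bytes().to_vec();
    let mut offset: isize = 0;
    for (start, end, new_value) in updates {
        // Shift the original byte range by the net growth/shrink so far.
        let start = (start as isize + offset) as usize;
        let end = (end as isize + offset) as usize;
        content.splice(start..end, new_value.bytes());
        offset += new_value.len() as isize - (end - start) as isize;
    }
    String::from_utf8(content).unwrap()
}

fn main() {
    // Hypothetical rename update: replace `A` (bytes 9..10) with `Counter_0`.
    let updated = apply_updates("contract A {}", vec![(9, 10, "Counter_0".to_string())]);
    assert_eq!(updated, "contract Counter_0 {}");
}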
- project.cached = false; - project.no_artifacts = true; - project.settings.update_output_selection(|selection| { - *selection = OutputSelection::ast_output_selection(); - }); - - let output = project.compile_file(target).map_err(FlattenerError::Compilation)?; - - if output.has_compiler_errors() { - return Err(FlattenerError::Compilation(SolcError::msg(&output))); - } - - let output = output.compiler_output; - - let sources = Source::read_all_files(vec![target.to_path_buf()])?; - let graph = Graph::::resolve_sources(&project.paths, sources)?; - - let ordered_sources = collect_ordered_deps(&target.to_path_buf(), &project.paths, &graph)?; - - #[cfg(windows)] - let ordered_sources = { - let mut sources = ordered_sources; - use path_slash::PathBufExt; - for p in &mut sources { - *p = PathBuf::from(p.to_slash_lossy().to_string()); - } - sources - }; - - let sources = Source::read_all(&ordered_sources)?; - - // Convert all ASTs from artifacts to strongly typed ASTs - let mut asts: Vec<(PathBuf, SourceUnit)> = Vec::new(); - for (path, ast) in output.sources.0.iter().filter_map(|(path, files)| { - if let Some(ast) = files.first().and_then(|source| source.source_file.ast.as_ref()) { - if sources.contains_key(path) { - return Some((path, ast)); - } - } - None - }) { - asts.push((PathBuf::from(path), serde_json::from_str(&serde_json::to_string(ast)?)?)); - } - - Ok(Self { - target: target.into(), - sources, - asts, - ordered_sources, - project_root: project.root().clone(), - }) - } - - /// Flattens target file and returns the result as a string - /// - /// Flattening process includes following steps: - /// 1. Find all file-level definitions and rename references to them via aliased or qualified - /// imports. - /// 2. Find all duplicates among file-level definitions and rename them to avoid conflicts. - /// 3. Remove all imports. - /// 4. Remove all pragmas except for the ones in the target file. - /// 5. Remove all license identifiers except for the one in the target file. - pub fn flatten(&self) -> String { - let mut updates = Updates::new(); - - let top_level_names = self.rename_top_level_definitions(&mut updates); - self.rename_contract_level_types_references(&top_level_names, &mut updates); - self.remove_qualified_imports(&mut updates); - self.update_inheritdocs(&top_level_names, &mut updates); - - self.remove_imports(&mut updates); - let target_pragmas = self.process_pragmas(&mut updates); - let target_license = self.process_licenses(&mut updates); - - self.flatten_result(updates, target_pragmas, target_license).get_flattened_target() - } - - fn flatten_result<'a>( - &'a self, - updates: Updates, - target_pragmas: Vec, - target_license: Option<&'a str>, - ) -> FlatteningResult<'a> { - FlatteningResult::new(self, updates, target_pragmas, target_license) - } - - /// Finds and goes over all references to file-level definitions and updates them to match - /// definition name. This is needed for two reasons: - /// 1. We want to rename all aliased or qualified imports. - /// 2. We want to find any duplicates and rename them to avoid conflicts. - /// - /// If we find more than 1 declaration with the same name, it's name is getting changed. 
- /// Two Counter contracts will be renamed to Counter_0 and Counter_1 - /// - /// Returns mapping from top-level declaration id to its name (possibly updated) - fn rename_top_level_definitions(&self, updates: &mut Updates) -> HashMap { - let top_level_definitions = self.collect_top_level_definitions(); - let references = self.collect_references(); - - let mut top_level_names = HashMap::new(); - - for (name, ids) in top_level_definitions { - let mut definition_name = name.to_string(); - let needs_rename = ids.len() > 1; - - let mut ids = ids.clone().into_iter().collect::>(); - if needs_rename { - // `loc.path` is expected to be different for each id because there can't be 2 - // top-level eclarations with the same name in the same file. - // - // Sorting by index loc.path in sorted files to make the renaming process - // deterministic. - ids.sort_by_key(|(_, loc)| { - self.ordered_sources.iter().position(|p| p == &loc.path).unwrap() - }); - } - for (i, (id, loc)) in ids.iter().enumerate() { - if needs_rename { - definition_name = format!("{name}_{i}"); - } - updates.entry(loc.path.clone()).or_default().insert(( - loc.start, - loc.end, - definition_name.clone(), - )); - if let Some(references) = references.get(&(*id as isize)) { - for loc in references { - updates.entry(loc.path.clone()).or_default().insert(( - loc.start, - loc.end, - definition_name.clone(), - )); - } - } - - top_level_names.insert(*id, definition_name.clone()); - } - } - top_level_names - } - - /// This is not very clean, but in most cases effective enough method to remove qualified - /// imports from sources. - /// - /// Every qualified import part is an `Identifier` with `referencedDeclaration` field matching - /// ID of one of the import directives. - /// - /// This approach works by firstly collecting all IDs of import directives, and then looks for - /// any references of them. Once the reference is found, it's full length is getting removed - /// from source + 1 charater ('.') - /// - /// This should work correctly for vast majority of cases, however there are situations for - /// which such approach won't work, most of which are related to code being formatted in an - /// uncommon way. - fn remove_qualified_imports(&self, updates: &mut Updates) { - let imports_ids = self - .asts - .iter() - .flat_map(|(_, ast)| { - ast.nodes.iter().filter_map(|node| match node { - SourceUnitPart::ImportDirective(directive) => Some(directive.id), - _ => None, - }) - }) - .collect::>(); - - let references = self.collect_references(); - - for (id, locs) in references { - if !imports_ids.contains(&(id as usize)) { - continue; - } - - for loc in locs { - updates.entry(loc.path).or_default().insert(( - loc.start, - loc.end + 1, - String::new(), - )); - } - } - } - - /// Here we are going through all references to items defined in scope of contracts and updating - /// them to be using correct parent contract name. - /// - /// This will only operate on references from `IdentifierPath` nodes. 
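// A self-contained sketch of the `{name}_{index}` rename scheme used by
// `rename_top_level_definitions` above, assuming duplicates are indexed in
// their (already deterministic) source order:
fn dedup_names(names: &[&str]) -> Vec<String> {
    use std::collections::HashMap;
    let mut counts: HashMap<&str, usize> = HashMap::new();
    for &name in names {
        *counts.entry(name).or_default() += 1;
    }
    let mut next_index: HashMap<&str, usize> = HashMap::new();
    names
        .iter()
        .map(|&name| {
            if counts[name] > 1 {
                // Duplicate: append a running index per name.
                let i = next_index.entry(name).or_default();
                let renamed = format!("{name}_{i}");
                *i += 1;
                renamed
            } else {
                name.to_string()
            }
        })
        .collect()
}

fn main() {
    assert_eq!(dedup_names(&["Counter", "Counter", "Math"]), ["Counter_0", "Counter_1", "Math"]);
}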
- fn rename_contract_level_types_references( - &self, - top_level_names: &HashMap, - updates: &mut Updates, - ) { - let contract_level_definitions = self.collect_contract_level_definitions(); - - for (path, ast) in &self.asts { - for node in &ast.nodes { - let mut collector = - ReferencesCollector { path: self.target.clone(), references: HashMap::new() }; - - node.walk(&mut collector); - - let references = collector.references; - - for (id, locs) in references { - if let Some((name, contract_id)) = - contract_level_definitions.get(&(id as usize)) - { - for loc in &locs { - // If child item is referenced directly by it's name it's either defined - // in the same contract or in one of it's base contracts, so we don't - // have to change anything. - // Comparing lengths is enough because such items cannot be aliased. - if loc.length() == name.len() { - continue; - } - // If it was referenced somehow else, we rename it to `Parent.Child` - // format. - let parent_name = top_level_names.get(contract_id).unwrap(); - updates.entry(path.clone()).or_default().insert(( - loc.start, - loc.end, - format!("{parent_name}.{name}"), - )); - } - } - } - } - } - } - - /// Finds all @inheritdoc tags in natspec comments and tries replacing them. - /// - /// We will either replace contract name or remove @inheritdoc tag completely to avoid - /// generating invalid source code. - fn update_inheritdocs(&self, top_level_names: &HashMap, updates: &mut Updates) { - trace!("updating @inheritdoc tags"); - for (path, ast) in &self.asts { - // Collect all exported symbols for this source unit - // @inheritdoc value is either one of those or qualified import path which we don't - // support - let exported_symbols = ast - .exported_symbols - .iter() - .filter_map( - |(name, ids)| { - if !ids.is_empty() { - Some((name.as_str(), ids[0])) - } else { - None - } - }, - ) - .collect::>(); - - // Collect all docs in all contracts - let docs = ast - .nodes - .iter() - .filter_map(|node| match node { - SourceUnitPart::ContractDefinition(d) => Some(d), - _ => None, - }) - .flat_map(|contract| { - contract.nodes.iter().filter_map(|node| match node { - ContractDefinitionPart::EventDefinition(event) => { - event.documentation.as_ref() - } - ContractDefinitionPart::ErrorDefinition(error) => { - error.documentation.as_ref() - } - ContractDefinitionPart::FunctionDefinition(func) => { - func.documentation.as_ref() - } - ContractDefinitionPart::VariableDeclaration(var) => { - var.documentation.as_ref() - } - _ => None, - }) - }); - - docs.for_each(|doc| { - let Documentation::Structured(doc) = doc else { - return - }; - let src_start = doc.src.start.unwrap(); - let src_end = src_start + doc.src.length.unwrap(); - - // Documentation node has `text` field, however, it does not contain - // slashes and we can't use if to find positions. - let content: &str = &self.sources.get(path).unwrap().content[src_start..src_end]; - let tag_len = "@inheritdoc".len(); - - if let Some(tag_start) = content.find("@inheritdoc") { - trace!("processing doc with content {:?}", content); - if let Some(name_start) = content[tag_start + tag_len..] - .find(|c| c != ' ') - .map(|p| p + tag_start + tag_len) - { - let name_end = content[name_start..] 
- .find([' ', '\n', '*', '/']) - .map(|p| p + name_start) - .unwrap_or(content.len()); - - let name = &content[name_start..name_end]; - trace!("found name {name}"); - - let mut new_name = None; - - if let Some(ast_id) = exported_symbols.get(name) { - if let Some(name) = top_level_names.get(ast_id) { - new_name = Some(name); - } else { - trace!(identifiers=?top_level_names, "ast id {ast_id} cannot be matched to top-level identifier"); - } - } - - if let Some(new_name) = new_name { - trace!("updating tag value with {new_name}"); - updates.entry(path.to_path_buf()).or_default().insert(( - src_start + name_start, - src_start + name_end, - new_name.to_string(), - )); - } else { - trace!("name is unknown, removing @inheritdoc tag"); - updates.entry(path.to_path_buf()).or_default().insert(( - src_start + tag_start, - src_start + name_end, - String::new(), - )); - } - } - } - }); - } - } - - /// Processes all ASTs and collects all top-level definitions in the form of - /// a mapping from name to (definition id, source location) - fn collect_top_level_definitions(&self) -> HashMap<&String, HashSet<(usize, ItemLocation)>> { - self.asts - .iter() - .flat_map(|(path, ast)| { - ast.nodes - .iter() - .filter_map(|node| match node { - SourceUnitPart::ContractDefinition(contract) => Some(( - &contract.name, - contract.id, - &contract.src, - &contract.name_location, - )), - SourceUnitPart::EnumDefinition(enum_) => { - Some((&enum_.name, enum_.id, &enum_.src, &enum_.name_location)) - } - SourceUnitPart::StructDefinition(struct_) => { - Some((&struct_.name, struct_.id, &struct_.src, &struct_.name_location)) - } - SourceUnitPart::FunctionDefinition(func) => { - Some((&func.name, func.id, &func.src, &func.name_location)) - } - SourceUnitPart::VariableDeclaration(var) => { - Some((&var.name, var.id, &var.src, &var.name_location)) - } - SourceUnitPart::UserDefinedValueTypeDefinition(type_) => { - Some((&type_.name, type_.id, &type_.src, &type_.name_location)) - } - _ => None, - }) - .map(|(name, id, src, maybe_name_src)| { - let loc = match maybe_name_src { - Some(src) => { - ItemLocation::try_from_source_loc(src, path.clone()).unwrap() - } - None => { - // Find location of name in source - let content: &str = &self.sources.get(path).unwrap().content; - let start = src.start.unwrap(); - let end = start + src.length.unwrap(); - - let name_start = content[start..end].find(name).unwrap(); - let name_end = name_start + name.len(); - - ItemLocation { - path: path.clone(), - start: start + name_start, - end: start + name_end, - } - } - }; - - (name, (id, loc)) - }) - }) - .fold(HashMap::new(), |mut acc, (name, (id, item_location))| { - acc.entry(name).or_default().insert((id, item_location)); - acc - }) - } - - /// Collect all contract-level definitions in the form of a mapping from definition id to - /// (definition name, contract id) - fn collect_contract_level_definitions(&self) -> HashMap { - self.asts - .iter() - .flat_map(|(_, ast)| { - ast.nodes.iter().filter_map(|node| match node { - SourceUnitPart::ContractDefinition(contract) => { - Some((contract.id, &contract.nodes)) - } - _ => None, - }) - }) - .flat_map(|(contract_id, nodes)| { - nodes.iter().filter_map(move |node| match node { - ContractDefinitionPart::EnumDefinition(enum_) => { - Some((enum_.id, (&enum_.name, contract_id))) - } - ContractDefinitionPart::ErrorDefinition(error) => { - Some((error.id, (&error.name, contract_id))) - } - ContractDefinitionPart::EventDefinition(event) => { - Some((event.id, (&event.name, contract_id))) - } - 
ContractDefinitionPart::StructDefinition(struct_) => { - Some((struct_.id, (&struct_.name, contract_id))) - } - ContractDefinitionPart::FunctionDefinition(function) => { - Some((function.id, (&function.name, contract_id))) - } - ContractDefinitionPart::VariableDeclaration(variable) => { - Some((variable.id, (&variable.name, contract_id))) - } - ContractDefinitionPart::UserDefinedValueTypeDefinition(value_type) => { - Some((value_type.id, (&value_type.name, contract_id))) - } - _ => None, - }) - }) - .collect() - } - - /// Collects all references to any declaration in the form of a mapping from declaration id to - /// set of source locations it appears in - fn collect_references(&self) -> HashMap> { - self.asts - .iter() - .flat_map(|(path, ast)| { - let mut collector = - ReferencesCollector { path: path.clone(), references: HashMap::new() }; - ast.walk(&mut collector); - collector.references - }) - .fold(HashMap::new(), |mut acc, (id, locs)| { - acc.entry(id).or_default().extend(locs); - acc - }) - } - - /// Removes all imports from all sources. - fn remove_imports(&self, updates: &mut Updates) { - for loc in self.collect_imports() { - updates.entry(loc.path.clone()).or_default().insert(( - loc.start, - loc.end, - String::new(), - )); - } - } - - // Collects all imports locations. - fn collect_imports(&self) -> HashSet { - self.asts - .iter() - .flat_map(|(path, ast)| { - ast.nodes.iter().filter_map(|node| match node { - SourceUnitPart::ImportDirective(import) => { - ItemLocation::try_from_source_loc(&import.src, path.clone()) - } - _ => None, - }) - }) - .collect() - } - - /// Removes all pragma directives from all sources. Returns Vec with experimental and combined - /// version pragmas (if present). - fn process_pragmas(&self, updates: &mut Updates) -> Vec { - let mut abicoder_v2 = None; - - let pragmas = self.collect_pragmas(); - let mut version_pragmas = Vec::new(); - - for loc in &pragmas { - let pragma_content = self.read_location(loc); - if pragma_content.contains("experimental") || pragma_content.contains("abicoder") { - if abicoder_v2.is_none() { - abicoder_v2 = Some(self.read_location(loc).to_string()); - } - } else if pragma_content.contains("solidity") { - version_pragmas.push(pragma_content); - } - - updates.entry(loc.path.clone()).or_default().insert(( - loc.start, - loc.end, - String::new(), - )); - } - - let mut pragmas = Vec::new(); - - if let Some(version_pragma) = combine_version_pragmas(version_pragmas) { - pragmas.push(version_pragma); - } - - if let Some(pragma) = abicoder_v2 { - pragmas.push(pragma); - } - - pragmas - } - - // Collects all pragma directives locations. - fn collect_pragmas(&self) -> HashSet { - self.asts - .iter() - .flat_map(|(path, ast)| { - ast.nodes.iter().filter_map(|node| match node { - SourceUnitPart::PragmaDirective(import) => { - ItemLocation::try_from_source_loc(&import.src, path.clone()) - } - _ => None, - }) - }) - .collect() - } - - /// Removes all license identifiers from all sources. Returns licesnse identifier from target - /// file, if any. 
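// A condensed sketch of the pragma triage in `process_pragmas` above: the
// first `experimental`/`abicoder` pragma is kept verbatim, every `solidity`
// version pragma is collected for combining, and all of them are removed from
// the sources themselves.
fn triage_pragmas<'a>(pragmas: &[&'a str]) -> (Vec<&'a str>, Option<&'a str>) {
    let mut version_pragmas = Vec::new();
    let mut abicoder_v2 = None;
    for &p in pragmas {
        if p.contains("experimental") || p.contains("abicoder") {
            if abicoder_v2.is_none() {
                abicoder_v2 = Some(p);
            }
        } else if p.contains("solidity") {
            version_pragmas.push(p);
        }
    }
    (version_pragmas, abicoder_v2)
}

fn main() {
    let (versions, abicoder) = triage_pragmas(&[
        "pragma solidity ^0.8.0;",
        "pragma abicoder v2;",
        "pragma solidity >=0.8.4;",
    ]);
    assert_eq!(versions, ["pragma solidity ^0.8.0;", "pragma solidity >=0.8.4;"]);
    assert_eq!(abicoder, Some("pragma abicoder v2;"));
}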
- fn process_licenses(&self, updates: &mut Updates) -> Option<&str> { - let mut target_license = None; - - for loc in &self.collect_licenses() { - if loc.path == self.target { - let license_line = self.read_location(loc); - let license_start = license_line.find("SPDX-License-Identifier:").unwrap(); - target_license = Some(license_line[license_start..].trim()); - } - updates.entry(loc.path.clone()).or_default().insert(( - loc.start, - loc.end, - String::new(), - )); - } - - target_license - } - - // Collects all SPDX-License-Identifier locations. - fn collect_licenses(&self) -> HashSet { - self.sources - .iter() - .flat_map(|(path, source)| { - let mut licenses = HashSet::new(); - if let Some(license_start) = source.content.find("SPDX-License-Identifier:") { - let start = - source.content[..license_start].rfind('\n').map(|i| i + 1).unwrap_or(0); - let end = start - + source.content[start..] - .find('\n') - .unwrap_or(source.content.len() - start); - licenses.insert(ItemLocation { path: path.clone(), start, end }); - } - licenses - }) - .collect() - } - - // Reads value from the given location of a source file. - fn read_location(&self, loc: &ItemLocation) -> &str { - let content: &str = &self.sources.get(&loc.path).unwrap().content; - &content[loc.start..loc.end] - } -} - -/// Performs DFS to collect all dependencies of a target -fn collect_deps( - path: &PathBuf, - paths: &ProjectPathsConfig, - graph: &Graph, - deps: &mut HashSet, -) -> Result<()> { - if deps.insert(path.clone()) { - let target_dir = path.parent().ok_or_else(|| { - SolcError::msg(format!("failed to get parent directory for \"{}\"", path.display())) - })?; - - let node_id = graph - .files() - .get(path) - .ok_or_else(|| SolcError::msg(format!("cannot resolve file at {}", path.display())))?; - - if let Some(data) = graph.node(*node_id).data.sol_data() { - for import in &data.imports { - let path = paths.resolve_import(target_dir, import.data().path())?; - collect_deps(&path, paths, graph, deps)?; - } - } - } - Ok(()) -} - -/// We want to make order in which sources are written to resulted flattened file -/// deterministic. -/// -/// We can't just sort files alphabetically as it might break compilation, because Solidity -/// does not allow base class definitions to appear after derived contract -/// definitions. -/// -/// Instead, we sort files by the number of their dependencies (imports of any depth) in ascending -/// order. If files have the same number of dependencies, we sort them alphabetically. -/// Target file is always placed last. 
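// A sketch of the ordering rule documented above, with hypothetical
// dependency counts: sort by (dependency count, path), then append the
// flattening target last.
use std::path::PathBuf;

fn order_sources(mut with_counts: Vec<(usize, PathBuf)>, target: PathBuf) -> Vec<PathBuf> {
    // Tuples sort lexicographically: fewer deps first, ties broken by path.
    with_counts.sort();
    let mut ordered: Vec<PathBuf> = with_counts.into_iter().map(|(_, p)| p).collect();
    ordered.push(target);
    ordered
}

fn main() {
    let ordered = order_sources(
        vec![(1, "src/B.sol".into()), (0, "src/A.sol".into()), (0, "lib/C.sol".into())],
        "src/Target.sol".into(),
    );
    assert_eq!(
        ordered,
        ["lib/C.sol", "src/A.sol", "src/B.sol", "src/Target.sol"].map(PathBuf::from)
    );
}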
-pub fn collect_ordered_deps( - path: &PathBuf, - paths: &ProjectPathsConfig, - graph: &Graph, -) -> Result> { - let mut deps = HashSet::new(); - collect_deps(path, paths, graph, &mut deps)?; - - // Remove path prior counting dependencies - // It will be added later to the end of resulted Vec - deps.remove(path); - - let mut paths_with_deps_count = Vec::new(); - for path in deps { - let mut path_deps = HashSet::new(); - collect_deps(&path, paths, graph, &mut path_deps)?; - paths_with_deps_count.push((path_deps.len(), path)); - } - - paths_with_deps_count.sort(); - - let mut ordered_deps = - paths_with_deps_count.into_iter().map(|(_, path)| path).collect::>(); - - ordered_deps.push(path.clone()); - - Ok(ordered_deps) -} - -pub fn combine_version_pragmas(pragmas: Vec<&str>) -> Option { - let mut versions = pragmas - .into_iter() - .filter_map(|p| { - SolData::parse_version_req( - p.replace("pragma", "").replace("solidity", "").replace(';', "").trim(), - ) - .ok() - }) - .flat_map(|req| req.comparators) - .collect::>() - .into_iter() - .map(|comp| comp.to_string()) - .collect::>(); - - versions.sort(); - - if !versions.is_empty() { - return Some(format!("pragma solidity {};", versions.iter().format(" "))); - } - - None -} diff --git a/crates/compilers/src/lib.rs b/crates/compilers/src/lib.rs deleted file mode 100644 index 474fd3b4..00000000 --- a/crates/compilers/src/lib.rs +++ /dev/null @@ -1,1020 +0,0 @@ -#![doc = include_str!("../README.md")] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -#[macro_use] -extern crate tracing; - -#[cfg(feature = "project-util")] -#[macro_use] -extern crate foundry_compilers_core; - -mod artifact_output; -pub use artifact_output::*; - -pub mod buildinfo; - -pub mod cache; - -pub mod flatten; - -pub mod resolver; -pub use resolver::Graph; - -pub mod compilers; -pub use compilers::*; - -mod compile; -pub use compile::{ - output::{AggregatedCompilerOutput, ProjectCompileOutput}, - *, -}; - -mod config; -pub use config::{PathStyle, ProjectPaths, ProjectPathsConfig, SolcConfig}; - -mod filter; -pub use filter::{FileFilter, SparseOutputFilter, TestFileFilter}; - -pub mod zksync; - -pub mod report; - -/// Utilities for creating, mocking and testing of (temporary) projects -#[cfg(feature = "project-util")] -pub mod project_util; - -pub use foundry_compilers_artifacts as artifacts; -pub use foundry_compilers_core::{error, utils}; - -use cache::CompilerCache; -use compile::output::contracts::VersionedContracts; -use compilers::multi::MultiCompiler; -use derivative::Derivative; -use foundry_compilers_artifacts::solc::{ - output_selection::OutputSelection, - sources::{Source, SourceCompilationKind, Sources}, - Contract, Severity, SourceFile, StandardJsonCompilerInput, -}; -use foundry_compilers_core::error::{Result, SolcError, SolcIoError}; -use output::sources::{VersionedSourceFile, VersionedSourceFiles}; -use project::ProjectCompiler; -use semver::Version; -use solang_parser::pt::SourceUnitPart; -use solc::SolcSettings; -use std::{ - collections::{BTreeMap, HashMap, HashSet}, - fs, - path::{Path, PathBuf}, -}; - -/// Represents a project workspace and handles `solc` compiling of all contracts in that workspace. -#[derive(Clone, Derivative)] -#[derivative(Debug)] -pub struct Project { - pub compiler: C, - /// Compiler versions locked for specific languages. 
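// A simplified, string-level approximation of `combine_version_pragmas` above
// (the real implementation parses comparators with `semver`): unique
// comparator tokens from all version pragmas are merged into one pragma line.
fn combine(pragmas: &[&str]) -> Option<String> {
    use std::collections::BTreeSet;
    let comparators: BTreeSet<&str> = pragmas
        .iter()
        .filter_map(|p| p.strip_prefix("pragma solidity"))
        .flat_map(|rest| rest.trim_end_matches(';').split_whitespace())
        .collect();
    if comparators.is_empty() {
        return None;
    }
    Some(format!("pragma solidity {};", comparators.into_iter().collect::<Vec<_>>().join(" ")))
}

fn main() {
    let combined = combine(&["pragma solidity ^0.8.0;", "pragma solidity ^0.8.0 >=0.8.4;"]);
    assert_eq!(combined, Some("pragma solidity >=0.8.4 ^0.8.0;".to_string()));
}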
- pub locked_versions: HashMap, - /// The layout of the project - pub paths: ProjectPathsConfig, - /// The compiler settings - pub settings: C::Settings, - /// Whether caching is enabled - pub cached: bool, - /// Whether to output build information with each solc call. - pub build_info: bool, - /// Whether writing artifacts to disk is enabled - pub no_artifacts: bool, - /// Handles all artifacts related tasks, reading and writing from the artifact dir. - pub artifacts: T, - /// Errors/Warnings which match these error codes are not going to be logged - pub ignored_error_codes: Vec, - /// Errors/Warnings which match these file paths are not going to be logged - pub ignored_file_paths: Vec, - /// The minimum severity level that is treated as a compiler error - pub compiler_severity_filter: Severity, - /// Maximum number of `solc` processes to run simultaneously. - solc_jobs: usize, - /// Offline mode, if set, network access (download solc) is disallowed - pub offline: bool, - /// Windows only config value to ensure the all paths use `/` instead of `\\`, same as `solc` - /// - /// This is a noop on other platforms - pub slash_paths: bool, - /// Optional sparse output filter used to optimize compilation. - #[derivative(Debug = "ignore")] - pub sparse_output: Option>, -} - -impl Project { - /// Convenience function to call `ProjectBuilder::default()`. - /// - /// # Examples - /// - /// Configure with [ConfigurableArtifacts] artifacts output and [MultiCompiler] compiler: - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let config = Project::builder().build(Default::default())?; - /// # Ok::<(), Box>(()) - /// ``` - /// - /// To configure any a project with any `ArtifactOutput` use either: - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let config = Project::builder().build(Default::default())?; - /// # Ok::<(), Box>(()) - /// ``` - /// - /// or use the builder directly: - /// ```no_run - /// use foundry_compilers::{multi::MultiCompiler, ConfigurableArtifacts, ProjectBuilder}; - /// - /// let config = ProjectBuilder::::default().build(Default::default())?; - /// # Ok::<(), Box>(()) - /// ``` - pub fn builder() -> ProjectBuilder { - ProjectBuilder::default() - } -} - -impl Project { - /// Returns the handler that takes care of processing all artifacts - pub fn artifacts_handler(&self) -> &T { - &self.artifacts - } -} - -impl Project -where - C::Settings: Into, -{ - /// Returns standard-json-input to compile the target contract - pub fn standard_json_input(&self, target: &Path) -> Result { - trace!(?target, "Building standard-json-input"); - let graph = Graph::::resolve(&self.paths)?; - let target_index = graph.files().get(target).ok_or_else(|| { - SolcError::msg(format!("cannot resolve file at {:?}", target.display())) - })?; - - let mut sources = Vec::new(); - let mut unique_paths = HashSet::new(); - let (path, source) = graph.node(*target_index).unpack(); - unique_paths.insert(path.clone()); - sources.push((path, source)); - sources.extend( - graph - .all_imported_nodes(*target_index) - .map(|index| graph.node(index).unpack()) - .filter(|(p, _)| unique_paths.insert(p.to_path_buf())), - ); - - let root = self.root(); - let sources = sources - .into_iter() - .map(|(path, source)| (rebase_path(root, path), source.clone())) - .collect(); - - let mut settings = self.settings.clone().into(); - // strip the path to the project root from all remappings - settings.remappings = self - .paths - .remappings - .clone() - .into_iter() - .map(|r| 
r.into_relative(self.root()).to_relative_remapping()) - .collect::>(); - - let input = StandardJsonCompilerInput::new(sources, settings.settings); - - Ok(input) - } -} - -impl Project { - /// Returns the path to the artifacts directory - pub fn artifacts_path(&self) -> &PathBuf { - &self.paths.artifacts - } - - /// Returns the path to the sources directory - pub fn sources_path(&self) -> &PathBuf { - &self.paths.sources - } - - /// Returns the path to the cache file - pub fn cache_path(&self) -> &PathBuf { - &self.paths.cache - } - - /// Returns the path to the `build-info` directory nested in the artifacts dir - pub fn build_info_path(&self) -> &PathBuf { - &self.paths.build_infos - } - - /// Returns the root directory of the project - pub fn root(&self) -> &PathBuf { - &self.paths.root - } - - /// Convenience function to read the cache file. - /// See also [CompilerCache::read_joined()] - pub fn read_cache_file(&self) -> Result> { - CompilerCache::read_joined(&self.paths) - } - - /// Sets the maximum number of parallel `solc` processes to run simultaneously. - /// - /// # Panics - /// - /// if `jobs == 0` - pub fn set_solc_jobs(&mut self, jobs: usize) { - assert!(jobs > 0); - self.solc_jobs = jobs; - } - - /// Returns all sources found under the project's configured sources path - #[instrument(skip_all, fields(name = "sources"))] - pub fn sources(&self) -> Result { - self.paths.read_sources() - } - - /// Emit the cargo [`rerun-if-changed`](https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath) instruction. - /// - /// This tells Cargo to re-run the build script if a file inside the project's sources directory - /// has changed. - /// - /// Use this if you compile a project in a `build.rs` file. - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::{Project, ProjectPathsConfig}; - /// - /// // Configure the project with all its paths, solc, cache etc. - /// // where the root dir is the current Rust project. - /// let paths = ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR").as_ref())?; - /// let project = Project::builder().paths(paths).build(Default::default())?; - /// let output = project.compile()?; - /// - /// // Tell Cargo to rerun this build script that if a source file changes. - /// project.rerun_if_sources_changed(); - /// # Ok::<_, Box>(()) - /// ``` - pub fn rerun_if_sources_changed(&self) { - println!("cargo:rerun-if-changed={}", self.paths.sources.display()) - } - - pub fn compile(&self) -> Result> { - project::ProjectCompiler::new(self)?.compile() - } - - /// Convenience function to compile a single solidity file with the project's settings. - /// - /// # Examples - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile_file("example/Greeter.sol")?; - /// # Ok::<(), Box>(()) - /// ``` - pub fn compile_file(&self, file: impl Into) -> Result> { - let file = file.into(); - let source = Source::read(&file)?; - project::ProjectCompiler::with_sources(self, Sources::from([(file, source)]))?.compile() - } - - /// Convenience function to compile a series of solidity files with the project's settings. - /// Same as [`Self::compile()`] but with the given `files` as input. 
- /// - /// # Examples - /// ```no_run - /// use foundry_compilers::Project; - /// - /// let project = Project::builder().build(Default::default())?; - /// let output = project.compile_files(["examples/Foo.sol", "examples/Bar.sol"])?; - /// # Ok::<(), Box>(()) - /// ``` - pub fn compile_files(&self, files: I) -> Result> - where - I: IntoIterator, - P: Into, - { - let sources = Source::read_all(files)?; - - ProjectCompiler::with_sources(self, sources)?.compile() - } - - /// Removes the project's artifacts and cache file - /// - /// If the cache file was the only file in the folder, this also removes the empty folder. - /// - /// # Examples - /// ``` - /// use foundry_compilers::Project; - /// - /// let project = Project::builder().build(Default::default())?; - /// let _ = project.compile()?; - /// assert!(project.artifacts_path().exists()); - /// assert!(project.cache_path().exists()); - /// - /// project.cleanup(); - /// assert!(!project.artifacts_path().exists()); - /// assert!(!project.cache_path().exists()); - /// # Ok::<(), Box>(()) - /// ``` - pub fn cleanup(&self) -> std::result::Result<(), SolcIoError> { - trace!("clean up project"); - if self.cache_path().exists() { - std::fs::remove_file(self.cache_path()) - .map_err(|err| SolcIoError::new(err, self.cache_path()))?; - if let Some(cache_folder) = - self.cache_path().parent().filter(|cache_folder| self.root() != cache_folder) - { - // remove the cache folder if the cache file was the only file - if cache_folder - .read_dir() - .map_err(|err| SolcIoError::new(err, cache_folder))? - .next() - .is_none() - { - std::fs::remove_dir(cache_folder) - .map_err(|err| SolcIoError::new(err, cache_folder))?; - } - } - trace!("removed cache file \"{}\"", self.cache_path().display()); - } - - // clean the artifacts dir - if self.artifacts_path().exists() && self.root() != self.artifacts_path() { - std::fs::remove_dir_all(self.artifacts_path()) - .map_err(|err| SolcIoError::new(err, self.artifacts_path().clone()))?; - trace!("removed artifacts dir \"{}\"", self.artifacts_path().display()); - } - - // also clean the build-info dir, in case it's not nested in the artifacts dir - if self.build_info_path().exists() && self.root() != self.build_info_path() { - std::fs::remove_dir_all(self.build_info_path()) - .map_err(|err| SolcIoError::new(err, self.build_info_path().clone()))?; - tracing::trace!("removed build-info dir \"{}\"", self.build_info_path().display()); - } - - Ok(()) - } - - /// Runs solc compiler without requesting any output and collects a mapping from contract names - /// to source files containing artifact with given name. - fn collect_contract_names_solc(&self) -> Result>> - where - T: Clone, - C: Clone, - { - let mut temp_project = (*self).clone(); - temp_project.no_artifacts = true; - temp_project.settings.update_output_selection(|selection| { - *selection = OutputSelection::common_output_selection(["abi".to_string()]); - }); - - let output = temp_project.compile()?; - - if output.has_compiler_errors() { - return Err(SolcError::msg(output)); - } - - let contracts = output.into_artifacts().fold( - HashMap::new(), - |mut contracts: HashMap<_, Vec<_>>, (id, _)| { - contracts.entry(id.name).or_default().push(id.source); - contracts - }, - ); - - Ok(contracts) - } - - /// Parses project sources via solang parser, collecting mapping from contract name to source - /// files containing artifact with given name. On parser failure, fallbacks to - /// [Self::collect_contract_names_solc]. 
- fn collect_contract_names(&self) -> Result>> - where - T: Clone, - C: Clone, - { - let graph = Graph::::resolve(&self.paths)?; - let mut contracts: HashMap> = HashMap::new(); - - for file in graph.files().keys() { - let src = fs::read_to_string(file).map_err(|e| SolcError::io(e, file))?; - let Ok((parsed, _)) = solang_parser::parse(&src, 0) else { - return self.collect_contract_names_solc(); - }; - - for part in parsed.0 { - if let SourceUnitPart::ContractDefinition(contract) = part { - if let Some(name) = contract.name { - contracts.entry(name.name).or_default().push(file.clone()); - } - } - } - } - - Ok(contracts) - } - - /// Finds the path of the contract with the given name. - /// Throws error if multiple or no contracts with the same name are found. - pub fn find_contract_path(&self, target_name: &str) -> Result - where - T: Clone, - C: Clone, - { - let mut contracts = self.collect_contract_names()?; - - if contracts.get(target_name).map_or(true, |paths| paths.is_empty()) { - return Err(SolcError::msg(format!("No contract found with the name `{target_name}`"))); - } - let mut paths = contracts.remove(target_name).unwrap(); - if paths.len() > 1 { - return Err(SolcError::msg(format!( - "Multiple contracts found with the name `{target_name}`" - ))); - } - - Ok(paths.remove(0)) - } -} - -pub struct ProjectBuilder { - /// The layout of the - paths: Option>, - /// Compiler versions locked for specific languages. - locked_versions: HashMap, - /// How solc invocation should be configured. - settings: Option, - /// Whether caching is enabled, default is true. - cached: bool, - /// Whether to output build information with each solc call. - build_info: bool, - /// Whether writing artifacts to disk is enabled, default is true. - no_artifacts: bool, - /// Use offline mode - offline: bool, - /// Whether to slash paths of the `ProjectCompilerOutput` - slash_paths: bool, - /// handles all artifacts related tasks - artifacts: T, - /// Which error codes to ignore - pub ignored_error_codes: Vec, - /// Which file paths to ignore - pub ignored_file_paths: Vec, - /// The minimum severity level that is treated as a compiler error - compiler_severity_filter: Severity, - solc_jobs: Option, - /// Optional sparse output filter used to optimize compilation. 
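// A sketch of the disambiguation rule in `find_contract_path` above: zero
// candidate paths and multiple candidate paths are both errors; exactly one
// match is returned. The map contents below are hypothetical.
use std::collections::HashMap;
use std::path::PathBuf;

fn find_unique(by_name: &mut HashMap<String, Vec<PathBuf>>, name: &str) -> Result<PathBuf, String> {
    let mut paths = by_name.remove(name).unwrap_or_default();
    match paths.len() {
        0 => Err(format!("No contract found with the name `{name}`")),
        1 => Ok(paths.remove(0)),
        _ => Err(format!("Multiple contracts found with the name `{name}`")),
    }
}

fn main() {
    let mut contracts =
        HashMap::from([("Counter".to_string(), vec![PathBuf::from("src/Counter.sol")])]);
    assert_eq!(find_unique(&mut contracts, "Counter"), Ok(PathBuf::from("src/Counter.sol")));
    assert!(find_unique(&mut contracts, "Missing").is_err());
}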
-
-pub struct ProjectBuilder<C: Compiler = MultiCompiler, T: ArtifactOutput = ConfigurableArtifacts> {
-    /// The layout of the project.
-    paths: Option<ProjectPathsConfig<C::Language>>,
-    /// Compiler versions locked for specific languages.
-    locked_versions: HashMap<C::Language, Version>,
-    /// How solc invocation should be configured.
-    settings: Option<C::Settings>,
-    /// Whether caching is enabled, default is true.
-    cached: bool,
-    /// Whether to output build information with each solc call.
-    build_info: bool,
-    /// Whether writing artifacts to disk is enabled, default is true.
-    no_artifacts: bool,
-    /// Use offline mode
-    offline: bool,
-    /// Whether to slash paths of the `ProjectCompilerOutput`
-    slash_paths: bool,
-    /// handles all artifacts related tasks
-    artifacts: T,
-    /// Which error codes to ignore
-    pub ignored_error_codes: Vec<u64>,
-    /// Which file paths to ignore
-    pub ignored_file_paths: Vec<PathBuf>,
-    /// The minimum severity level that is treated as a compiler error
-    compiler_severity_filter: Severity,
-    solc_jobs: Option<usize>,
-    /// Optional sparse output filter used to optimize compilation.
-    sparse_output: Option<Box<dyn FileFilter>>,
-}
-
-impl<C: Compiler, T: ArtifactOutput> ProjectBuilder<C, T> {
-    /// Create a new builder with the given artifacts handler
-    pub fn new(artifacts: T) -> Self {
-        Self {
-            paths: None,
-            cached: true,
-            build_info: false,
-            no_artifacts: false,
-            offline: false,
-            slash_paths: true,
-            artifacts,
-            ignored_error_codes: Vec::new(),
-            ignored_file_paths: Vec::new(),
-            compiler_severity_filter: Severity::Error,
-            solc_jobs: None,
-            settings: None,
-            locked_versions: Default::default(),
-            sparse_output: None,
-        }
-    }
-
-    #[must_use]
-    pub fn paths(mut self, paths: ProjectPathsConfig<C::Language>) -> Self {
-        self.paths = Some(paths);
-        self
-    }
-
-    #[must_use]
-    pub fn settings(mut self, settings: C::Settings) -> Self {
-        self.settings = Some(settings);
-        self
-    }
-
-    #[must_use]
-    pub fn ignore_error_code(mut self, code: u64) -> Self {
-        self.ignored_error_codes.push(code);
-        self
-    }
-
-    #[must_use]
-    pub fn ignore_error_codes(mut self, codes: impl IntoIterator<Item = u64>) -> Self {
-        for code in codes {
-            self = self.ignore_error_code(code);
-        }
-        self
-    }
-
-    pub fn ignore_paths(mut self, paths: Vec<PathBuf>) -> Self {
-        self.ignored_file_paths = paths;
-        self
-    }
-
-    #[must_use]
-    pub fn set_compiler_severity_filter(mut self, compiler_severity_filter: Severity) -> Self {
-        self.compiler_severity_filter = compiler_severity_filter;
-        self
-    }
-
-    /// Disables cached builds
-    #[must_use]
-    pub fn ephemeral(self) -> Self {
-        self.set_cached(false)
-    }
-
-    /// Sets the cache status
-    #[must_use]
-    pub fn set_cached(mut self, cached: bool) -> Self {
-        self.cached = cached;
-        self
-    }
-
-    /// Sets the build info value
-    #[must_use]
-    pub fn set_build_info(mut self, build_info: bool) -> Self {
-        self.build_info = build_info;
-        self
-    }
-
-    /// Activates offline mode
-    ///
-    /// Prevents possible network access to download/check solc installs
-    #[must_use]
-    pub fn offline(self) -> Self {
-        self.set_offline(true)
-    }
-
-    /// Sets the offline status
-    #[must_use]
-    pub fn set_offline(mut self, offline: bool) -> Self {
-        self.offline = offline;
-        self
-    }
-
-    /// Sets whether to slash all paths on Windows
-    ///
-    /// If set to `true`, all `\\` separators are replaced with `/`, same as solc
-    #[must_use]
-    pub fn set_slashed_paths(mut self, slashed_paths: bool) -> Self {
-        self.slash_paths = slashed_paths;
-        self
-    }
-
-    /// Disables writing artifacts to disk
-    #[must_use]
-    pub fn no_artifacts(self) -> Self {
-        self.set_no_artifacts(true)
-    }
-
-    /// Sets the no artifacts status
-    #[must_use]
-    pub fn set_no_artifacts(mut self, artifacts: bool) -> Self {
-        self.no_artifacts = artifacts;
-        self
-    }
-
-    /// Sets the maximum number of parallel `solc` processes to run simultaneously.
- /// - /// # Panics - /// - /// `jobs` must be at least 1 - #[must_use] - pub fn solc_jobs(mut self, jobs: usize) -> Self { - assert!(jobs > 0); - self.solc_jobs = Some(jobs); - self - } - - /// Sets the number of parallel `solc` processes to `1`, no parallelization - #[must_use] - pub fn single_solc_jobs(self) -> Self { - self.solc_jobs(1) - } - - #[must_use] - pub fn locked_version(mut self, lang: impl Into, version: Version) -> Self { - self.locked_versions.insert(lang.into(), version); - self - } - - #[must_use] - pub fn locked_versions(mut self, versions: HashMap) -> Self { - self.locked_versions = versions; - self - } - - #[must_use] - pub fn sparse_output(mut self, filter: F) -> Self - where - F: FileFilter + 'static, - { - self.sparse_output = Some(Box::new(filter)); - self - } - - /// Set arbitrary `ArtifactOutputHandler` - pub fn artifacts(self, artifacts: A) -> ProjectBuilder { - let Self { - paths, - cached, - no_artifacts, - ignored_error_codes, - compiler_severity_filter, - solc_jobs, - offline, - build_info, - slash_paths, - ignored_file_paths, - settings, - locked_versions, - sparse_output, - .. - } = self; - ProjectBuilder { - paths, - cached, - no_artifacts, - offline, - slash_paths, - artifacts, - ignored_error_codes, - ignored_file_paths, - compiler_severity_filter, - solc_jobs, - build_info, - settings, - locked_versions, - sparse_output, - } - } - - pub fn build(self, compiler: C) -> Result> { - let Self { - paths, - cached, - no_artifacts, - artifacts, - ignored_error_codes, - ignored_file_paths, - compiler_severity_filter, - solc_jobs, - offline, - build_info, - slash_paths, - settings, - locked_versions, - sparse_output, - } = self; - - let mut paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; - - if slash_paths { - // ensures we always use `/` paths - paths.slash_paths(); - } - - Ok(Project { - compiler, - paths, - cached, - build_info, - no_artifacts, - artifacts, - ignored_error_codes, - ignored_file_paths, - compiler_severity_filter, - solc_jobs: solc_jobs - .or_else(|| std::thread::available_parallelism().ok().map(|n| n.get())) - .unwrap_or(1), - offline, - slash_paths, - settings: settings.unwrap_or_default(), - locked_versions, - sparse_output, - }) - } -} - -impl Default for ProjectBuilder { - fn default() -> Self { - Self::new(T::default()) - } -} - -impl ArtifactOutput for Project { - type Artifact = T::Artifact; - - fn on_output( - &self, - contracts: &VersionedContracts, - sources: &VersionedSourceFiles, - layout: &ProjectPathsConfig, - ctx: OutputContext<'_>, - ) -> Result> { - self.artifacts_handler().on_output(contracts, sources, layout, ctx) - } - - fn handle_artifacts( - &self, - contracts: &VersionedContracts, - artifacts: &Artifacts, - ) -> Result<()> { - self.artifacts_handler().handle_artifacts(contracts, artifacts) - } - - fn output_file_name(name: &str) -> PathBuf { - T::output_file_name(name) - } - - fn output_file_name_versioned(name: &str, version: &Version) -> PathBuf { - T::output_file_name_versioned(name, version) - } - - fn output_file(contract_file: &Path, name: &str) -> PathBuf { - T::output_file(contract_file, name) - } - - fn output_file_versioned(contract_file: &Path, name: &str, version: &Version) -> PathBuf { - T::output_file_versioned(contract_file, name, version) - } - - fn contract_name(file: &Path) -> Option { - T::contract_name(file) - } - - fn output_exists(contract_file: &Path, name: &str, root: &Path) -> bool { - T::output_exists(contract_file, name, root) - } - - fn read_cached_artifact(path: 
&Path) -> Result { - T::read_cached_artifact(path) - } - - fn read_cached_artifacts(files: I) -> Result> - where - I: IntoIterator, - P: Into, - { - T::read_cached_artifacts(files) - } - - fn contract_to_artifact( - &self, - file: &Path, - name: &str, - contract: Contract, - source_file: Option<&SourceFile>, - ) -> Self::Artifact { - self.artifacts_handler().contract_to_artifact(file, name, contract, source_file) - } - - fn output_to_artifacts( - &self, - contracts: &VersionedContracts, - sources: &VersionedSourceFiles, - ctx: OutputContext<'_>, - layout: &ProjectPathsConfig, - ) -> Artifacts { - self.artifacts_handler().output_to_artifacts(contracts, sources, ctx, layout) - } - - fn standalone_source_file_to_artifact( - &self, - path: &Path, - file: &VersionedSourceFile, - ) -> Option { - self.artifacts_handler().standalone_source_file_to_artifact(path, file) - } - - fn is_dirty(&self, artifact_file: &ArtifactFile) -> Result { - self.artifacts_handler().is_dirty(artifact_file) - } - - fn handle_cached_artifacts(&self, artifacts: &Artifacts) -> Result<()> { - self.artifacts_handler().handle_cached_artifacts(artifacts) - } -} - -// Rebases the given path to the base directory lexically. -// -// For instance, given the base `/home/user/project` and the path `/home/user/project/src/A.sol`, -// this function returns `src/A.sol`. -// -// This function transforms a path into a form that is relative to the base directory. The returned -// path starts either with a normal component (e.g., `src`) or a parent directory component (i.e., -// `..`). It also converts the path into a UTF-8 string and replaces all separators with forward -// slashes (`/`), if they're not. -// -// The rebasing process can be conceptualized as follows: -// -// 1. Remove the leading components from the path that match those in the base. -// 2. Prepend `..` components to the path, matching the number of remaining components in the base. -// -// # Examples -// -// `rebase_path("/home/user/project", "/home/user/project/src/A.sol")` returns `src/A.sol`. The -// common part, `/home/user/project`, is removed from the path. -// -// `rebase_path("/home/user/project", "/home/user/A.sol")` returns `../A.sol`. First, the common -// part, `/home/user`, is removed, leaving `A.sol`. Next, as `project` remains in the base, `..` is -// prepended to the path. -// -// On Windows, paths like `a\b\c` are converted to `a/b/c`. -// -// For more examples, see the test. 
-fn rebase_path(base: &Path, path: &Path) -> PathBuf { - use path_slash::PathExt; - - let mut base_components = base.components(); - let mut path_components = path.components(); - - let mut new_path = PathBuf::new(); - - while let Some(path_component) = path_components.next() { - let base_component = base_components.next(); - - if Some(path_component) != base_component { - if base_component.is_some() { - new_path.extend( - std::iter::repeat(std::path::Component::ParentDir) - .take(base_components.count() + 1), - ); - } - - new_path.push(path_component); - new_path.extend(path_components); - - break; - } - } - - new_path.to_slash_lossy().into_owned().into() -} - -#[cfg(test)] -#[cfg(feature = "svm-solc")] -mod tests { - use foundry_compilers_artifacts::Remapping; - use foundry_compilers_core::utils::{self, mkdir_or_touch, tempdir}; - - use super::*; - - #[test] - #[cfg_attr(windows, ignore = "<0.7 solc is flaky")] - fn test_build_all_versions() { - let paths = ProjectPathsConfig::builder() - .root("../../test-data/test-contract-versions") - .sources("../../test-data/test-contract-versions") - .build() - .unwrap(); - let project = Project::builder() - .paths(paths) - .no_artifacts() - .ephemeral() - .build(Default::default()) - .unwrap(); - let contracts = project.compile().unwrap().succeeded().into_output().contracts; - // Contracts A to F - assert_eq!(contracts.contracts().count(), 3); - } - - #[test] - fn test_build_many_libs() { - let root = utils::canonicalize("../../test-data/test-contract-libs").unwrap(); - - let paths = ProjectPathsConfig::builder() - .root(&root) - .sources(root.join("src")) - .lib(root.join("lib1")) - .lib(root.join("lib2")) - .remappings( - Remapping::find_many(&root.join("lib1")) - .into_iter() - .chain(Remapping::find_many(&root.join("lib2"))), - ) - .build() - .unwrap(); - let project = Project::builder() - .paths(paths) - .no_artifacts() - .ephemeral() - .no_artifacts() - .build(Default::default()) - .unwrap(); - let contracts = project.compile().unwrap().succeeded().into_output().contracts; - assert_eq!(contracts.contracts().count(), 3); - } - - #[test] - fn test_build_remappings() { - let root = utils::canonicalize("../../test-data/test-contract-remappings").unwrap(); - let paths = ProjectPathsConfig::builder() - .root(&root) - .sources(root.join("src")) - .lib(root.join("lib")) - .remappings(Remapping::find_many(&root.join("lib"))) - .build() - .unwrap(); - let project = Project::builder() - .no_artifacts() - .paths(paths) - .ephemeral() - .build(Default::default()) - .unwrap(); - let contracts = project.compile().unwrap().succeeded().into_output().contracts; - assert_eq!(contracts.contracts().count(), 2); - } - - #[test] - fn can_rebase_path() { - let rebase_path = |a: &str, b: &str| rebase_path(a.as_ref(), b.as_ref()); - - assert_eq!(rebase_path("a/b", "a/b/c"), PathBuf::from("c")); - assert_eq!(rebase_path("a/b", "a/c"), PathBuf::from("../c")); - assert_eq!(rebase_path("a/b", "c"), PathBuf::from("../../c")); - - assert_eq!( - rebase_path("/home/user/project", "/home/user/project/A.sol"), - PathBuf::from("A.sol") - ); - assert_eq!( - rebase_path("/home/user/project", "/home/user/project/src/A.sol"), - PathBuf::from("src/A.sol") - ); - assert_eq!( - rebase_path("/home/user/project", "/home/user/project/lib/forge-std/src/Test.sol"), - PathBuf::from("lib/forge-std/src/Test.sol") - ); - assert_eq!( - rebase_path("/home/user/project", "/home/user/A.sol"), - PathBuf::from("../A.sol") - ); - assert_eq!(rebase_path("/home/user/project", "/home/A.sol"), 
PathBuf::from("../../A.sol")); - assert_eq!(rebase_path("/home/user/project", "/A.sol"), PathBuf::from("../../../A.sol")); - assert_eq!( - rebase_path("/home/user/project", "/tmp/A.sol"), - PathBuf::from("../../../tmp/A.sol") - ); - - assert_eq!( - rebase_path("/Users/ah/temp/verif", "/Users/ah/temp/remapped/Child.sol"), - PathBuf::from("../remapped/Child.sol") - ); - assert_eq!( - rebase_path("/Users/ah/temp/verif", "/Users/ah/temp/verif/../remapped/Parent.sol"), - PathBuf::from("../remapped/Parent.sol") - ); - } - - #[test] - fn can_resolve_oz_remappings() { - let tmp_dir = tempdir("node_modules").unwrap(); - let tmp_dir_node_modules = tmp_dir.path().join("node_modules"); - let paths = [ - "node_modules/@openzeppelin/contracts/interfaces/IERC1155.sol", - "node_modules/@openzeppelin/contracts/finance/VestingWallet.sol", - "node_modules/@openzeppelin/contracts/proxy/Proxy.sol", - "node_modules/@openzeppelin/contracts/token/ERC20/IERC20.sol", - ]; - mkdir_or_touch(tmp_dir.path(), &paths[..]); - let remappings = Remapping::find_many(&tmp_dir_node_modules); - let mut paths = ProjectPathsConfig::<()>::hardhat(tmp_dir.path()).unwrap(); - paths.remappings = remappings; - - let resolved = paths - .resolve_library_import( - tmp_dir.path(), - Path::new("@openzeppelin/contracts/token/ERC20/IERC20.sol"), - ) - .unwrap(); - assert!(resolved.exists()); - - // adjust remappings - paths.remappings[0].name = "@openzeppelin/".to_string(); - - let resolved = paths - .resolve_library_import( - tmp_dir.path(), - Path::new("@openzeppelin/contracts/token/ERC20/IERC20.sol"), - ) - .unwrap(); - assert!(resolved.exists()); - } -} diff --git a/crates/compilers/src/project_util/mock.rs b/crates/compilers/src/project_util/mock.rs deleted file mode 100644 index 22c6fdbf..00000000 --- a/crates/compilers/src/project_util/mock.rs +++ /dev/null @@ -1,630 +0,0 @@ -//! Helpers to generate mock projects - -use foundry_compilers_artifacts::Remapping; -use foundry_compilers_core::error::{Result, SolcError}; -use rand::{ - distributions::{Distribution, Uniform}, - seq::SliceRandom, - Rng, -}; -use serde::{Deserialize, Serialize}; -use std::{ - collections::{BTreeSet, HashMap, HashSet, VecDeque}, - path::{Path, PathBuf}, -}; - -use crate::{ - compilers::{multi::MultiCompilerParsedSource, Language, ParsedSource}, - resolver::GraphEdges, - Graph, ProjectPathsConfig, -}; - -/// Represents the layout of a project -#[derive(Default, Serialize, Deserialize)] -pub struct MockProjectSkeleton { - /// all files for the project - pub files: Vec, - /// all libraries - pub libraries: Vec, -} - -impl MockProjectSkeleton { - /// Returns a list of file ids the given file id imports. 
- pub fn imported_nodes(&self, from: usize) -> impl Iterator + '_ { - self.files[from].imports.iter().map(|i| i.file_id()) - } -} - -/// Represents a virtual project -#[derive(Serialize)] -pub struct MockProjectGenerator { - /// how to name things - #[serde(skip)] - name_strategy: Box, - - #[serde(flatten)] - inner: MockProjectSkeleton, -} - -impl MockProjectGenerator { - /// Create a new project and populate it using the given settings - pub fn new(settings: &MockProjectSettings) -> Self { - let mut mock = Self::default(); - mock.populate(settings); - mock - } - - /// Create a skeleton of a real project - pub fn create(paths: &ProjectPathsConfig) -> Result { - fn get_libs( - edges: &GraphEdges, - lib_folder: &Path, - ) -> Option>> { - let mut libs: HashMap<_, Vec<_>> = HashMap::new(); - for lib_file in edges.library_files() { - let component = - edges.node_path(lib_file).strip_prefix(lib_folder).ok()?.components().next()?; - libs.entry(lib_folder.join(component)).or_default().push(lib_file); - } - Some(libs) - } - - let graph = Graph::::resolve(paths)?; - let mut gen = Self::default(); - let (_, edges) = graph.into_sources(); - - // add all files as source files - gen.add_sources(edges.files().count()); - - // stores libs and their files - let libs = get_libs( - &edges, - &paths.libraries.first().cloned().unwrap_or_else(|| paths.root.join("lib")), - ) - .ok_or_else(|| SolcError::msg("Failed to detect libs"))?; - - // mark all files as libs - for (lib_id, lib_files) in libs.into_values().enumerate() { - let lib_name = gen.name_strategy.new_lib_name(lib_id); - let offset = gen.inner.files.len(); - let lib = MockLib { name: lib_name, id: lib_id, num_files: lib_files.len(), offset }; - for lib_file in lib_files { - let file = &mut gen.inner.files[lib_file]; - file.lib_id = Some(lib_id); - file.name = gen.name_strategy.new_lib_name(file.id); - } - gen.inner.libraries.push(lib); - } - - for id in edges.files() { - for import in edges.imported_nodes(id).iter().copied() { - let import = gen.get_import(import); - gen.inner.files[id].imports.insert(import); - } - } - - Ok(gen) - } - - /// Consumes the type and returns the underlying skeleton - pub fn into_inner(self) -> MockProjectSkeleton { - self.inner - } - - /// Generate all solidity files and write under the paths config - pub fn write_to( - &self, - paths: &ProjectPathsConfig, - version: &str, - ) -> Result<()> { - for file in self.inner.files.iter() { - let imports = self.get_imports(file.id); - let content = file.mock_content(version, imports.join("\n").as_str()); - super::create_contract_file(&file.target_path(self, paths), content)?; - } - - Ok(()) - } - - fn get_imports(&self, file: usize) -> Vec { - let file = &self.inner.files[file]; - let mut imports = Vec::with_capacity(file.imports.len()); - - for import in file.imports.iter() { - match *import { - MockImport::Internal(f) => { - imports.push(format!("import \"./{}.sol\";", self.inner.files[f].name)); - } - MockImport::External(lib, f) => { - imports.push(format!( - "import \"{}/{}.sol\";", - self.inner.libraries[lib].name, self.inner.files[f].name - )); - } - } - } - imports - } - - /// Returns all the remappings for the project for the given root path - pub fn remappings_at(&self, root: &Path) -> Vec { - self.inner - .libraries - .iter() - .map(|lib| { - let path = root.join("lib").join(&lib.name).join("src"); - format!("{}/={}/", lib.name, path.display()).parse().unwrap() - }) - .collect() - } - - /// Returns all the remappings for the project - pub fn remappings(&self) -> Vec { - 
self.inner
-            .libraries
-            .iter()
-            .map(|lib| format!("{0}/=lib/{0}/src/", lib.name).parse().unwrap())
-            .collect()
-    }
-
-    /// Generates a random project with random settings
-    pub fn random() -> Self {
-        let settings = MockProjectSettings::random();
-        let mut mock = Self::default();
-        mock.populate(&settings);
-        mock
-    }
-
-    /// Adds sources and libraries and populates imports based on the settings
-    pub fn populate(&mut self, settings: &MockProjectSettings) -> &mut Self {
-        self.add_sources(settings.num_sources);
-        for _ in 0..settings.num_libs {
-            self.add_lib(settings.num_lib_files);
-        }
-        self.populate_imports(settings)
-    }
-
-    fn next_file_id(&self) -> usize {
-        self.inner.files.len()
-    }
-
-    fn next_lib_id(&self) -> usize {
-        self.inner.libraries.len()
-    }
-
-    /// Adds a new source file
-    pub fn add_source(&mut self) -> &mut Self {
-        let id = self.next_file_id();
-        let name = self.name_strategy.new_source_file_name(id);
-        let file =
-            MockFile { id, name, imports: Default::default(), lib_id: None, emit_artifacts: true };
-        self.inner.files.push(file);
-        self
-    }
-
-    /// Adds `num` new source files
-    pub fn add_sources(&mut self, num: usize) -> &mut Self {
-        for _ in 0..num {
-            self.add_source();
-        }
-        self
-    }
-
-    /// Adds a new lib file
-    pub fn add_lib_file(&mut self, lib_id: usize) -> &mut Self {
-        let id = self.next_file_id();
-        let name = self.name_strategy.new_source_file_name(id);
-        let file = MockFile {
-            id,
-            name,
-            imports: Default::default(),
-            lib_id: Some(lib_id),
-            emit_artifacts: true,
-        };
-        self.inner.files.push(file);
-        self
-    }
-
-    /// Adds `num` new lib files for the given lib
-    pub fn add_lib_files(&mut self, num: usize, lib_id: usize) -> &mut Self {
-        for _ in 0..num {
-            self.add_lib_file(lib_id);
-        }
-        self
-    }
-
-    /// Adds a new lib with the number of lib files
-    pub fn add_lib(&mut self, num_files: usize) -> &mut Self {
-        let lib_id = self.next_lib_id();
-        let lib_name = self.name_strategy.new_lib_name(lib_id);
-        let offset = self.inner.files.len();
-        self.add_lib_files(num_files, lib_id);
-        self.inner.libraries.push(MockLib { name: lib_name, id: lib_id, num_files, offset });
-        self
-    }
-
-    /// Randomly assigns empty-file status so that some mocked files don't emit artifacts
-    pub fn assign_empty_files(&mut self) -> &mut Self {
-        let mut rng = rand::thread_rng();
-        let die = Uniform::from(0..self.inner.files.len());
-        for file in self.inner.files.iter_mut() {
-            let throw = die.sample(&mut rng);
-            if throw == 0 {
-                // give it a 1 in num(files) chance that the file will be empty
-                file.emit_artifacts = false;
-            }
-        }
-        self
-    }
-
-    /// Populates the imports of the project
-    pub fn populate_imports(&mut self, settings: &MockProjectSettings) -> &mut Self {
-        let mut rng = rand::thread_rng();
-
-        // populate imports
-        for id in 0..self.inner.files.len() {
-            let imports = if let Some(lib) = self.inner.files[id].lib_id {
-                let num_imports = rng
-                    .gen_range(settings.min_imports..=settings.max_imports)
-                    .min(self.inner.libraries[lib].num_files.saturating_sub(1));
-                self.unique_imports_for_lib(&mut rng, lib, id, num_imports)
-            } else {
-                let num_imports = rng
-                    .gen_range(settings.min_imports..=settings.max_imports)
-                    .min(self.inner.files.len().saturating_sub(1));
-                self.unique_imports_for_source(&mut rng, id, num_imports)
-            };
-
-            self.inner.files[id].imports = imports;
-        }
-        self
-    }
-
-    fn get_import(&self, id: usize) -> MockImport {
-        if let Some(lib) = self.inner.files[id].lib_id {
-            MockImport::External(lib, id)
-        } else {
-            MockImport::Internal(id)
-        }
-    }
-
-    /// Returns
the file for the given id - pub fn get_file(&self, id: usize) -> &MockFile { - &self.inner.files[id] - } - - /// All file ids - pub fn file_ids(&self) -> impl Iterator + '_ { - self.inner.files.iter().map(|f| f.id) - } - - /// Returns an iterator over all file ids that are source files or imported by source files - /// - /// In other words, all files that are relevant in order to compile the project's source files. - pub fn used_file_ids(&self) -> impl Iterator + '_ { - let mut file_ids = BTreeSet::new(); - for file in self.internal_file_ids() { - file_ids.extend(NodesIter::new(file, &self.inner)) - } - file_ids.into_iter() - } - - /// All ids of internal files - pub fn internal_file_ids(&self) -> impl Iterator + '_ { - self.inner.files.iter().filter(|f| !f.is_external()).map(|f| f.id) - } - - /// All ids of external files - pub fn external_file_ids(&self) -> impl Iterator + '_ { - self.inner.files.iter().filter(|f| f.is_external()).map(|f| f.id) - } - - /// generates exactly `num` unique imports in the range of all files - /// - /// # Panics - /// - /// if `num` can't be satisfied because the range is too narrow - fn unique_imports_for_source( - &self, - rng: &mut R, - id: usize, - num: usize, - ) -> BTreeSet { - assert!(self.inner.files.len() > num); - let mut imports: Vec<_> = (0..self.inner.files.len()).collect(); - imports.shuffle(rng); - imports.into_iter().filter(|i| *i != id).map(|id| self.get_import(id)).take(num).collect() - } - - /// Modifies the content of the given file - pub fn modify_file( - &self, - id: usize, - paths: &ProjectPathsConfig, - version: &str, - ) -> Result { - let file = &self.inner.files[id]; - let target = file.target_path(self, paths); - let content = file.modified_content(version, self.get_imports(id).join("\n").as_str()); - super::create_contract_file(&target, content)?; - Ok(target) - } - - /// generates exactly `num` unique imports in the range of a lib's files - /// - /// # Panics - /// - /// if `num` can't be satisfied because the range is too narrow - fn unique_imports_for_lib( - &self, - rng: &mut R, - lib_id: usize, - id: usize, - num: usize, - ) -> BTreeSet { - let lib = &self.inner.libraries[lib_id]; - assert!(lib.num_files > num); - let mut imports: Vec<_> = (lib.offset..(lib.offset + lib.len())).collect(); - imports.shuffle(rng); - imports.into_iter().filter(|i| *i != id).map(|id| self.get_import(id)).take(num).collect() - } -} - -impl Default for MockProjectGenerator { - fn default() -> Self { - Self { name_strategy: Box::::default(), inner: Default::default() } - } -} - -impl From for MockProjectGenerator { - fn from(inner: MockProjectSkeleton) -> Self { - Self { inner, ..Default::default() } - } -} - -/// Used to determine the names for elements -trait NamingStrategy { - /// Return a new name for the given source file id - fn new_source_file_name(&mut self, id: usize) -> String; - - /// Return a new name for the given source file id - #[allow(unused)] - fn new_lib_file_name(&mut self, id: usize) -> String; - - /// Return a new name for the given lib id - fn new_lib_name(&mut self, id: usize) -> String; -} - -/// A primitive naming that simply uses ids to create unique names -#[derive(Clone, Copy, Debug, Default)] -#[non_exhaustive] -pub struct SimpleNamingStrategy; - -impl NamingStrategy for SimpleNamingStrategy { - fn new_source_file_name(&mut self, id: usize) -> String { - format!("SourceFile{id}") - } - - fn new_lib_file_name(&mut self, id: usize) -> String { - format!("LibFile{id}") - } - - fn new_lib_name(&mut self, id: usize) -> String 
{ - format!("Lib{id}") - } -} - -/// Skeleton of a mock source file -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct MockFile { - /// internal id of this file - pub id: usize, - /// The source name of this file - pub name: String, - /// all the imported files - pub imports: BTreeSet, - /// lib id if this file is part of a lib - pub lib_id: Option, - /// whether this file should emit artifacts - pub emit_artifacts: bool, -} - -impl MockFile { - /// Returns `true` if this file is part of an external lib - pub fn is_external(&self) -> bool { - self.lib_id.is_some() - } - - pub fn target_path( - &self, - gen: &MockProjectGenerator, - paths: &ProjectPathsConfig, - ) -> PathBuf { - let mut target = if let Some(lib) = self.lib_id { - paths.root.join("lib").join(&gen.inner.libraries[lib].name).join("src").join(&self.name) - } else { - paths.sources.join(&self.name) - }; - target.set_extension("sol"); - - target - } - - /// Returns the content to use for a modified file - /// - /// The content here is arbitrary, it should only differ from the mocked content - pub fn modified_content(&self, version: &str, imports: &str) -> String { - format!( - r#" -// SPDX-License-Identifier: UNLICENSED -pragma solidity {}; -{} -contract {} {{ - function hello() public {{}} -}} - "#, - version, imports, self.name - ) - } - - /// Returns a mocked content for the file - pub fn mock_content(&self, version: &str, imports: &str) -> String { - if self.emit_artifacts { - format!( - r#" -// SPDX-License-Identifier: UNLICENSED -pragma solidity {}; -{} -contract {} {{}} - "#, - version, imports, self.name - ) - } else { - format!( - r#" -// SPDX-License-Identifier: UNLICENSED -pragma solidity {version}; -{imports} - "#, - ) - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -pub enum MockImport { - /// Import from the same project - Internal(usize), - /// external library import - /// (`lib id`, `file id`) - External(usize, usize), -} - -impl MockImport { - pub fn file_id(&self) -> usize { - *match self { - Self::Internal(id) => id, - Self::External(_, id) => id, - } - } -} - -/// Container of a mock lib -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct MockLib { - /// name of the lib, like `ds-test` - pub name: String, - /// internal id of this lib - pub id: usize, - /// offset in the total set of files - pub offset: usize, - /// number of files included in this lib - pub num_files: usize, -} - -impl MockLib { - pub fn len(&self) -> usize { - self.num_files - } - - pub fn is_empty(&self) -> bool { - self.len() == 0 - } -} - -/// Settings to use when generate a mock project -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct MockProjectSettings { - /// number of source files to generate - pub num_sources: usize, - /// number of libraries to use - pub num_libs: usize, - /// how many lib files to generate per lib - pub num_lib_files: usize, - /// min amount of import statements a file can use - pub min_imports: usize, - /// max amount of import statements a file can use - pub max_imports: usize, - /// whether to also use files that don't emit artifacts - pub allow_no_artifacts_files: bool, -} - -impl MockProjectSettings { - /// Generates a new instance with random settings within an arbitrary range - pub fn random() -> Self { - let mut rng = rand::thread_rng(); - // arbitrary thresholds - Self { - num_sources: rng.gen_range(2..25), - num_libs: rng.gen_range(0..5), - num_lib_files: rng.gen_range(1..10), - min_imports: 
rng.gen_range(0..3), - max_imports: rng.gen_range(4..10), - allow_no_artifacts_files: true, - } - } - - /// Generates settings for a large project - pub fn large() -> Self { - // arbitrary thresholds - Self { - num_sources: 35, - num_libs: 4, - num_lib_files: 15, - min_imports: 3, - max_imports: 12, - allow_no_artifacts_files: true, - } - } -} - -impl Default for MockProjectSettings { - fn default() -> Self { - // these are arbitrary - Self { - num_sources: 20, - num_libs: 2, - num_lib_files: 10, - min_imports: 0, - max_imports: 5, - allow_no_artifacts_files: true, - } - } -} - -/// An iterator over a node and its dependencies -struct NodesIter<'a> { - /// stack of nodes - stack: VecDeque, - visited: HashSet, - skeleton: &'a MockProjectSkeleton, -} - -impl<'a> NodesIter<'a> { - fn new(start: usize, skeleton: &'a MockProjectSkeleton) -> Self { - Self { stack: VecDeque::from([start]), visited: HashSet::new(), skeleton } - } -} - -impl<'a> Iterator for NodesIter<'a> { - type Item = usize; - fn next(&mut self) -> Option { - let file = self.stack.pop_front()?; - - if self.visited.insert(file) { - // push the file's direct imports to the stack if we haven't visited it already - self.stack.extend(self.skeleton.imported_nodes(file)); - } - Some(file) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_generate_mock_project() { - let _ = MockProjectGenerator::random(); - } -} diff --git a/crates/compilers/src/project_util/mod.rs b/crates/compilers/src/project_util/mod.rs deleted file mode 100644 index c7175731..00000000 --- a/crates/compilers/src/project_util/mod.rs +++ /dev/null @@ -1,552 +0,0 @@ -//! Utilities for mocking project workspaces. - -use crate::{ - cache::CompilerCache, - compilers::{ - multi::{MultiCompiler, MultiCompilerSettings}, - Compiler, - }, - config::ProjectPathsConfigBuilder, - solc::SolcSettings, - Artifact, ArtifactOutput, Artifacts, ConfigurableArtifacts, HardhatArtifacts, PathStyle, - Project, ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, -}; -use foundry_compilers_artifacts::{ConfigurableContractArtifact, Remapping, Settings}; -use foundry_compilers_core::{ - error::{Result, SolcError, SolcIoError}, - utils::{self, tempdir}, -}; -use fs_extra::{dir, file}; -use mock::{MockProjectGenerator, MockProjectSettings}; -use std::{ - fmt, - path::{Path, PathBuf}, - process, - process::Command, -}; -use tempfile::TempDir; - -pub mod mock; - -/// A [`Project`] wrapper that lives in a new temporary directory -/// -/// Once `TempProject` is dropped, the temp dir is automatically removed, see [`TempDir::drop()`] -pub struct TempProject { - /// temporary workspace root - _root: TempDir, - /// actual project workspace with the `root` tempdir as its root - inner: Project, -} - -impl TempProject { - /// Creates a new temp project using the provided paths and artifacts handler. 
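A minimal end-to-end sketch of the wrapper described here; it assumes a usable solc (e.g. via the `svm-solc` feature) and a hypothetical `Greeter` contract:

```rust
use foundry_compilers::project_util::TempProject;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Everything lives under a tempdir that is removed again on drop.
    let project = TempProject::dapptools()?;
    project.add_basic_source("Greeter", "^0.8.10")?;
    let output = project.compile()?;
    assert!(!output.has_compiler_errors());
    Ok(())
}
```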
- /// sets the project root to a temp dir - #[cfg(feature = "svm-solc")] - pub fn with_artifacts(paths: ProjectPathsConfigBuilder, artifacts: T) -> Result { - Self::prefixed_with_artifacts("temp-project", paths, artifacts) - } - - /// Overwrites the settings to pass to `solc` - pub fn with_solc_settings(mut self, settings: impl Into) -> Self { - self.inner.settings.solc = SolcSettings { settings: settings.into(), ..Default::default() }; - self - } - - /// Explicitly sets the solc version for the project - #[cfg(feature = "svm-solc")] - pub fn set_solc(&mut self, solc: &str) -> &mut Self { - use crate::compilers::{multi::MultiCompilerLanguage, solc::SolcLanguage}; - use semver::Version; - - let version = Version::parse(solc).unwrap(); - self.inner - .locked_versions - .insert(MultiCompilerLanguage::Solc(SolcLanguage::Solidity), version.clone()); - self.inner - .locked_versions - .insert(MultiCompilerLanguage::Solc(SolcLanguage::Yul), version.clone()); - self - } -} - -impl TempProject { - /// Creates a new temp project for the given `PathStyle` - #[cfg(feature = "svm-solc")] - pub fn with_style(prefix: &str, style: PathStyle) -> Result { - let tmp_dir = tempdir(prefix)?; - let paths = style.paths(tmp_dir.path())?; - let inner = - Project::builder().artifacts(T::default()).paths(paths).build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } -} - -impl fmt::Debug for TempProject { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TempProject").field("paths", &self.inner.paths).finish() - } -} - -pub(crate) fn create_contract_file(path: &Path, content: impl AsRef) -> Result<()> { - if let Some(parent) = path.parent() { - std::fs::create_dir_all(parent) - .map_err(|err| SolcIoError::new(err, parent.to_path_buf()))?; - } - std::fs::write(path, content.as_ref()).map_err(|err| SolcIoError::new(err, path))?; - Ok(()) -} - -fn contract_file_name(name: &str) -> String { - let name = name.trim(); - if name.ends_with(".sol") || name.ends_with(".vy") || name.ends_with(".vyi") { - name.to_string() - } else { - format!("{name}.sol") - } -} - -#[cfg(feature = "svm-solc")] -impl TempProject { - /// Creates an empty new hardhat style workspace in a new temporary dir - pub fn hardhat() -> Result { - let tmp_dir = tempdir("tmp_hh")?; - - let paths = ProjectPathsConfig::hardhat(tmp_dir.path())?; - - let inner = Project::builder() - .artifacts(HardhatArtifacts::default()) - .paths(paths) - .build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) 
- } -} - -impl TempProject { - /// Makes sure all resources are created - pub fn create_new( - root: TempDir, - inner: Project, - ) -> std::result::Result { - let mut project = Self { _root: root, inner }; - project.inner.paths.create_all()?; - // ignore license warnings - project.inner.ignored_error_codes.push(1878); - Ok(project) - } - - /// Creates a new temp project using the provided paths and setting the project root to a temp - /// dir - #[cfg(feature = "svm-solc")] - pub fn new(paths: ProjectPathsConfigBuilder) -> Result { - Self::prefixed("temp-project", paths) - } - - /// Creates a new temp project inside a tempdir with a prefixed directory - #[cfg(feature = "svm-solc")] - pub fn prefixed(prefix: &str, paths: ProjectPathsConfigBuilder) -> Result { - Self::prefixed_with_artifacts(prefix, paths, T::default()) - } - - /// Creates a new temp project inside a tempdir with a prefixed directory and the given - /// artifacts handler - #[cfg(feature = "svm-solc")] - pub fn prefixed_with_artifacts( - prefix: &str, - paths: ProjectPathsConfigBuilder, - artifacts: T, - ) -> Result { - let tmp_dir = tempdir(prefix)?; - let paths = paths.build_with_root(tmp_dir.path()); - let inner = ProjectBuilder::::new(Default::default()) - .artifacts(artifacts) - .paths(paths) - .build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } - - /// Creates an empty new dapptools style workspace in a new temporary dir - pub fn dapptools() -> Result { - let tmp_dir = tempdir("tmp_dapp")?; - let paths = ProjectPathsConfig::dapptools(tmp_dir.path())?; - - let inner = ProjectBuilder::::new(Default::default()) - .paths(paths) - .build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } - - /// Creates an initialized dapptools style workspace in a new temporary dir - pub fn dapptools_init() -> Result { - let mut project = Self::dapptools()?; - let orig_root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - copy_dir(&orig_root, project.root())?; - project.project_mut().paths.remappings = Remapping::find_many(project.root()); - project.project_mut().paths.remappings.iter_mut().for_each(|r| r.slash_path()); - - Ok(project) - } - - pub fn project(&self) -> &Project { - &self.inner - } - - pub fn project_mut(&mut self) -> &mut Project { - &mut self.inner - } - - /// The configured paths of the project - pub fn paths(&self) -> &ProjectPathsConfig { - &self.project().paths - } - - /// The configured paths of the project - pub fn paths_mut(&mut self) -> &mut ProjectPathsConfig { - &mut self.project_mut().paths - } - - /// Copies a single file into the projects source - pub fn copy_source(&self, source: &Path) -> Result<()> { - copy_file(source, &self.paths().sources) - } - - pub fn copy_sources(&self, sources: I) -> Result<()> - where - I: IntoIterator, - S: AsRef, - { - for path in sources { - self.copy_source(path.as_ref())?; - } - Ok(()) - } - - fn get_lib(&self) -> Result { - self.paths() - .libraries - .first() - .cloned() - .ok_or_else(|| SolcError::msg("No libraries folders configured")) - } - - /// Copies a single file into the project's main library directory - pub fn copy_lib(&self, lib: &Path) -> Result<()> { - let lib_dir = self.get_lib()?; - copy_file(lib, &lib_dir) - } - - /// Copy a series of files into the main library dir - pub fn copy_libs(&self, libs: I) -> Result<()> - where - I: IntoIterator, - S: AsRef, - { - for path in libs { - self.copy_lib(path.as_ref())?; - } - Ok(()) - } - - /// Adds a new library file - pub fn add_lib(&self, name: &str, 
content: impl AsRef<str>) -> Result<PathBuf> {
-        let name = contract_file_name(name);
-        let lib_dir = self.get_lib()?;
-        let lib = lib_dir.join(name);
-        create_contract_file(&lib, content)?;
-        Ok(lib)
-    }
-
-    /// Adds a basic lib contract `contract <name> {}` as a new file
-    pub fn add_basic_lib(&self, name: &str, version: &str) -> Result<PathBuf> {
-        let name = name.strip_suffix(".sol").unwrap_or(name);
-        self.add_lib(
-            name,
-            format!(
-                r#"
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity {version};
-contract {name} {{}}
-            "#,
-            ),
-        )
-    }
-
-    /// Adds a new test file inside the project's test dir
-    pub fn add_test(&self, name: &str, content: impl AsRef<str>) -> Result<PathBuf> {
-        let name = contract_file_name(name);
-        let tests = self.paths().tests.join(name);
-        create_contract_file(&tests, content)?;
-        Ok(tests)
-    }
-
-    /// Adds a new script file inside the project's script dir
-    pub fn add_script(&self, name: &str, content: impl AsRef<str>) -> Result<PathBuf> {
-        let name = contract_file_name(name);
-        let script = self.paths().scripts.join(name);
-        create_contract_file(&script, content)?;
-        Ok(script)
-    }
-
-    /// Adds a new source file inside the project's source dir
-    pub fn add_source(&self, name: &str, content: impl AsRef<str>) -> Result<PathBuf> {
-        let name = contract_file_name(name);
-        let source = self.paths().sources.join(name);
-        create_contract_file(&source, content)?;
-        Ok(source)
-    }
-
-    /// Adds a basic source contract `contract <name> {}` as a new file
-    pub fn add_basic_source(&self, name: &str, version: &str) -> Result<PathBuf> {
-        let name = name.strip_suffix(".sol").unwrap_or(name);
-        self.add_source(
-            name,
-            format!(
-                r#"
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity {version};
-contract {name} {{}}
-            "#,
-            ),
-        )
-    }
-
-    /// Adds a Solidity contract in the project's root dir.
-    /// This will also create all intermediary dirs.
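The `add_*` helpers above all normalize names through `contract_file_name`; a sketch of its expected behavior, written as a test one could place next to it:

```rust
#[test]
fn normalizes_contract_file_names() {
    // Trims whitespace, then appends `.sol` unless the name already has a
    // known Solidity/Vyper extension (.sol, .vy, .vyi).
    assert_eq!(contract_file_name("Greeter"), "Greeter.sol");
    assert_eq!(contract_file_name("Token.vy"), "Token.vy");
    assert_eq!(contract_file_name(" Pool.sol "), "Pool.sol");
}
```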
- pub fn add_contract(&self, name: &str, content: impl AsRef) -> Result { - let name = contract_file_name(name); - let source = self.root().join(name); - create_contract_file(&source, content)?; - Ok(source) - } - - /// Returns the path to the artifacts directory - pub fn artifacts_path(&self) -> &PathBuf { - &self.paths().artifacts - } - - /// Returns the path to the sources directory - pub fn sources_path(&self) -> &PathBuf { - &self.paths().sources - } - - /// Returns the path to the cache file - pub fn cache_path(&self) -> &PathBuf { - &self.paths().cache - } - - /// The root path of the temporary workspace - pub fn root(&self) -> &Path { - self.project().paths.root.as_path() - } - - pub fn compile(&self) -> Result> { - self.project().compile() - } - - /// Returns a snapshot of all cached artifacts - pub fn artifacts_snapshot(&self) -> Result> { - let cache = self.project().read_cache_file()?; - let artifacts = cache.read_artifacts::()?; - Ok(ArtifactsSnapshot { cache, artifacts }) - } - - /// Populate the project with mock files - pub fn mock(&self, gen: &MockProjectGenerator, version: &str) -> Result<()> { - gen.write_to(self.paths(), version) - } - - /// Compiles the project and ensures that the output does not contain errors - pub fn ensure_no_errors(&self) -> Result<&Self> { - let compiled = self.compile().unwrap(); - if compiled.has_compiler_errors() { - bail!("Compiled with errors {}", compiled) - } - Ok(self) - } - - /// Compiles the project and ensures that the output is __unchanged__ - pub fn ensure_unchanged(&self) -> Result<&Self> { - let compiled = self.compile().unwrap(); - if !compiled.is_unchanged() { - bail!("Compiled with detected changes {}", compiled) - } - Ok(self) - } - - /// Compiles the project and ensures that the output has __changed__ - pub fn ensure_changed(&self) -> Result<&Self> { - let compiled = self.compile().unwrap(); - if compiled.is_unchanged() { - bail!("Compiled without detecting changes {}", compiled) - } - Ok(self) - } - - /// Compiles the project and ensures that the output does not contain errors and no changes - /// exists on recompiled. - /// - /// This is a convenience function for `ensure_no_errors` + `ensure_unchanged`. - pub fn ensure_no_errors_recompile_unchanged(&self) -> Result<&Self> { - self.ensure_no_errors()?.ensure_unchanged() - } - - /// Compiles the project and asserts that the output does not contain errors and no changes - /// exists on recompiled. - /// - /// This is a convenience function for `assert_no_errors` + `assert_unchanged`. 
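The typical cache-regression flow built from the `ensure_*` helpers here, as a sketch (the contract name and pragma are illustrative):

```rust
use foundry_compilers::project_util::TempProject;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let project = TempProject::dapptools()?;
    project.add_basic_source("A", "^0.8.10")?;
    // The first compile must succeed, and an immediate recompile must be
    // a pure cache hit with no dirty files.
    project.ensure_no_errors_recompile_unchanged()?;
    Ok(())
}
```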
- #[track_caller] - pub fn assert_no_errors_recompile_unchanged(&self) -> &Self { - self.assert_no_errors().assert_unchanged() - } - - /// Compiles the project and asserts that the output does not contain errors - pub fn assert_no_errors(&self) -> &Self { - let compiled = self.compile().unwrap(); - compiled.assert_success(); - self - } - - /// Compiles the project and asserts that the output is unchanged - #[track_caller] - pub fn assert_unchanged(&self) -> &Self { - let compiled = self.compile().unwrap(); - assert!(compiled.is_unchanged()); - self - } - - /// Compiles the project and asserts that the output is _changed_ - pub fn assert_changed(&self) -> &Self { - let compiled = self.compile().unwrap(); - assert!(!compiled.is_unchanged()); - self - } - - /// Returns a list of all source files in the project's `src` directory - pub fn list_source_files(&self) -> Vec { - utils::sol_source_files(self.project().sources_path()) - } -} - -#[cfg(feature = "svm-solc")] -impl TempProject { - pub fn dapptools_with_ignore_paths(paths_to_ignore: Vec) -> Result { - let tmp_dir = tempdir("tmp_dapp")?; - let paths = ProjectPathsConfig::dapptools(tmp_dir.path())?; - - let inner = Project::builder() - .paths(paths) - .ignore_paths(paths_to_ignore) - .build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } - - /// Clones the given repo into a temp dir, initializes it recursively and configures it. - pub fn checkout(repo: &str) -> Result { - let tmp_dir = tempdir("tmp_checkout")?; - clone_remote(&format!("https://github.com/{repo}"), tmp_dir.path()) - .map_err(|err| SolcIoError::new(err, tmp_dir.path()))?; - let paths = ProjectPathsConfig::dapptools(tmp_dir.path())?; - - let inner = Project::builder().paths(paths).build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } - - /// Create a new temporary project and populate it with mock files. - pub fn mocked(settings: &MockProjectSettings, version: &str) -> Result { - let mut tmp = Self::dapptools()?; - let gen = MockProjectGenerator::new(settings); - tmp.mock(&gen, version)?; - let remappings = gen.remappings_at(tmp.root()); - tmp.paths_mut().remappings.extend(remappings); - Ok(tmp) - } - - /// Create a new temporary project and populate it with a random layout. 
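Pairing the mock generator with a temp project takes one call; a sketch with the default settings and an illustrative pragma range:

```rust
use foundry_compilers::project_util::{mock::MockProjectSettings, TempProject};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Generates sources, libs and matching remappings, then verifies the
    // synthetic workspace actually compiles.
    let project = TempProject::mocked(&MockProjectSettings::default(), "^0.8.10")?;
    project.assert_no_errors();
    Ok(())
}
```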
- pub fn mocked_random(version: &str) -> Result { - Self::mocked(&MockProjectSettings::random(), version) - } -} - -impl AsRef> - for TempProject -{ - fn as_ref(&self) -> &Project { - self.project() - } -} - -/// The cache file and all the artifacts it references -#[derive(Clone, Debug)] -pub struct ArtifactsSnapshot { - pub cache: CompilerCache, - pub artifacts: Artifacts, -} - -impl ArtifactsSnapshot { - /// Ensures that all artifacts have abi, bytecode, deployedbytecode - pub fn assert_artifacts_essentials_present(&self) { - for artifact in self.artifacts.artifact_files() { - let c = artifact.artifact.clone().into_compact_contract(); - assert!(c.abi.is_some()); - assert!(c.bin.is_some()); - assert!(c.bin_runtime.is_some()); - } - } -} - -/// commonly used options for copying entire folders -fn dir_copy_options() -> dir::CopyOptions { - dir::CopyOptions { - overwrite: true, - skip_exist: false, - buffer_size: 64000, //64kb - copy_inside: true, - content_only: true, - depth: 0, - } -} - -/// commonly used options for copying files -fn file_copy_options() -> file::CopyOptions { - file::CopyOptions { - overwrite: true, - skip_exist: false, - buffer_size: 64000, //64kb - } -} - -/// Copies a single file into the given dir -pub fn copy_file(source: &Path, target_dir: &Path) -> Result<()> { - let target = target_dir.join( - source - .file_name() - .ok_or_else(|| SolcError::msg(format!("No file name for {}", source.display())))?, - ); - fs_extra::file::copy(source, target, &file_copy_options())?; - Ok(()) -} - -/// Copies all content of the source dir into the target dir -pub fn copy_dir(source: &Path, target_dir: &Path) -> Result<()> { - fs_extra::dir::copy(source, target_dir, &dir_copy_options())?; - Ok(()) -} - -/// Clones a remote repository into the specified directory. -pub fn clone_remote(repo_url: &str, target_dir: &Path) -> std::io::Result { - Command::new("git") - .args(["clone", "--depth", "1", "--recursive", repo_url]) - .arg(target_dir) - .output() -} - -#[cfg(test)] -#[cfg(feature = "svm-solc")] -mod tests { - use super::*; - - #[test] - fn can_mock_project() { - let _prj = TempProject::mocked(&Default::default(), "^0.8.11").unwrap(); - let _prj = TempProject::mocked_random("^0.8.11").unwrap(); - } -} diff --git a/crates/compilers/src/report/compiler.rs b/crates/compilers/src/report/compiler.rs deleted file mode 100644 index 04afa17c..00000000 --- a/crates/compilers/src/report/compiler.rs +++ /dev/null @@ -1,244 +0,0 @@ -//! Additional logging [SolcInput] and [CompilerOutput] -//! -//! Useful for debugging purposes. -//! As solc compiler input and output can become quite large (in the tens of MB) we still want a way -//! to get this info when debugging an issue. Most convenient way to look at these object is as a -//! separate json file - -use foundry_compilers_artifacts::{CompilerOutput, SolcInput}; -use semver::Version; -use std::{env, path::PathBuf, str::FromStr}; - -/// Debug Helper type that can be used to write the [crate::compilers::solc::Solc] [SolcInput] and -/// [CompilerOutput] to disk if configured. 
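For the filesystem helpers above, a usage sketch of `clone_remote`; the repository URL is illustrative, `git` must be on `PATH`, and the `tempfile` crate is assumed to be available:

```rust
fn main() -> std::io::Result<()> {
    // Shallow, recursive clone into a fresh temp dir, mirroring what
    // `TempProject::checkout` does internally.
    let tmp = tempfile::tempdir()?;
    let output = foundry_compilers::project_util::clone_remote(
        "https://github.com/foundry-rs/forge-std",
        tmp.path(),
    )?;
    assert!(output.status.success());
    Ok(())
}
```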
-/// -/// # Examples -/// -/// If `foundry_compilers_LOG=in=in.json,out=out.json` is then the reporter will be configured to -/// write the compiler input as pretty formatted json to `in.{solc version}.json` and the compiler -/// output to `out.{solc version}.json` -/// -/// ```no_run -/// use foundry_compilers::report::SolcCompilerIoReporter; -/// std::env::set_var("foundry_compilers_LOG", "in=in.json,out=out.json"); -/// let rep = SolcCompilerIoReporter::from_default_env(); -/// ``` -#[derive(Clone, Debug, Default)] -pub struct SolcCompilerIoReporter { - /// where to write the output to, `None` if not enabled - target: Option, -} - -impl SolcCompilerIoReporter { - /// Returns a new `SolcCompilerIOLayer` from the fields in the given string, - /// ignoring any that are invalid. - pub fn new(value: &str) -> Self { - Self { target: Some(value.parse().unwrap_or_default()) } - } - - /// `foundry_compilers_LOG` is the default environment variable used by - /// [`SolcCompilerIOLayer::from_default_env`] - /// - /// [`SolcCompilerIOLayer::from_default_env`]: #method.from_default_env - pub const DEFAULT_ENV: &'static str = "foundry_compilers_LOG"; - - /// Returns a new `SolcCompilerIOLayer` from the value of the `foundry_compilers_LOG` - /// environment variable, ignoring any invalid filter directives. - pub fn from_default_env() -> Self { - Self::from_env(Self::DEFAULT_ENV) - } - - /// Returns a new `SolcCompilerIOLayer` from the value of the given environment - /// variable, ignoring any invalid filter directives. - pub fn from_env(env: impl AsRef) -> Self { - env::var(env).map(|var| Self::new(&var)).unwrap_or_default() - } - - /// Callback to write the input to disk if target is set - pub fn log_compiler_input(&self, input: &SolcInput, version: &Version) { - if let Some(ref target) = self.target { - target.write_input(input, version) - } - } - - /// Callback to write the input to disk if target is set - pub fn log_compiler_output(&self, output: &CompilerOutput, version: &Version) { - if let Some(ref target) = self.target { - target.write_output(output, version) - } - } -} - -impl> From for SolcCompilerIoReporter { - fn from(s: S) -> Self { - Self::new(s.as_ref()) - } -} - -/// Represents the `in=,out=` value -#[derive(Clone, Debug, PartialEq, Eq)] -struct Target { - /// path where the compiler input file should be written to - dest_input: PathBuf, - /// path where the compiler output file should be written to - dest_output: PathBuf, -} - -impl Target { - fn write_input(&self, input: &SolcInput, version: &Version) { - trace!("logging compiler input to {}", self.dest_input.display()); - match serde_json::to_string_pretty(input) { - Ok(json) => { - if let Err(err) = std::fs::write(get_file_name(&self.dest_input, version), json) { - error!("Failed to write compiler input: {}", err) - } - } - Err(err) => { - error!("Failed to serialize compiler input: {}", err) - } - } - } - - fn write_output(&self, output: &CompilerOutput, version: &Version) { - trace!("logging compiler output to {}", self.dest_output.display()); - match serde_json::to_string_pretty(output) { - Ok(json) => { - if let Err(err) = std::fs::write(get_file_name(&self.dest_output, version), json) { - error!("Failed to write compiler output: {}", err) - } - } - Err(err) => { - error!("Failed to serialize compiler output: {}", err) - } - } - } -} - -impl Default for Target { - fn default() -> Self { - Self { - dest_input: "compiler-input.json".into(), - dest_output: "compiler-output.json".into(), - } - } -} - -impl FromStr for Target { - 
type Err = Box; - fn from_str(s: &str) -> Result { - let mut dest_input = None; - let mut dest_output = None; - for part in s.split(',') { - let (name, val) = - part.split_once('=').ok_or_else(|| BadName { name: part.to_string() })?; - match name { - "i" | "in" | "input" | "compilerinput" => { - dest_input = Some(PathBuf::from(val)); - } - "o" | "out" | "output" | "compileroutput" => { - dest_output = Some(PathBuf::from(val)); - } - _ => return Err(BadName { name: part.to_string() }.into()), - }; - } - - Ok(Self { - dest_input: dest_input.unwrap_or_else(|| "compiler-input.json".into()), - dest_output: dest_output.unwrap_or_else(|| "compiler-output.json".into()), - }) - } -} - -/// Indicates that a field name specified in the env value was invalid. -#[derive(Clone, Debug, thiserror::Error)] -#[error("{}", self.name)] -pub struct BadName { - name: String, -} - -/// Returns the file name for the given version -fn get_file_name(path: impl Into, v: &Version) -> PathBuf { - let mut path = path.into(); - if let Some(stem) = path.file_stem().and_then(|s| s.to_str().map(|s| s.to_string())) { - path.set_file_name(format!("{stem}.{}.{}.{}.json", v.major, v.minor, v.patch)); - } - path -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs; - use tempfile::tempdir; - - #[test] - fn can_set_file_name() { - let s = "/a/b/c/in.json"; - let p = get_file_name(s, &Version::new(0, 8, 10)); - assert_eq!(PathBuf::from("/a/b/c/in.0.8.10.json"), p); - - let s = "abc.json"; - let p = get_file_name(s, &Version::new(0, 8, 10)); - assert_eq!(PathBuf::from("abc.0.8.10.json"), p); - } - - #[test] - fn can_parse_target() { - let target: Target = "in=in.json,out=out.json".parse().unwrap(); - assert_eq!(target, Target { dest_input: "in.json".into(), dest_output: "out.json".into() }); - - let target: Target = "in=in.json".parse().unwrap(); - assert_eq!(target, Target { dest_input: "in.json".into(), ..Default::default() }); - - let target: Target = "out=out.json".parse().unwrap(); - assert_eq!(target, Target { dest_output: "out.json".into(), ..Default::default() }); - } - - #[test] - fn can_init_reporter_from_env() { - let rep = SolcCompilerIoReporter::from_default_env(); - assert!(rep.target.is_none()); - std::env::set_var("foundry_compilers_LOG", "in=in.json,out=out.json"); - let rep = SolcCompilerIoReporter::from_default_env(); - assert!(rep.target.is_some()); - assert_eq!( - rep.target.unwrap(), - Target { dest_input: "in.json".into(), dest_output: "out.json".into() } - ); - std::env::remove_var("foundry_compilers_LOG"); - } - - #[test] - fn check_no_write_when_no_target() { - let reporter = SolcCompilerIoReporter::default(); - let version = Version::parse("0.8.10").unwrap(); - let input = SolcInput::default(); - let output = CompilerOutput::default(); - - reporter.log_compiler_input(&input, &version); - reporter.log_compiler_output(&output, &version); - } - - #[test] - fn serialize_and_write_to_file() { - let dir = tempdir().unwrap(); - let input_path = dir.path().join("input.json"); - let output_path = dir.path().join("output.json"); - let version = Version::parse("0.8.10").unwrap(); - let target = Target { dest_input: input_path.clone(), dest_output: output_path.clone() }; - - let input = SolcInput::default(); - let output = CompilerOutput::default(); - - target.write_input(&input, &version); - target.write_output(&output, &version); - - let input_content = fs::read_to_string(get_file_name(&input_path, &version)).unwrap(); - let output_content = fs::read_to_string(get_file_name(&output_path, 
&version)).unwrap(); - - assert!(!input_content.is_empty()); - assert!(!output_content.is_empty()); - - dir.close().unwrap(); - } -} diff --git a/crates/compilers/src/report/mod.rs b/crates/compilers/src/report/mod.rs deleted file mode 100644 index b21a9f0f..00000000 --- a/crates/compilers/src/report/mod.rs +++ /dev/null @@ -1,501 +0,0 @@ -//! Subscribe to events in the compiler pipeline -//! -//! The _reporter_ is the component of the [`crate::Project::compile()`] pipeline which is -//! responsible for reporting on specific steps in the process. -//! -//! By default, the current reporter is a noop that does -//! nothing. -//! -//! To use another report implementation, it must be set as the current reporter. -//! There are two methods for doing so: [`with_scoped`] and -//! [`try_init`]. `with_scoped` sets the reporter for the -//! duration of a scope, while `set_global` sets a global default report -//! for the entire process. - -// - -#![allow(static_mut_refs)] // TODO - -use foundry_compilers_artifacts::remappings::Remapping; -use semver::Version; -use std::{ - any::{Any, TypeId}, - cell::RefCell, - error::Error, - fmt, - path::{Path, PathBuf}, - ptr::NonNull, - sync::{ - atomic::{AtomicBool, AtomicUsize, Ordering}, - Arc, - }, - time::Duration, -}; - -mod compiler; -pub use compiler::SolcCompilerIoReporter; - -thread_local! { - static CURRENT_STATE: State = State { - scoped: RefCell::new(Report::none()), - }; -} - -static EXISTS: AtomicBool = AtomicBool::new(false); -static SCOPED_COUNT: AtomicUsize = AtomicUsize::new(0); - -// tracks the state of `GLOBAL_REPORTER` -static GLOBAL_REPORTER_STATE: AtomicUsize = AtomicUsize::new(UN_SET); - -const UN_SET: usize = 0; -const SETTING: usize = 1; -const SET: usize = 2; - -static mut GLOBAL_REPORTER: Option = None; - -/// Install this `Reporter` as the global default if one is -/// not already set. -/// -/// # Errors -/// Returns an Error if the initialization was unsuccessful, likely -/// because a global reporter was already installed by another -/// call to `try_init`. -pub fn try_init(reporter: T) -> Result<(), Box> -where - T: Reporter + Send + Sync + 'static, -{ - set_global_reporter(Report::new(reporter))?; - Ok(()) -} - -/// Install this `Reporter` as the global default. -/// -/// # Panics -/// -/// Panics if the initialization was unsuccessful, likely because a -/// global reporter was already installed by another call to `try_init`. -/// ``` -/// use foundry_compilers::report::BasicStdoutReporter; -/// let subscriber = foundry_compilers::report::init(BasicStdoutReporter::default()); -/// ``` -pub fn init(reporter: T) -where - T: Reporter + Send + Sync + 'static, -{ - try_init(reporter).expect("Failed to install global reporter") -} - -/// Trait representing the functions required to emit information about various steps in the -/// compiler pipeline. -/// -/// This trait provides a series of callbacks that are invoked at certain parts of the -/// [`crate::Project::compile()`] process. -/// -/// Implementers of this trait can use these callbacks to emit additional information, for example -/// print custom messages to `stdout`. -/// -/// A `Reporter` is entirely passive and only listens to incoming "events". -pub trait Reporter: 'static + std::fmt::Debug { - /// Callback invoked right before [Compiler::compile] is called - /// - /// This contains the [Compiler] its [Version] and all files that triggered the compile job. The - /// dirty files are only provided to give a better feedback what was actually compiled. 
- /// - /// [Compiler]: crate::compilers::Compiler - /// [Compiler::compile]: crate::compilers::Compiler::compile - fn on_compiler_spawn( - &self, - _compiler_name: &str, - _version: &Version, - _dirty_files: &[PathBuf], - ) { - } - - /// Invoked with the `CompilerOutput` if [`Compiler::compile()`] was successful - /// - /// [`Compiler::compile()`]: crate::compilers::Compiler::compile - fn on_compiler_success(&self, _compiler_name: &str, _version: &Version, _duration: &Duration) {} - - /// Invoked before a new compiler version is installed - fn on_solc_installation_start(&self, _version: &Version) {} - - /// Invoked after a new compiler version was successfully installed - fn on_solc_installation_success(&self, _version: &Version) {} - - /// Invoked after a compiler installation failed - fn on_solc_installation_error(&self, _version: &Version, _error: &str) {} - - /// Invoked if imports couldn't be resolved with the given remappings, where `imports` is the - /// list of all import paths and the file they occurred in: `(import stmt, file)` - fn on_unresolved_imports(&self, _imports: &[(&Path, &Path)], _remappings: &[Remapping]) {} - - /// If `self` is the same type as the provided `TypeId`, returns an untyped - /// [`NonNull`] pointer to that type. Otherwise, returns `None`. - /// - /// If you wish to downcast a `Reporter`, it is strongly advised to use - /// the safe API provided by downcast_ref instead. - /// - /// This API is required for `downcast_raw` to be a trait method; a method - /// signature like downcast_ref (with a generic type parameter) is not - /// object-safe, and thus cannot be a trait method for `Reporter`. This - /// means that if we only exposed downcast_ref, `Reporter` - /// implementations could not override the downcasting behavior - /// - /// # Safety - /// - /// The downcast_ref method expects that the pointer returned by - /// `downcast_raw` points to a valid instance of the type - /// with the provided `TypeId`. Failure to ensure this will result in - /// undefined behaviour, so implementing `downcast_raw` is unsafe. - unsafe fn downcast_raw(&self, id: TypeId) -> Option> { - if id == TypeId::of::() { - Some(NonNull::from(self).cast()) - } else { - None - } - } -} - -impl dyn Reporter { - /// Returns `true` if this `Reporter` is the same type as `T`. - pub fn is(&self) -> bool { - self.downcast_ref::().is_some() - } - - /// Returns some reference to this `Reporter` value if it is of type `T`, - /// or `None` if it isn't. 
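As with `std::any::Any`, this `downcast_raw`/`downcast_ref` pair allows checking which concrete reporter sits behind a `dyn Reporter`. A minimal usage sketch, assuming only the public `Report` and `BasicStdoutReporter` types from this module:

```rust
use foundry_compilers::report::{BasicStdoutReporter, Report};

fn main() {
    let report = Report::new(BasicStdoutReporter::default());
    // `Report::is` forwards to `downcast_raw` through the `downcast_ref` helper.
    assert!(report.is::<BasicStdoutReporter>());
}
```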
- pub fn downcast_ref(&self) -> Option<&T> { - unsafe { - let raw = self.downcast_raw(TypeId::of::())?; - Some(&*(raw.cast().as_ptr())) - } - } -} - -pub(crate) fn compiler_spawn(compiler_name: &str, version: &Version, dirty_files: &[PathBuf]) { - get_default(|r| r.reporter.on_compiler_spawn(compiler_name, version, dirty_files)); -} - -pub(crate) fn compiler_success(compiler_name: &str, version: &Version, duration: &Duration) { - get_default(|r| r.reporter.on_compiler_success(compiler_name, version, duration)); -} - -#[allow(dead_code)] -pub(crate) fn solc_installation_start(version: &Version) { - get_default(|r| r.reporter.on_solc_installation_start(version)); -} - -#[allow(dead_code)] -pub(crate) fn solc_installation_success(version: &Version) { - get_default(|r| r.reporter.on_solc_installation_success(version)); -} - -#[allow(dead_code)] -pub(crate) fn solc_installation_error(version: &Version, error: &str) { - get_default(|r| r.reporter.on_solc_installation_error(version, error)); -} - -pub(crate) fn unresolved_imports(imports: &[(&Path, &Path)], remappings: &[Remapping]) { - get_default(|r| r.reporter.on_unresolved_imports(imports, remappings)); -} - -fn get_global() -> Option<&'static Report> { - if GLOBAL_REPORTER_STATE.load(Ordering::SeqCst) != SET { - return None; - } - unsafe { - // This is safe given the invariant that setting the global reporter - // also sets `GLOBAL_REPORTER_STATE` to `SET`. - Some(GLOBAL_REPORTER.as_ref().expect( - "Reporter invariant violated: GLOBAL_REPORTER must be initialized before GLOBAL_REPORTER_STATE is set", - )) - } -} - -/// Executes a closure with a reference to this thread's current reporter. -#[inline(always)] -pub fn get_default(mut f: F) -> T -where - F: FnMut(&Report) -> T, -{ - if SCOPED_COUNT.load(Ordering::Acquire) == 0 { - // fast path if no scoped reporter has been set; use the global - // default. - return if let Some(glob) = get_global() { f(glob) } else { f(&Report::none()) }; - } - - get_default_scoped(f) -} - -#[inline(never)] -fn get_default_scoped(mut f: F) -> T -where - F: FnMut(&Report) -> T, -{ - CURRENT_STATE - .try_with(|state| { - let scoped = state.scoped.borrow_mut(); - f(&scoped) - }) - .unwrap_or_else(|_| f(&Report::none())) -} - -/// Executes a closure with a reference to the `Reporter`. -pub fn with_global(f: impl FnOnce(&Report) -> T) -> Option { - let report = get_global()?; - Some(f(report)) -} - -/// Sets this reporter as the scoped reporter for the duration of a closure. -pub fn with_scoped(report: &Report, f: impl FnOnce() -> T) -> T { - // When this guard is dropped, the scoped reporter will be reset to the - // prior reporter. Using this (rather than simply resetting after calling - // `f`) ensures that we always reset to the prior reporter even if `f` - // panics. - let _guard = set_scoped(report); - f() -} - -/// The report state of a thread. -struct State { - /// This thread's current scoped reporter. - scoped: RefCell, -} - -impl State { - /// Replaces the current scoped reporter on this thread with the provided - /// reporter. - /// - /// Dropping the returned `ResetGuard` will reset the scoped reporter to - /// the previous value. - #[inline] - fn set_scoped(new_report: Report) -> ScopeGuard { - let prior = CURRENT_STATE.try_with(|state| state.scoped.replace(new_report)).ok(); - EXISTS.store(true, Ordering::Release); - SCOPED_COUNT.fetch_add(1, Ordering::Release); - ScopeGuard(prior) - } -} - -/// A guard that resets the current scoped reporter to the prior -/// scoped reporter when dropped. 
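The guard described here is ordinary RAII around a thread-local slot. The same shape, reduced to a self-contained sketch with a plain `String` standing in for `Report` (all names illustrative):

```rust
use std::cell::RefCell;

thread_local! {
    static CURRENT: RefCell<String> = RefCell::new(String::from("default"));
}

/// Holds the prior value and restores it on drop, even if the scope panics.
struct Guard(String);

impl Drop for Guard {
    fn drop(&mut self) {
        CURRENT.with(|c| *c.borrow_mut() = std::mem::take(&mut self.0));
    }
}

fn set_scoped(new: &str) -> Guard {
    // `RefCell::replace` returns the previous value, which the guard stashes.
    Guard(CURRENT.with(|c| c.replace(new.to_string())))
}

fn main() {
    let guard = set_scoped("scoped");
    CURRENT.with(|c| assert_eq!(*c.borrow(), "scoped"));
    drop(guard); // restores the prior value
    CURRENT.with(|c| assert_eq!(*c.borrow(), "default"));
}
```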
-#[derive(Debug)]
-pub struct ScopeGuard(Option<Report>);
-
-impl Drop for ScopeGuard {
-    #[inline]
-    fn drop(&mut self) {
-        SCOPED_COUNT.fetch_sub(1, Ordering::Release);
-        if let Some(report) = self.0.take() {
-            // Replace the reporter and then drop the old one outside
-            // of the thread-local context.
-            let prev = CURRENT_STATE.try_with(|state| state.scoped.replace(report));
-            drop(prev)
-        }
-    }
-}
-
-/// Sets the reporter as the scoped reporter for the duration of the lifetime
-/// of the returned [`ScopeGuard`]
-#[must_use = "Dropping the guard unregisters the reporter."]
-pub fn set_scoped(reporter: &Report) -> ScopeGuard {
-    // When this guard is dropped, the scoped reporter will be reset to the
-    // prior default. Using this ensures that we always reset to the prior
-    // reporter even if the thread calling this function panics.
-    State::set_scoped(reporter.clone())
-}
-
-/// A no-op [`Reporter`] that does nothing.
-#[derive(Clone, Copy, Debug, Default)]
-pub struct NoReporter(());
-
-impl Reporter for NoReporter {}
-
-/// A [`Reporter`] that emits some general information to `stdout`
-#[derive(Clone, Debug, Default)]
-pub struct BasicStdoutReporter {
-    _priv: (),
-}
-
-impl Reporter for BasicStdoutReporter {
-    /// Callback invoked right before [`Compiler::compile()`] is called
-    ///
-    /// [`Compiler::compile()`]: crate::compilers::Compiler::compile
-    fn on_compiler_spawn(&self, compiler_name: &str, version: &Version, dirty_files: &[PathBuf]) {
-        println!(
-            "Compiling {} files with {} {}.{}.{}",
-            dirty_files.len(),
-            compiler_name,
-            version.major,
-            version.minor,
-            version.patch
-        );
-    }
-
-    fn on_compiler_success(&self, compiler_name: &str, version: &Version, duration: &Duration) {
-        println!(
-            "{} {}.{}.{} finished in {duration:.2?}",
-            compiler_name, version.major, version.minor, version.patch
-        );
-    }
-
-    /// Invoked before a new compiler version is installed
-    fn on_solc_installation_start(&self, version: &Version) {
-        println!("installing solc version \"{version}\"");
-    }
-
-    /// Invoked after a new compiler version was successfully installed
-    fn on_solc_installation_success(&self, version: &Version) {
-        println!("Successfully installed solc {version}");
-    }
-
-    fn on_solc_installation_error(&self, version: &Version, error: &str) {
-        eprintln!("Failed to install solc {version}: {error}");
-    }
-
-    fn on_unresolved_imports(&self, imports: &[(&Path, &Path)], remappings: &[Remapping]) {
-        if imports.is_empty() {
-            return;
-        }
-        println!("{}", format_unresolved_imports(imports, remappings))
-    }
-}
-
-/// Creates a meaningful message for all unresolved imports
-pub fn format_unresolved_imports(imports: &[(&Path, &Path)], remappings: &[Remapping]) -> String {
-    let info = imports
-        .iter()
-        .map(|(import, file)| format!("\"{}\" in \"{}\"", import.display(), file.display()))
-        .collect::<Vec<_>>()
-        .join("\n ");
-    format!(
-        "Unable to resolve imports:\n {}\nwith remappings:\n {}",
-        info,
-        remappings.iter().map(|r| r.to_string()).collect::<Vec<_>>().join("\n ")
-    )
-}
-
-/// Returned if setting the global reporter fails.
-#[derive(Debug)]
-pub struct SetGlobalReporterError {
-    // private marker so this type can't be instantiated
-    _priv: (),
-}
-
-impl fmt::Display for SetGlobalReporterError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.pad("a global reporter has already been set")
-    }
-}
-
-impl Error for SetGlobalReporterError {}
-
-/// A `Report` forwards trace data to a [`Reporter`].
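Putting the pieces together, a custom [`Reporter`] can be installed for a single scope. This sketch mirrors the `scoped_reporter_works` test further below; `QuietReporter` is an illustrative name:

```rust
use foundry_compilers::report::{self, Report, Reporter};

#[derive(Debug)]
struct QuietReporter;

// All callbacks have default no-op implementations.
impl Reporter for QuietReporter {}

fn main() {
    report::with_scoped(&Report::new(QuietReporter), || {
        // Anything compiled inside this closure reports to `QuietReporter`.
        report::get_default(|r| assert!(r.is::<QuietReporter>()));
    });
}
```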
-#[derive(Clone)] -pub struct Report { - reporter: Arc, -} - -impl Report { - /// Returns a new `Report` that does nothing - pub fn none() -> Self { - Self { reporter: Arc::new(NoReporter::default()) } - } - - /// Returns a `Report` that forwards to the given [`Reporter`]. - /// - /// [`Reporter`]: ../reporter/trait.Reporter.html - pub fn new(reporter: S) -> Self - where - S: Reporter + Send + Sync + 'static, - { - Self { reporter: Arc::new(reporter) } - } - - /// Returns `true` if this `Report` forwards to a reporter of type - /// `T`. - #[inline] - pub fn is(&self) -> bool { - ::is::(&*self.reporter) - } -} - -impl fmt::Debug for Report { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.pad("Report(...)") - } -} - -/// Sets this report as the global default for the duration of the entire program. -/// -/// The global reporter can only be set once; additional attempts to set the global reporter will -/// fail. Returns `Err` if the global reporter has already been set. -fn set_global_reporter(report: Report) -> Result<(), SetGlobalReporterError> { - // `compare_exchange` tries to store `SETTING` if the current value is `UN_SET` - // this returns `Ok(_)` if the current value of `GLOBAL_REPORTER_STATE` was `UN_SET` and - // `SETTING` was written, this guarantees the value is `SETTING`. - if GLOBAL_REPORTER_STATE - .compare_exchange(UN_SET, SETTING, Ordering::SeqCst, Ordering::SeqCst) - .is_ok() - { - unsafe { - GLOBAL_REPORTER = Some(report); - } - GLOBAL_REPORTER_STATE.store(SET, Ordering::SeqCst); - Ok(()) - } else { - Err(SetGlobalReporterError { _priv: () }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::str::FromStr; - - #[test] - fn scoped_reporter_works() { - #[derive(Debug)] - struct TestReporter; - impl Reporter for TestReporter {} - - with_scoped(&Report::new(TestReporter), || { - get_default(|reporter| assert!(reporter.is::())) - }); - } - - #[test] - fn global_and_scoped_reporter_works() { - get_default(|reporter| { - assert!(reporter.is::()); - }); - - set_global_reporter(Report::new(BasicStdoutReporter::default())).unwrap(); - #[derive(Debug)] - struct TestReporter; - impl Reporter for TestReporter {} - - with_scoped(&Report::new(TestReporter), || { - get_default(|reporter| assert!(reporter.is::())) - }); - - get_default(|reporter| assert!(reporter.is::())) - } - - #[test] - fn test_unresolved_message() { - let unresolved = vec![(Path::new("./src/Import.sol"), Path::new("src/File.col"))]; - - let remappings = vec![Remapping::from_str("oz=a/b/c/d").unwrap()]; - - assert_eq!( - format_unresolved_imports(&unresolved, &remappings).trim(), - r#" -Unable to resolve imports: - "./src/Import.sol" in "src/File.col" -with remappings: - oz/=a/b/c/d/"# - .trim() - ) - } -} diff --git a/crates/compilers/src/resolver/mod.rs b/crates/compilers/src/resolver/mod.rs deleted file mode 100644 index b8a456ca..00000000 --- a/crates/compilers/src/resolver/mod.rs +++ /dev/null @@ -1,1109 +0,0 @@ -//! Resolution of the entire dependency graph for a project. -//! -//! This module implements the core logic in taking all contracts of a project and creating a -//! resolved graph with applied remappings for all source contracts. -//! -//! Some constraints we're working with when resolving contracts -//! -//! 1. Each file can contain several source units and can have any number of imports/dependencies -//! (using the term interchangeably). Each dependency can declare a version range that it is -//! compatible with, solidity version pragma. -//! 2. 
A dependency can be imported from any directory, see `Remappings` -//! -//! Finding all dependencies is fairly simple, we're simply doing a DFS, starting the source -//! contracts -//! -//! ## Solc version auto-detection -//! -//! Solving a constraint graph is an NP-hard problem. The algorithm for finding the "best" solution -//! makes several assumptions and tries to find a version of "Solc" that is compatible with all -//! source files. -//! -//! The algorithm employed here is fairly simple, we simply do a DFS over all the source files and -//! find the set of Solc versions that the file and all its imports are compatible with, and then we -//! try to find a single Solc version that is compatible with all the files. This is effectively the -//! intersection of all version sets. -//! -//! We always try to activate the highest (installed) solc version first. Uninstalled solc is only -//! used if this version is the only compatible version for a single file or in the intersection of -//! all version sets. -//! -//! This leads to finding the optimal version, if there is one. If there is no single Solc version -//! that is compatible with all sources and their imports, then suddenly this becomes a very -//! difficult problem, because what would be the "best" solution. In this case, just choose the -//! latest (installed) Solc version and try to minimize the number of Solc versions used. -//! -//! ## Performance -//! -//! Note that this is a relatively performance-critical portion of the ethers-solc preprocessing. -//! The data that needs to be processed is proportional to the size of the dependency -//! graph, which can, depending on the project, often be quite large. -//! -//! Note that, unlike the solidity compiler, we work with the filesystem, where we have to resolve -//! remappings and follow relative paths. We're also limiting the nodes in the graph to solidity -//! files, since we're only interested in their -//! [version pragma](https://docs.soliditylang.org/en/develop/layout-of-source-files.html#version-pragma), -//! which is defined on a per source file basis. - -use crate::{ - compilers::{Compiler, CompilerVersion, Language, ParsedSource}, - project::VersionedSources, - ProjectPathsConfig, -}; -use core::fmt; -use foundry_compilers_artifacts::sources::{Source, Sources}; -use foundry_compilers_core::{ - error::{Result, SolcError}, - utils::{self, find_case_sensitive_existing_file}, -}; -use parse::SolData; -use rayon::prelude::*; -use semver::{Version, VersionReq}; -use std::{ - collections::{BTreeSet, HashMap, HashSet, VecDeque}, - io, - path::{Path, PathBuf}, -}; -use yansi::{Color, Paint}; - -pub mod parse; -mod tree; - -pub use parse::SolImportAlias; -pub use tree::{print, Charset, TreeOptions}; - -/// The underlying edges of the graph which only contains the raw relationship data. -/// -/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources` -/// set is determined. -#[derive(Debug)] -pub struct GraphEdges { - /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]` - /// is the set of outgoing edges for `nodes[0]`. - edges: Vec>, - /// Reverse of `edges`. That is, `rev_edges[0]` is the set of incoming edges for `nodes[0]`. - rev_edges: Vec>, - /// index maps for a solidity file to an index, for fast lookup. 
-    indices: HashMap<PathBuf, usize>,
-    /// reverse of `indices` for reverse lookup
-    rev_indices: HashMap<usize, PathBuf>,
-    /// the identified version requirement of a file
-    versions: HashMap<usize, Option<VersionReq>>,
-    /// the extracted data from the source file
-    data: HashMap<usize, D>,
-    /// the number of input files we started with; corresponds to `let input_files =
-    /// nodes[..num_input_files]`.
-    ///
-    /// Combined with the `indices` this way we can determine if a file was originally added to
-    /// the graph as input or was added as a resolved import, see [`Self::is_input_file()`]
-    num_input_files: usize,
-    /// tracks all imports that we failed to resolve for a file
-    unresolved_imports: HashSet<(PathBuf, PathBuf)>,
-    /// tracks additional include paths resolved by scanning all imports of the graph
-    ///
-    /// Absolute imports, like `import "src/Contract.sol"` are possible, but this does not play
-    /// nice with the standard-json import format, since the VFS won't be able to resolve
-    /// "src/Contract.sol" without help via `--include-path`
-    resolved_solc_include_paths: BTreeSet<PathBuf>,
-}
-
-impl<D> GraphEdges<D> {
-    /// How many files are source files
-    pub fn num_source_files(&self) -> usize {
-        self.num_input_files
-    }
-
-    /// Returns an iterator over all file indices
-    pub fn files(&self) -> impl Iterator<Item = usize> + '_ {
-        0..self.edges.len()
-    }
-
-    /// Returns an iterator over all source file indices
-    pub fn source_files(&self) -> impl Iterator<Item = usize> + '_ {
-        0..self.num_input_files
-    }
-
-    /// Returns an iterator over all library files
-    pub fn library_files(&self) -> impl Iterator<Item = usize> + '_ {
-        self.files().skip(self.num_input_files)
-    }
-
-    /// Returns all additional `--include-paths`
-    pub fn include_paths(&self) -> &BTreeSet<PathBuf> {
-        &self.resolved_solc_include_paths
-    }
-
-    /// Returns all imports that we failed to resolve
-    pub fn unresolved_imports(&self) -> &HashSet<(PathBuf, PathBuf)> {
-        &self.unresolved_imports
-    }
-
-    /// Returns a list of nodes the given node index points to.
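A compact sketch of how the reverse lookup used by `importers` can be derived from the forward `edges`, mirroring the `rev_edges` construction in `resolve_sources` further below:

```rust
// Given edges[i] = nodes that node i imports, build rev[i] = nodes importing i.
fn build_rev_edges(edges: &[Vec<usize>]) -> Vec<Vec<usize>> {
    let mut rev = vec![Vec::new(); edges.len()];
    for (from, targets) in edges.iter().enumerate() {
        for &to in targets {
            rev[to].push(from);
        }
    }
    rev
}

fn main() {
    // Nodes 0 and 1 both import node 2.
    let edges = vec![vec![2], vec![2], vec![]];
    assert_eq!(build_rev_edges(&edges), vec![vec![], vec![], vec![0, 1]]);
}
```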
- pub fn imported_nodes(&self, from: usize) -> &[usize] { - &self.edges[from] - } - - /// Returns an iterator that yields all imports of a node and all their imports - pub fn all_imported_nodes(&self, from: usize) -> impl Iterator + '_ { - NodesIter::new(from, self).skip(1) - } - - /// Returns all files imported by the given file - pub fn imports(&self, file: &Path) -> HashSet<&PathBuf> { - if let Some(start) = self.indices.get(file).copied() { - NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect() - } else { - HashSet::new() - } - } - - /// Returns all files that import the given file - pub fn importers(&self, file: &Path) -> HashSet<&PathBuf> { - if let Some(start) = self.indices.get(file).copied() { - self.rev_edges[start].iter().map(move |idx| &self.rev_indices[idx]).collect() - } else { - HashSet::new() - } - } - - /// Returns the id of the given file - pub fn node_id(&self, file: &Path) -> usize { - self.indices[file] - } - - /// Returns the path of the given node - pub fn node_path(&self, id: usize) -> &PathBuf { - &self.rev_indices[&id] - } - - /// Returns true if the `file` was originally included when the graph was first created and not - /// added when all `imports` were resolved - pub fn is_input_file(&self, file: &Path) -> bool { - if let Some(idx) = self.indices.get(file).copied() { - idx < self.num_input_files - } else { - false - } - } - - /// Returns the `VersionReq` for the given file - pub fn version_requirement(&self, file: &Path) -> Option<&VersionReq> { - self.indices.get(file).and_then(|idx| self.versions.get(idx)).and_then(Option::as_ref) - } - - /// Returns the parsed source data for the given file - pub fn get_parsed_source(&self, file: &Path) -> Option<&D> { - self.indices.get(file).and_then(|idx| self.data.get(idx)) - } -} - -/// Represents a fully-resolved solidity dependency graph. -/// -/// Each node in the graph is a file and edges represent dependencies between them. -/// -/// See also -#[derive(Debug)] -pub struct Graph { - /// all nodes in the project, a `Node` represents a single file - nodes: Vec>, - /// relationship of the nodes - edges: GraphEdges, - /// the root of the project this graph represents - root: PathBuf, -} - -impl Graph { - /// Print the graph to `StdOut` - pub fn print(&self) { - self.print_with_options(Default::default()) - } - - /// Print the graph to `StdOut` using the provided `TreeOptions` - pub fn print_with_options(&self, opts: TreeOptions) { - let stdout = io::stdout(); - let mut out = stdout.lock(); - tree::print(self, &opts, &mut out).expect("failed to write to stdout.") - } - - /// Returns a list of nodes the given node index points to for the given kind. - pub fn imported_nodes(&self, from: usize) -> &[usize] { - self.edges.imported_nodes(from) - } - - /// Returns an iterator that yields all imports of a node and all their imports - pub fn all_imported_nodes(&self, from: usize) -> impl Iterator + '_ { - self.edges.all_imported_nodes(from) - } - - /// Returns `true` if the given node has any outgoing edges. - pub(crate) fn has_outgoing_edges(&self, index: usize) -> bool { - !self.edges.edges[index].is_empty() - } - - /// Returns all the resolved files and their index in the graph - pub fn files(&self) -> &HashMap { - &self.edges.indices - } - - /// Gets a node by index. 
- /// - /// # Panics - /// - /// if the `index` node id is not included in the graph - pub fn node(&self, index: usize) -> &Node { - &self.nodes[index] - } - - pub(crate) fn display_node(&self, index: usize) -> DisplayNode<'_, D> { - DisplayNode { node: self.node(index), root: &self.root } - } - - /// Returns an iterator that yields all nodes of the dependency tree that the given node id - /// spans, starting with the node itself. - /// - /// # Panics - /// - /// if the `start` node id is not included in the graph - pub fn node_ids(&self, start: usize) -> impl Iterator + '_ { - NodesIter::new(start, &self.edges) - } - - /// Same as `Self::node_ids` but returns the actual `Node` - pub fn nodes(&self, start: usize) -> impl Iterator> + '_ { - self.node_ids(start).map(move |idx| self.node(idx)) - } - - fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges) { - let Self { nodes, mut edges, .. } = self; - // need to move the extracted data to the edges, essentially splitting the node so we have - // access to the data at a later stage in the compile pipeline - let mut sources = Vec::new(); - for (idx, node) in nodes.into_iter().enumerate() { - let Node { path, source, data } = node; - sources.push((path, source)); - edges.data.insert(idx, data); - } - - (sources, edges) - } - - /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and - /// returning the `nodes` converted to `Sources` - pub fn into_sources(self) -> (Sources, GraphEdges) { - let (sources, edges) = self.split(); - (sources.into_iter().collect(), edges) - } - - /// Returns an iterator that yields only those nodes that represent input files. - /// See `Self::resolve_sources` - /// This won't yield any resolved library nodes - pub fn input_nodes(&self) -> impl Iterator> { - self.nodes.iter().take(self.edges.num_input_files) - } - - /// Returns all files imported by the given file - pub fn imports(&self, path: &Path) -> HashSet<&PathBuf> { - self.edges.imports(path) - } - - /// Resolves a number of sources within the given config - pub fn resolve_sources( - paths: &ProjectPathsConfig, - sources: Sources, - ) -> Result { - /// checks if the given target path was already resolved, if so it adds its id to the list - /// of resolved imports. 
If it hasn't been resolved yet, it queues in the file for - /// processing - fn add_node( - unresolved: &mut VecDeque<(PathBuf, Node)>, - index: &mut HashMap, - resolved_imports: &mut Vec, - target: PathBuf, - ) -> Result<()> { - if let Some(idx) = index.get(&target).copied() { - resolved_imports.push(idx); - } else { - // imported file is not part of the input files - let node = Node::read(&target)?; - unresolved.push_back((target.clone(), node)); - let idx = index.len(); - index.insert(target, idx); - resolved_imports.push(idx); - } - Ok(()) - } - - // we start off by reading all input files, which includes all solidity files from the - // source and test folder - let mut unresolved: VecDeque<_> = sources - .0 - .into_par_iter() - .map(|(path, source)| { - let data = D::parse(source.as_ref(), &path)?; - Ok((path.clone(), Node { path, source, data })) - }) - .collect::>()?; - - // identifiers of all resolved files - let mut index: HashMap<_, _> = - unresolved.iter().enumerate().map(|(idx, (p, _))| (p.clone(), idx)).collect(); - - let num_input_files = unresolved.len(); - - // contains the files and their dependencies - let mut nodes = Vec::with_capacity(unresolved.len()); - let mut edges = Vec::with_capacity(unresolved.len()); - let mut rev_edges = Vec::with_capacity(unresolved.len()); - - // tracks additional paths that should be used with `--include-path`, these are libraries - // that use absolute imports like `import "src/Contract.sol"` - let mut resolved_solc_include_paths = BTreeSet::new(); - resolved_solc_include_paths.insert(paths.root.clone()); - - // keep track of all unique paths that we failed to resolve to not spam the reporter with - // the same path - let mut unresolved_imports = HashSet::new(); - - // now we need to resolve all imports for the source file and those imported from other - // locations - while let Some((path, node)) = unresolved.pop_front() { - let mut resolved_imports = Vec::new(); - // parent directory of the current file - let cwd = match path.parent() { - Some(inner) => inner, - None => continue, - }; - - for import_path in node.data.resolve_imports(paths, &mut resolved_solc_include_paths)? { - match paths.resolve_import_and_include_paths( - cwd, - &import_path, - &mut resolved_solc_include_paths, - ) { - Ok(import) => { - add_node(&mut unresolved, &mut index, &mut resolved_imports, import) - .map_err(|err| { - match err { - SolcError::ResolveCaseSensitiveFileName { .. } - | SolcError::Resolve(_) => { - // make the error more helpful by providing additional - // context - SolcError::FailedResolveImport( - Box::new(err), - node.path.clone(), - import_path.clone(), - ) - } - _ => err, - } - })? 
- } - Err(err) => { - unresolved_imports.insert((import_path.to_path_buf(), node.path.clone())); - trace!( - "failed to resolve import component \"{:?}\" for {:?}", - err, - node.path - ) - } - }; - } - - nodes.push(node); - edges.push(resolved_imports); - // Will be populated later - rev_edges.push(Vec::new()); - } - - // Build `rev_edges` - for (idx, edges) in edges.iter().enumerate() { - for &edge in edges.iter() { - rev_edges[edge].push(idx); - } - } - - if !unresolved_imports.is_empty() { - // notify on all unresolved imports - crate::report::unresolved_imports( - &unresolved_imports - .iter() - .map(|(i, f)| (i.as_path(), f.as_path())) - .collect::>(), - &paths.remappings, - ); - } - - let edges = GraphEdges { - edges, - rev_edges, - rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(), - indices: index, - num_input_files, - versions: nodes - .iter() - .enumerate() - .map(|(idx, node)| (idx, node.data.version_req().cloned())) - .collect(), - data: Default::default(), - unresolved_imports, - resolved_solc_include_paths, - }; - Ok(Self { nodes, edges, root: paths.root.clone() }) - } - - /// Resolves the dependencies of a project's source contracts - pub fn resolve(paths: &ProjectPathsConfig) -> Result { - Self::resolve_sources(paths, paths.read_input_files()?) - } -} - -impl> Graph { - /// Consumes the nodes of the graph and returns all input files together with their appropriate - /// version and the edges of the graph - /// - /// First we determine the compatible version for each input file (from sources and test folder, - /// see `Self::resolve`) and then we add all resolved library imports. - pub fn into_sources_by_version>( - self, - offline: bool, - locked_versions: &HashMap, - compiler: &C, - ) -> Result<(VersionedSources, GraphEdges)> { - /// insert the imports of the given node into the sources map - /// There can be following graph: - /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` - /// where `C` is a library import, in which case we assign `C` only to the first input file. - /// However, it's not required to include them in the solc `CompilerInput` as they would get - /// picked up by solc otherwise, but we add them, so we can create a corresponding - /// cache entry for them as well. 
This can be optimized, however.
-        fn insert_imports(
-            idx: usize,
-            all_nodes: &mut HashMap<usize, (PathBuf, Source)>,
-            sources: &mut Sources,
-            edges: &[Vec<usize>],
-            processed_sources: &mut HashSet<usize>,
-        ) {
-            // iterate over all dependencies not processed yet
-            for dep in edges[idx].iter().copied() {
-                // keep track of processed dependencies; if the dep was already in the set we have
-                // processed it already
-                if !processed_sources.insert(dep) {
-                    continue;
-                }
-
-                // library import
-                if let Some((path, source)) = all_nodes.get(&dep).cloned() {
-                    sources.insert(path, source);
-                    insert_imports(dep, all_nodes, sources, edges, processed_sources);
-                }
-            }
-        }
-
-        let versioned_nodes_by_lang =
-            self.get_input_node_versions(offline, locked_versions, compiler)?;
-        let (nodes, edges) = self.split();
-
-        let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();
-
-        let mut resulted_sources = HashMap::new();
-
-        // determine the `Sources` set for each solc version
-        for (language, versioned_nodes) in versioned_nodes_by_lang {
-            let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len());
-
-            for (version, input_node_indices) in versioned_nodes {
-                let mut sources = Sources::new();
-
-                // all input nodes will be processed
-                let mut processed_sources = input_node_indices.iter().copied().collect();
-
-                // we only process input nodes (from sources, tests for example)
-                for idx in input_node_indices {
-                    // insert the input node in the sources set and remove it from the available
-                    // set
-                    let (path, source) =
-                        all_nodes.get(&idx).cloned().expect("node is present. qed");
-                    sources.insert(path, source);
-                    insert_imports(
-                        idx,
-                        &mut all_nodes,
-                        &mut sources,
-                        &edges.edges,
-                        &mut processed_sources,
-                    );
-                }
-                versioned_sources.insert(version, sources);
-            }
-
-            resulted_sources.insert(language, versioned_sources);
-        }
-
-        Ok((resulted_sources, edges))
-    }
-
-    /// Writes the list of imported files into the given formatter:
-    ///
-    /// ```text
-    /// path/to/a.sol (<version req>) imports:
-    ///     path/to/b.sol (<version req>)
-    ///     path/to/c.sol (<version req>)
-    ///     ...
-    /// ```
-    fn format_imports_list<W: std::fmt::Write>(
-        &self,
-        idx: usize,
-        incompatible: HashSet<usize>,
-        f: &mut W,
-    ) -> std::result::Result<(), std::fmt::Error> {
-        let format_node = |idx, f: &mut W| {
-            let node = self.node(idx);
-            let color = if incompatible.contains(&idx) { Color::Red } else { Color::White };
-
-            let mut line = utils::source_name(&node.path, &self.root).display().to_string();
-            if let Some(req) = node.data.version_req() {
-                line.push_str(&format!(" {req}"));
-            }
-
-            write!(f, "{}", line.paint(color))
-        };
-        format_node(idx, f)?;
-        write!(f, " imports:")?;
-        for dep in self.node_ids(idx).skip(1) {
-            write!(f, "\n    ")?;
-            format_node(dep, f)?;
-        }
-
-        Ok(())
-    }
-
-    /// Filters incompatible versions from the `candidates`. It iterates over the node's imports
-    /// and, if no compatible version remains, reports the node at which the candidate set became
-    /// empty.
-    fn retain_compatible_versions(
-        &self,
-        idx: usize,
-        candidates: &mut Vec<&CompilerVersion>,
-        offline: bool,
-    ) -> Result<(), String> {
-        let mut all_versions = candidates.clone();
-
-        let nodes: Vec<_> = self.node_ids(idx).collect();
-        let mut failed_node = None;
-        for node in nodes.iter() {
-            if let Some(req) = self.node(*node).data.version_req() {
-                candidates.retain(|v| req.matches(v.as_ref()));
-
-                if candidates.is_empty() {
-                    failed_node = Some(*node);
-                    break;
-                }
-            }
-        }
-
-        let Some(failed_node_idx) = failed_node else {
-            // everything is fine
-            return Ok(());
-        };
-
-        // This now keeps data for the node that was the last one processed before we ran out of
-        // candidates. It means that this node's requirement conflicts with at least one of the
-        // requirements seen before it.
-        let failed_node = self.node(failed_node_idx);
-
-        if let Err(version_err) = failed_node.check_available_version(&all_versions, offline) {
-            // check if the version is even valid
-            let f = utils::source_name(&failed_node.path, &self.root).display();
-            return Err(format!("Encountered invalid solc version in {f}: {version_err}"));
-        } else {
-            // if the node requirement makes sense, it means that there is at least one node
-            // whose requirement conflicts with it
-
-            // retain only versions compatible with the `failed_node`
-            if let Some(req) = failed_node.data.version_req() {
-                all_versions.retain(|v| req.matches(v.as_ref()));
-            }
-
-            // iterate over all the nodes once again and find the incompatible one
-            for node in &nodes {
-                if self.node(*node).check_available_version(&all_versions, offline).is_err() {
-                    let mut msg = "Found incompatible versions:\n".white().to_string();
-
-                    self.format_imports_list(idx, [*node, failed_node_idx].into(), &mut msg)
-                        .unwrap();
-                    return Err(msg);
-                }
-            }
-        }
-
-        let mut msg = "Found incompatible versions:\n".white().to_string();
-        self.format_imports_list(idx, nodes.into_iter().collect(), &mut msg).unwrap();
-        Err(msg)
-    }
-
-    fn input_nodes_by_language(&self) -> HashMap<L, Vec<usize>> {
-        let mut nodes = HashMap::new();
-
-        for idx in 0..self.edges.num_input_files {
-            nodes.entry(self.nodes[idx].data.language()).or_insert_with(Vec::new).push(idx);
-        }
-
-        nodes
-    }
-
-    /// Returns a map of versions together with the input nodes that are compatible with that
-    /// version.
-    ///
-    /// This will essentially do a DFS on all input sources and their transitive imports and
-    /// check that all of them can be compiled with the version stated in the input file.
-    ///
-    /// Returns an error message with __all__ input files that don't have compatible imports.
-    ///
-    /// This also attempts to prefer local installations over remotely available versions.
-    /// If `offline` is set to `true` then only already installed versions are considered,
-    /// as sketched below.
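A standalone sketch of that candidate filtering, using only the `semver` crate this module already depends on (the versions and requirements here are illustrative):

```rust
use semver::{Version, VersionReq};

// Keep only versions that every file's pragma accepts: the intersection
// of all per-file version sets.
fn compatible(all: &[Version], reqs: &[VersionReq]) -> Vec<Version> {
    all.iter().filter(|v| reqs.iter().all(|req| req.matches(v))).cloned().collect()
}

fn main() {
    let all = vec![Version::new(0, 7, 6), Version::new(0, 8, 10), Version::new(0, 8, 19)];
    let reqs: Vec<VersionReq> = vec![">=0.8.0".parse().unwrap(), "<0.8.19".parse().unwrap()];
    // Only 0.8.10 satisfies both pragmas.
    assert_eq!(compatible(&all, &reqs), vec![Version::new(0, 8, 10)]);
}
```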
- fn get_input_node_versions>( - &self, - offline: bool, - locked_versions: &HashMap, - compiler: &C, - ) -> Result>>> { - trace!("resolving input node versions"); - - let mut resulted_nodes = HashMap::new(); - - for (language, nodes) in self.input_nodes_by_language() { - if let Some(version) = locked_versions.get(&language) { - resulted_nodes.insert(language, HashMap::from([(version.clone(), nodes)])); - continue; - } - // this is likely called by an application and will be eventually printed so we don't - // exit on first error, instead gather all the errors and return a bundled - // error message instead - let mut errors = Vec::new(); - - // the sorted list of all versions - let all_versions = if offline { - compiler - .available_versions(&language) - .into_iter() - .filter(|v| v.is_installed()) - .collect() - } else { - compiler.available_versions(&language) - }; - - if all_versions.is_empty() && !nodes.is_empty() { - return Err(SolcError::msg(format!( - "Found {language} sources, but no compiler versions are available for it" - ))); - } - - // stores all versions and their nodes that can be compiled - let mut versioned_nodes = HashMap::new(); - - // stores all files and the versions they're compatible with - let mut all_candidates = Vec::with_capacity(self.edges.num_input_files); - // walking through the node's dep tree and filtering the versions along the way - for idx in nodes { - let mut candidates = all_versions.iter().collect::>(); - // remove all incompatible versions from the candidates list by checking the node - // and all its imports - if let Err(err) = self.retain_compatible_versions(idx, &mut candidates, offline) { - errors.push(err); - } else { - // found viable candidates, pick the most recent version that's already - // installed - let candidate = - if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) { - candidates[pos] - } else { - candidates.last().expect("not empty; qed.") - } - .clone(); - - // also store all possible candidates to optimize the set - all_candidates.push((idx, candidates.into_iter().collect::>())); - - versioned_nodes - .entry(candidate) - .or_insert_with(|| Vec::with_capacity(1)) - .push(idx); - } - } - - // detected multiple versions but there might still exist a single version that - // satisfies all sources - if versioned_nodes.len() > 1 { - versioned_nodes = Self::resolve_multiple_versions(all_candidates); - } - - if versioned_nodes.len() == 1 { - trace!( - "found exact solc version for all sources \"{}\"", - versioned_nodes.keys().next().unwrap() - ); - } - - if errors.is_empty() { - trace!("resolved {} versions {:?}", versioned_nodes.len(), versioned_nodes.keys()); - resulted_nodes.insert( - language, - versioned_nodes - .into_iter() - .map(|(v, nodes)| (Version::from(v), nodes)) - .collect(), - ); - } else { - error!("failed to resolve versions"); - return Err(SolcError::msg(errors.join("\n"))); - } - } - - Ok(resulted_nodes) - } - - /// Tries to find the "best" set of versions to nodes, See [Solc version - /// auto-detection](#solc-version-auto-detection) - /// - /// This is a bit inefficient but is fine, the max. number of versions is ~80 and there's - /// a high chance that the number of source files is <50, even for larger projects. 
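The strategy can be reduced to the following toy sketch: intersect the per-file candidate sets and pick one survivor. Unlike the real code, which prefers installed versions and orders candidates by semver, this sketch compares plain strings:

```rust
use std::collections::HashSet;

// Intersect all candidate sets; the real code then prefers installed versions.
fn pick_common<'a>(candidates: &[HashSet<&'a str>]) -> Option<&'a str> {
    let mut iter = candidates.iter();
    let mut common = iter.next()?.clone();
    for set in iter {
        common.retain(|v| set.contains(v));
    }
    // Lexicographic max is good enough for this toy example only.
    common.into_iter().max()
}

fn main() {
    let a: HashSet<&str> = ["0.8.10", "0.8.19"].into();
    let b: HashSet<&str> = ["0.8.19", "0.8.20"].into();
    assert_eq!(pick_common(&[a, b]), Some("0.8.19"));
}
```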
- fn resolve_multiple_versions( - all_candidates: Vec<(usize, HashSet<&CompilerVersion>)>, - ) -> HashMap> { - // returns the intersection as sorted set of nodes - fn intersection<'a>( - mut sets: Vec<&HashSet<&'a CompilerVersion>>, - ) -> Vec<&'a CompilerVersion> { - if sets.is_empty() { - return Vec::new(); - } - - let mut result = sets.pop().cloned().expect("not empty; qed."); - if !sets.is_empty() { - result.retain(|item| sets.iter().all(|set| set.contains(item))); - } - - let mut v = result.into_iter().collect::>(); - v.sort_unstable(); - v - } - - /// returns the highest version that is installed - /// if the candidates set only contains uninstalled versions then this returns the highest - /// uninstalled version - fn remove_candidate(candidates: &mut Vec<&CompilerVersion>) -> CompilerVersion { - debug_assert!(!candidates.is_empty()); - - if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) { - candidates.remove(pos) - } else { - candidates.pop().expect("not empty; qed.") - } - .clone() - } - - let all_sets = all_candidates.iter().map(|(_, versions)| versions).collect(); - - // find all versions that satisfy all nodes - let mut intersection = intersection(all_sets); - if !intersection.is_empty() { - let exact_version = remove_candidate(&mut intersection); - let all_nodes = all_candidates.into_iter().map(|(node, _)| node).collect(); - trace!("resolved solc version compatible with all sources \"{}\"", exact_version); - return HashMap::from([(exact_version, all_nodes)]); - } - - // no version satisfies all nodes - let mut versioned_nodes: HashMap<_, _> = HashMap::new(); - - // try to minimize the set of versions, this is guaranteed to lead to `versioned_nodes.len() - // > 1` as no solc version exists that can satisfy all sources - for (node, versions) in all_candidates { - // need to sort them again - let mut versions = versions.into_iter().collect::>(); - versions.sort_unstable(); - - let candidate = if let Some(idx) = - versions.iter().rposition(|v| versioned_nodes.contains_key(*v)) - { - // use a version that's already in the set - versions.remove(idx).clone() - } else { - // use the highest version otherwise - remove_candidate(&mut versions) - }; - - versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(node); - } - - trace!( - "no solc version can satisfy all source files, resolved multiple versions \"{:?}\"", - versioned_nodes.keys() - ); - - versioned_nodes - } -} - -/// An iterator over a node and its dependencies -#[derive(Debug)] -pub struct NodesIter<'a, D> { - /// stack of nodes - stack: VecDeque, - visited: HashSet, - graph: &'a GraphEdges, -} - -impl<'a, D> NodesIter<'a, D> { - fn new(start: usize, graph: &'a GraphEdges) -> Self { - Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph } - } -} - -impl<'a, D> Iterator for NodesIter<'a, D> { - type Item = usize; - fn next(&mut self) -> Option { - let node = self.stack.pop_front()?; - - if self.visited.insert(node) { - // push the node's direct dependencies to the stack if we haven't visited it already - self.stack.extend(self.graph.imported_nodes(node).iter().copied()); - } - Some(node) - } -} - -#[derive(Debug)] -pub struct Node { - /// path of the solidity file - path: PathBuf, - /// content of the solidity file - source: Source, - /// parsed data - pub data: D, -} - -impl Node { - /// Reads the content of the file and returns a [Node] containing relevant information - pub fn read(file: &Path) -> Result { - let source = Source::read(file).map_err(|err| { - let 
exists = err.path().exists(); - if !exists && err.path().is_symlink() { - SolcError::ResolveBadSymlink(err) - } else { - // This is an additional check useful on OS that have case-sensitive paths, See also - if !exists { - // check if there exists a file with different case - if let Some(existing_file) = find_case_sensitive_existing_file(file) { - SolcError::ResolveCaseSensitiveFileName { error: err, existing_file } - } else { - SolcError::Resolve(err) - } - } else { - SolcError::Resolve(err) - } - } - })?; - let data = D::parse(source.as_ref(), file)?; - Ok(Self { path: file.to_path_buf(), source, data }) - } - - pub fn content(&self) -> &str { - &self.source.content - } - - pub fn unpack(&self) -> (&PathBuf, &Source) { - (&self.path, &self.source) - } - - /// Checks that the file's version is even available. - /// - /// This returns an error if the file's version is invalid semver, or is not available such as - /// 0.8.20, if the highest available version is `0.8.19` - fn check_available_version( - &self, - all_versions: &[&CompilerVersion], - offline: bool, - ) -> std::result::Result<(), SourceVersionError> { - let Some(req) = self.data.version_req() else { return Ok(()) }; - - if !all_versions.iter().any(|v| req.matches(v.as_ref())) { - return if offline { - Err(SourceVersionError::NoMatchingVersionOffline(req.clone())) - } else { - Err(SourceVersionError::NoMatchingVersion(req.clone())) - }; - } - - Ok(()) - } -} - -/// Helper type for formatting a node -pub(crate) struct DisplayNode<'a, D> { - node: &'a Node, - root: &'a PathBuf, -} - -impl<'a, D: ParsedSource> fmt::Display for DisplayNode<'a, D> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let path = utils::source_name(&self.node.path, self.root); - write!(f, "{}", path.display())?; - if let Some(ref v) = self.node.data.version_req() { - write!(f, " {v}")?; - } - Ok(()) - } -} - -/// Errors thrown when checking the solc version of a file -#[derive(Debug, thiserror::Error)] -#[allow(dead_code)] -enum SourceVersionError { - #[error("Failed to parse solidity version {0}: {1}")] - InvalidVersion(String, SolcError), - #[error("No solc version exists that matches the version requirement: {0}")] - NoMatchingVersion(VersionReq), - #[error("No solc version installed that matches the version requirement: {0}")] - NoMatchingVersionOffline(VersionReq), -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_resolve_hardhat_dependency_graph() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample"); - let paths = ProjectPathsConfig::hardhat(&root).unwrap(); - - let graph = Graph::::resolve(&paths).unwrap(); - - assert_eq!(graph.edges.num_input_files, 1); - assert_eq!(graph.files().len(), 2); - - assert_eq!( - graph.files().clone(), - HashMap::from([ - (paths.sources.join("Greeter.sol"), 0), - (paths.root.join("node_modules/hardhat/console.sol"), 1), - ]) - ); - } - - #[test] - fn can_resolve_dapp_dependency_graph() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let paths = ProjectPathsConfig::dapptools(&root).unwrap(); - - let graph = Graph::::resolve(&paths).unwrap(); - - assert_eq!(graph.edges.num_input_files, 2); - assert_eq!(graph.files().len(), 3); - assert_eq!( - graph.files().clone(), - HashMap::from([ - (paths.sources.join("Dapp.sol"), 0), - (paths.sources.join("Dapp.t.sol"), 1), - (paths.root.join("lib/ds-test/src/test.sol"), 2), - ]) - ); - - let dapp_test = graph.node(1); - assert_eq!(dapp_test.path, 
paths.sources.join("Dapp.t.sol")); - assert_eq!( - dapp_test.data.imports.iter().map(|i| i.data().path()).collect::>(), - vec![&PathBuf::from("ds-test/test.sol"), &PathBuf::from("./Dapp.sol")] - ); - assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]); - } - - #[test] - #[cfg(not(target_os = "windows"))] - fn can_print_dapp_sample_graph() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let paths = ProjectPathsConfig::dapptools(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); - let mut out = Vec::::new(); - tree::print(&graph, &Default::default(), &mut out).unwrap(); - - assert_eq!( - " -src/Dapp.sol >=0.6.6 -src/Dapp.t.sol >=0.6.6 -├── lib/ds-test/src/test.sol >=0.4.23 -└── src/Dapp.sol >=0.6.6 -" - .trim_start() - .as_bytes() - .to_vec(), - out - ); - } - - #[test] - #[cfg(not(target_os = "windows"))] - fn can_print_hardhat_sample_graph() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample"); - let paths = ProjectPathsConfig::hardhat(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); - let mut out = Vec::::new(); - tree::print(&graph, &Default::default(), &mut out).unwrap(); - assert_eq!( - "contracts/Greeter.sol >=0.6.0 -└── node_modules/hardhat/console.sol >=0.4.22, <0.9.0 -", - String::from_utf8(out).unwrap() - ); - } - - #[test] - #[cfg(feature = "svm-solc")] - fn test_print_unresolved() { - let root = - Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/incompatible-pragmas"); - let paths = ProjectPathsConfig::dapptools(&root).unwrap(); - let graph = Graph::::resolve(&paths).unwrap(); - let Err(SolcError::Message(err)) = graph.get_input_node_versions( - false, - &Default::default(), - &crate::solc::SolcCompiler::AutoDetect, - ) else { - panic!("expected error"); - }; - - snapbox::assert_data_eq!( - err, - snapbox::str![[r#" -Found incompatible versions: -src/A.sol =0.8.25 imports: - src/B.sol - src/C.sol =0.7.0 -"#]] - ); - } - - #[cfg(target_os = "linux")] - #[test] - fn can_read_different_case() { - use crate::resolver::parse::SolData; - use std::fs::{self, create_dir_all}; - use utils::tempdir; - - let tmp_dir = tempdir("out").unwrap(); - let path = tmp_dir.path().join("forge-std"); - create_dir_all(&path).unwrap(); - let existing = path.join("Test.sol"); - let non_existing = path.join("test.sol"); - fs::write( - existing, - " -pragma solidity ^0.8.10; -contract A {} - ", - ) - .unwrap(); - - assert!(!non_existing.exists()); - - let found = crate::resolver::Node::::read(&non_existing).unwrap_err(); - matches!(found, SolcError::ResolveCaseSensitiveFileName { .. 
}); - } -} diff --git a/crates/compilers/src/resolver/parse.rs b/crates/compilers/src/resolver/parse.rs deleted file mode 100644 index 6200671a..00000000 --- a/crates/compilers/src/resolver/parse.rs +++ /dev/null @@ -1,329 +0,0 @@ -use foundry_compilers_core::utils; -use semver::VersionReq; -use solang_parser::pt::{ - ContractPart, ContractTy, FunctionAttribute, FunctionDefinition, Import, ImportPath, Loc, - SourceUnitPart, Visibility, -}; -use std::{ - ops::Range, - path::{Path, PathBuf}, -}; - -/// Represents various information about a solidity file parsed via [solang_parser] -#[derive(Clone, Debug)] -pub struct SolData { - pub license: Option>, - pub version: Option>, - pub experimental: Option>, - pub imports: Vec>, - pub version_req: Option, - pub libraries: Vec, - pub is_yul: bool, -} - -impl SolData { - #[allow(dead_code)] - pub fn fmt_version( - &self, - f: &mut W, - ) -> std::result::Result<(), std::fmt::Error> { - if let Some(ref version) = self.version { - write!(f, "({})", version.data)?; - } - Ok(()) - } - - /// Extracts the useful data from a solidity source - /// - /// This will attempt to parse the solidity AST and extract the imports and version pragma. If - /// parsing fails, we'll fall back to extract that info via regex - pub fn parse(content: &str, file: &Path) -> Self { - let is_yul = file.extension().map_or(false, |ext| ext == "yul"); - let mut version = None; - let mut experimental = None; - let mut imports = Vec::>::new(); - let mut libraries = Vec::new(); - - match solang_parser::parse(content, 0) { - Ok((units, _)) => { - for unit in units.0 { - match unit { - SourceUnitPart::PragmaDirective(loc, Some(pragma), Some(value)) => { - if pragma.name == "solidity" { - // we're only interested in the solidity version pragma - version = Some(SolDataUnit::from_loc(value.string.clone(), loc)); - } - - if pragma.name == "experimental" { - experimental = Some(SolDataUnit::from_loc(value.string, loc)); - } - } - SourceUnitPart::ImportDirective(import) => { - let (import, ids, loc) = match import { - Import::Plain(s, l) => (s, vec![], l), - Import::GlobalSymbol(s, i, l) => (s, vec![(i, None)], l), - Import::Rename(s, i, l) => (s, i, l), - }; - let import = match import { - ImportPath::Filename(s) => s.string.clone(), - ImportPath::Path(p) => p.to_string(), - }; - let sol_import = SolImport::new(PathBuf::from(import)).set_aliases( - ids.into_iter() - .map(|(id, alias)| match alias { - Some(al) => SolImportAlias::Contract(al.name, id.name), - None => SolImportAlias::File(id.name), - }) - .collect(), - ); - imports.push(SolDataUnit::from_loc(sol_import, loc)); - } - SourceUnitPart::ContractDefinition(def) => { - let functions = def - .parts - .into_iter() - .filter_map(|part| match part { - ContractPart::FunctionDefinition(f) => Some(*f), - _ => None, - }) - .collect(); - if let ContractTy::Library(_) = def.ty { - libraries.push(SolLibrary { functions }); - } - } - _ => {} - } - } - } - Err(err) => { - trace!( - "failed to parse \"{}\" ast: \"{:?}\". 
Falling back to regex to extract data", - file.display(), - err - ); - version = utils::capture_outer_and_inner( - content, - &utils::RE_SOL_PRAGMA_VERSION, - &["version"], - ) - .first() - .map(|(cap, name)| SolDataUnit::new(name.as_str().to_owned(), cap.range())); - imports = capture_imports(content); - } - }; - let license = content.lines().next().and_then(|line| { - utils::capture_outer_and_inner( - line, - &utils::RE_SOL_SDPX_LICENSE_IDENTIFIER, - &["license"], - ) - .first() - .map(|(cap, l)| SolDataUnit::new(l.as_str().to_owned(), cap.range())) - }); - let version_req = version.as_ref().and_then(|v| Self::parse_version_req(v.data()).ok()); - - Self { version_req, version, experimental, imports, license, libraries, is_yul } - } - - /// Returns the corresponding SemVer version requirement for the solidity version. - /// - /// Note: This is a workaround for the fact that `VersionReq::parse` does not support whitespace - /// separators and requires comma separated operators. See [VersionReq]. - pub fn parse_version_req(version: &str) -> Result { - let version = version.replace(' ', ","); - - // Somehow, Solidity semver without an operator is considered to be "exact", - // but lack of operator automatically marks the operator as Caret, so we need - // to manually patch it? :shrug: - let exact = !matches!(&version[0..1], "*" | "^" | "=" | ">" | "<" | "~"); - let mut version = VersionReq::parse(&version)?; - if exact { - version.comparators[0].op = semver::Op::Exact; - } - - Ok(version) - } -} - -#[derive(Clone, Debug)] -pub struct SolImport { - path: PathBuf, - aliases: Vec, -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum SolImportAlias { - File(String), - Contract(String, String), -} - -impl SolImport { - pub fn new(path: PathBuf) -> Self { - Self { path, aliases: vec![] } - } - - pub fn path(&self) -> &PathBuf { - &self.path - } - - pub fn aliases(&self) -> &Vec { - &self.aliases - } - - fn set_aliases(mut self, aliases: Vec) -> Self { - self.aliases = aliases; - self - } -} - -/// Minimal representation of a contract inside a solidity file -#[derive(Clone, Debug)] -pub struct SolLibrary { - pub functions: Vec, -} - -impl SolLibrary { - /// Returns `true` if all functions of this library will be inlined. - /// - /// This checks if all functions are either internal or private, because internal functions can - /// only be accessed from within the current contract or contracts deriving from it. They cannot - /// be accessed externally. Since they are not exposed to the outside through the contract’s - /// ABI, they can take parameters of internal types like mappings or storage references. 
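The two quirks `parse_version_req` works around can be demonstrated directly against the `semver` crate: whitespace-separated bounds must become comma-separated, and a bare version parses as caret until its operator is patched to `Exact`:

```rust
use semver::{Op, Version, VersionReq};

fn main() {
    // Solidity allows `>=0.8.0 <0.9.0`; `VersionReq::parse` wants commas.
    let req = VersionReq::parse(&">=0.8.0 <0.9.0".replace(' ', ",")).unwrap();
    assert!(req.matches(&Version::new(0, 8, 19)));

    // A bare `0.8.10` parses as caret and must be patched to an exact match.
    let mut exact = VersionReq::parse("0.8.10").unwrap();
    assert!(matches!(exact.comparators[0].op, Op::Caret));
    exact.comparators[0].op = Op::Exact;
    assert!(exact.matches(&Version::new(0, 8, 10)));
    assert!(!exact.matches(&Version::new(0, 8, 11)));
}
```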
- /// - /// See also - pub fn is_inlined(&self) -> bool { - for f in self.functions.iter() { - for attr in f.attributes.iter() { - if let FunctionAttribute::Visibility( - Visibility::External(_) | Visibility::Public(_), - ) = attr - { - return false; - } - } - } - true - } -} - -/// Represents an item in a solidity file with its location in the file -#[derive(Clone, Debug)] -pub struct SolDataUnit { - loc: Range, - data: T, -} - -/// Solidity Data Unit decorated with its location within the file -impl SolDataUnit { - pub fn new(data: T, loc: Range) -> Self { - Self { data, loc } - } - - pub fn from_loc(data: T, loc: Loc) -> Self { - Self { - data, - loc: match loc { - Loc::File(_, start, end) => Range { start, end: end + 1 }, - _ => Range { start: 0, end: 0 }, - }, - } - } - - /// Returns the underlying data for the unit - pub fn data(&self) -> &T { - &self.data - } - - /// Returns the location of the given data unit - pub fn loc(&self) -> Range { - self.loc.clone() - } - - /// Returns the location of the given data unit adjusted by an offset. - /// Used to determine new position of the unit within the file after - /// content manipulation. - pub fn loc_by_offset(&self, offset: isize) -> Range { - utils::range_by_offset(&self.loc, offset) - } -} - -/// Capture the import statement information together with aliases -pub fn capture_imports(content: &str) -> Vec> { - let mut imports = vec![]; - for cap in utils::RE_SOL_IMPORT.captures_iter(content) { - if let Some(name_match) = ["p1", "p2", "p3", "p4"].iter().find_map(|name| cap.name(name)) { - let statement_match = cap.get(0).unwrap(); - let mut aliases = vec![]; - for alias_cap in utils::RE_SOL_IMPORT_ALIAS.captures_iter(statement_match.as_str()) { - if let Some(alias) = alias_cap.name("alias") { - let alias = alias.as_str().to_owned(); - let import_alias = match alias_cap.name("target") { - Some(target) => SolImportAlias::Contract(alias, target.as_str().to_owned()), - None => SolImportAlias::File(alias), - }; - aliases.push(import_alias); - } - } - let sol_import = - SolImport::new(PathBuf::from(name_match.as_str())).set_aliases(aliases); - imports.push(SolDataUnit::new(sol_import, statement_match.range())); - } - } - imports -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_capture_curly_imports() { - let content = r#" -import { T } from "../Test.sol"; -import {ReentrancyGuard} from "@openzeppelin/contracts/utils/ReentrancyGuard.sol"; -import {DsTest} from "ds-test/test.sol"; -"#; - - let captured_imports = - capture_imports(content).into_iter().map(|s| s.data.path).collect::>(); - - let expected = - utils::find_import_paths(content).map(|m| m.as_str().into()).collect::>(); - - assert_eq!(captured_imports, expected); - - assert_eq!( - captured_imports, - vec![ - PathBuf::from("../Test.sol"), - "@openzeppelin/contracts/utils/ReentrancyGuard.sol".into(), - "ds-test/test.sol".into(), - ] - ); - } - - #[test] - fn cap_capture_aliases() { - let content = r#" -import * as T from "./Test.sol"; -import { DsTest as Test } from "ds-test/test.sol"; -import "ds-test/test.sol" as Test; -import { FloatMath as Math, Math as FloatMath } from "./Math.sol"; -"#; - - let caputred_imports = - capture_imports(content).into_iter().map(|s| s.data.aliases).collect::>(); - assert_eq!( - caputred_imports, - vec![ - vec![SolImportAlias::File("T".into())], - vec![SolImportAlias::Contract("Test".into(), "DsTest".into())], - vec![SolImportAlias::File("Test".into())], - vec![ - SolImportAlias::Contract("Math".into(), "FloatMath".into()), - 
SolImportAlias::Contract("FloatMath".into(), "Math".into()), - ], - ] - ); - } -} diff --git a/crates/compilers/src/resolver/tree.rs b/crates/compilers/src/resolver/tree.rs deleted file mode 100644 index ca730985..00000000 --- a/crates/compilers/src/resolver/tree.rs +++ /dev/null @@ -1,173 +0,0 @@ -use crate::{compilers::ParsedSource, Graph}; -use std::{collections::HashSet, io, io::Write, str::FromStr}; - -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub enum Charset { - // when operating in a console on windows non-UTF-8 byte sequences are not supported on - // stdout, See also [`StdoutLock`] - #[cfg_attr(not(target_os = "windows"), default)] - Utf8, - #[cfg_attr(target_os = "windows", default)] - Ascii, -} - -impl FromStr for Charset { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "utf8" => Ok(Self::Utf8), - "ascii" => Ok(Self::Ascii), - s => Err(format!("invalid charset: {s}")), - } - } -} - -/// Options to configure formatting -#[derive(Clone, Debug, Default)] -pub struct TreeOptions { - /// The style of characters to use. - pub charset: Charset, - /// If `true`, duplicate imports will be repeated. - /// If `false`, duplicates are suffixed with `(*)`, and their imports - /// won't be shown. - pub no_dedupe: bool, -} - -/// Internal helper type for symbols -struct Symbols { - down: &'static str, - tee: &'static str, - ell: &'static str, - right: &'static str, -} - -static UTF8_SYMBOLS: Symbols = Symbols { down: "│", tee: "├", ell: "└", right: "─" }; - -static ASCII_SYMBOLS: Symbols = Symbols { down: "|", tee: "|", ell: "`", right: "-" }; - -pub fn print( - graph: &Graph, - opts: &TreeOptions, - out: &mut dyn Write, -) -> io::Result<()> { - let symbols = match opts.charset { - Charset::Utf8 => &UTF8_SYMBOLS, - Charset::Ascii => &ASCII_SYMBOLS, - }; - - // used to determine whether to display `(*)` - let mut visited_imports = HashSet::new(); - - // A stack of bools used to determine where | symbols should appear - // when printing a line. - let mut levels_continue = Vec::new(); - // used to detect dependency cycles when --no-dedupe is used. - // contains a `Node` for each level. - let mut write_stack = Vec::new(); - - for (node_index, _) in graph.input_nodes().enumerate() { - print_node( - graph, - node_index, - symbols, - opts.no_dedupe, - &mut visited_imports, - &mut levels_continue, - &mut write_stack, - out, - )?; - } - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -fn print_node( - graph: &Graph, - node_index: usize, - symbols: &Symbols, - no_dedupe: bool, - visited_imports: &mut HashSet, - levels_continue: &mut Vec, - write_stack: &mut Vec, - out: &mut dyn Write, -) -> io::Result<()> { - let new_node = no_dedupe || visited_imports.insert(node_index); - - if let Some((last_continues, rest)) = levels_continue.split_last() { - for continues in rest { - let c = if *continues { symbols.down } else { " " }; - write!(out, "{c} ")?; - } - - let c = if *last_continues { symbols.tee } else { symbols.ell }; - write!(out, "{0}{1}{1} ", c, symbols.right)?; - } - - let in_cycle = write_stack.contains(&node_index); - // if this node does not have any outgoing edges, don't include the (*) - // since there isn't really anything "deduplicated", and it generally just - // adds noise. 
- let has_deps = graph.has_outgoing_edges(node_index); - let star = if (new_node && !in_cycle) || !has_deps { "" } else { " (*)" }; - - writeln!(out, "{}{star}", graph.display_node(node_index))?; - - if !new_node || in_cycle { - return Ok(()); - } - write_stack.push(node_index); - - print_imports( - graph, - node_index, - symbols, - no_dedupe, - visited_imports, - levels_continue, - write_stack, - out, - )?; - - write_stack.pop(); - - Ok(()) -} - -/// Prints all the imports of a node -#[allow(clippy::too_many_arguments)] -fn print_imports( - graph: &Graph, - node_index: usize, - symbols: &Symbols, - no_dedupe: bool, - visited_imports: &mut HashSet, - levels_continue: &mut Vec, - write_stack: &mut Vec, - out: &mut dyn Write, -) -> io::Result<()> { - let imports = graph.imported_nodes(node_index); - if imports.is_empty() { - return Ok(()); - } - - let mut iter = imports.iter().peekable(); - - while let Some(import) = iter.next() { - levels_continue.push(iter.peek().is_some()); - print_node( - graph, - *import, - symbols, - no_dedupe, - visited_imports, - levels_continue, - write_stack, - out, - )?; - levels_continue.pop(); - } - - Ok(()) -} diff --git a/crates/compilers/src/zksync/artifact_output/zk.rs b/crates/compilers/src/zksync/artifact_output/zk.rs deleted file mode 100644 index 395c7d57..00000000 --- a/crates/compilers/src/zksync/artifact_output/zk.rs +++ /dev/null @@ -1,272 +0,0 @@ -use crate::{ - artifact_output::{ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, OutputContext}, - artifacts::{DevDoc, SourceFile, StorageLayout, UserDoc}, - compile::output::sources::VersionedSourceFiles, - config::ProjectPathsConfig, - error::{Result, SolcIoError}, - zksync::compile::output::contracts::VersionedContracts, -}; -use alloy_json_abi::JsonAbi; -use foundry_compilers_artifacts::{ - solc::{ - CompactBytecode, CompactContract, CompactContractBytecode, CompactContractBytecodeCow, - CompactDeployedBytecode, - }, - zksolc::contract::Contract, - SolcLanguage, -}; -use path_slash::PathBufExt; -use serde::{Deserialize, Serialize}; -use std::{ - borrow::Cow, - collections::{BTreeMap, HashSet}, - fs, - path::Path, -}; - -mod bytecode; -pub use bytecode::ZkArtifactBytecode; - -#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct ZkContractArtifact { - pub abi: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub bytecode: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub assembly: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub metadata: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub storage_layout: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub userdoc: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub devdoc: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir_optimized: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub hash: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub factory_dependencies: Option>, - /// The identifier of the source file - #[serde(default, skip_serializing_if = "Option::is_none")] - pub id: Option, -} - -impl ZkContractArtifact { - pub fn missing_libraries(&self) -> Option<&Vec> { - self.bytecode.as_ref().map(|bc| &bc.missing_libraries) - } -} - -// CompactContract variants -// TODO: for zkEvm, the distinction between bytecode and deployed_bytecode makes little sense, -// and there 
some fields that the ouptut doesn't provide (e.g: source_map) -// However, we implement these because we get the Artifact trait and can reuse lots of -// the crate's helpers without needing to duplicate everything. Maybe there's a way -// we can get all these without having to add the same bytecode twice on each struct. -// Ideally the Artifacts trait would not be coupled to a specific Contract type -impl<'a> From<&'a ZkContractArtifact> for CompactContractBytecodeCow<'a> { - fn from(artifact: &'a ZkContractArtifact) -> Self { - // TODO: artifact.abi might have None, we need to get this field from solc_metadata - CompactContractBytecodeCow { - abi: artifact.abi.as_ref().map(Cow::Borrowed), - bytecode: artifact.bytecode.clone().map(|b| Cow::Owned(CompactBytecode::from(b))), - deployed_bytecode: artifact - .bytecode - .clone() - .map(|b| Cow::Owned(CompactDeployedBytecode::from(b))), - } - } -} - -impl From for CompactContractBytecode { - fn from(c: ZkContractArtifact) -> Self { - Self { - abi: c.abi.map(Into::into), - deployed_bytecode: c.bytecode.clone().map(|b| b.into()), - bytecode: c.bytecode.clone().map(|b| b.into()), - } - } -} - -impl From for CompactContract { - fn from(c: ZkContractArtifact) -> Self { - // TODO: c.abi might have None, we need to get this field from solc_metadata - Self { - bin: c.bytecode.clone().map(|b| b.object), - bin_runtime: c.bytecode.clone().map(|b| b.object), - abi: c.abi, - } - } -} - -#[derive(Debug, Copy, Clone, Eq, PartialEq, Default)] -pub struct ZkArtifactOutput(); - -impl ArtifactOutput for ZkArtifactOutput { - type Artifact = ZkContractArtifact; - - fn contract_to_artifact( - &self, - _file: &Path, - _name: &str, - _contract: foundry_compilers_artifacts::Contract, - _source_file: Option<&SourceFile>, - ) -> Self::Artifact { - panic!("Unsupported use zksync_contract_to_artifact instead"); - } - - fn standalone_source_file_to_artifact( - &self, - _path: &Path, - _file: &crate::VersionedSourceFile, - ) -> Option { - None - } -} - -impl ZkArtifactOutput { - fn zksync_contract_to_artifact( - &self, - _file: &Path, - _name: &str, - contract: Contract, - source_file: Option<&SourceFile>, - ) -> ZkContractArtifact { - let Contract { - abi, - metadata, - userdoc, - devdoc, - storage_layout, - eravm, - ir_optimized, - hash, - factory_dependencies, - missing_libraries, - } = contract; - - let (bytecode, assembly) = - eravm.map(|eravm| (eravm.bytecode, eravm.assembly)).unwrap_or_else(|| (None, None)); - let bytecode = bytecode.map(|object| ZkArtifactBytecode { object, missing_libraries }); - - ZkContractArtifact { - abi, - hash, - factory_dependencies, - storage_layout: Some(storage_layout), - bytecode, - assembly, - metadata, - userdoc: Some(userdoc), - devdoc: Some(devdoc), - ir_optimized, - id: source_file.as_ref().map(|s| s.id), - } - } - - pub fn zksync_on_output( - &self, - contracts: &VersionedContracts, - sources: &VersionedSourceFiles, - layout: &ProjectPathsConfig, - ctx: OutputContext<'_>, - ) -> Result> { - let mut artifacts = self.zksync_output_to_artifacts(contracts, sources, ctx, layout); - fs::create_dir_all(&layout.artifacts).map_err(|err| { - error!(dir=?layout.artifacts, "Failed to create artifacts folder"); - SolcIoError::new(err, &layout.artifacts) - })?; - - artifacts.join_all(&layout.artifacts); - artifacts.write_all()?; - - Ok(artifacts) - } - - /// Convert the compiler output into a set of artifacts - /// - /// **Note:** This does only convert, but _NOT_ write the artifacts to disk, See - /// [`Self::on_output()`] - pub fn 
zksync_output_to_artifacts( - &self, - contracts: &VersionedContracts, - sources: &VersionedSourceFiles, - ctx: OutputContext<'_>, - layout: &ProjectPathsConfig, - ) -> Artifacts { - let mut artifacts = ArtifactsMap::new(); - - // this tracks all the `SourceFile`s that we successfully mapped to a contract - let mut non_standalone_sources = HashSet::new(); - - // prepopulate taken paths set with cached artifacts - let mut taken_paths_lowercase = ctx - .existing_artifacts - .values() - .flat_map(|artifacts| artifacts.values().flat_map(|artifacts| artifacts.values())) - .map(|a| a.path.to_slash_lossy().to_lowercase()) - .collect::>(); - - let mut files = contracts.keys().collect::>(); - // Iterate starting with top-most files to ensure that they get the shortest paths. - files.sort_by(|file1, file2| { - (file1.components().count(), file1).cmp(&(file2.components().count(), file2)) - }); - for file in files { - for (name, versioned_contracts) in &contracts[file] { - for contract in versioned_contracts { - // track `SourceFile`s that can be mapped to contracts - let source_file = sources.find_file_and_version(file, &contract.version); - - if let Some(source) = source_file { - non_standalone_sources.insert((source.id, &contract.version)); - } - - let artifact_path = Self::get_artifact_path( - &ctx, - &taken_paths_lowercase, - file, - name, - layout.artifacts.as_path(), - &contract.version, - versioned_contracts.len() > 1, - ); - - taken_paths_lowercase.insert(artifact_path.to_slash_lossy().to_lowercase()); - - trace!( - "use artifact file {:?} for contract file {} {}", - artifact_path, - file.display(), - contract.version - ); - - let artifact = self.zksync_contract_to_artifact( - file, - name, - contract.contract.clone(), - source_file, - ); - - let artifact = ArtifactFile { - artifact, - file: artifact_path, - version: contract.version.clone(), - build_id: contract.build_id.clone(), - }; - - artifacts - .entry(file.to_path_buf()) - .or_default() - .entry(name.to_string()) - .or_default() - .push(artifact); - } - } - } - Artifacts(artifacts) - } -} diff --git a/crates/compilers/src/zksync/compile/mod.rs b/crates/compilers/src/zksync/compile/mod.rs deleted file mode 100644 index 9e53532e..00000000 --- a/crates/compilers/src/zksync/compile/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod output; -pub mod project; diff --git a/crates/compilers/src/zksync/compile/output/contracts.rs b/crates/compilers/src/zksync/compile/output/contracts.rs deleted file mode 100644 index d8ca8e53..00000000 --- a/crates/compilers/src/zksync/compile/output/contracts.rs +++ /dev/null @@ -1,239 +0,0 @@ -use crate::artifacts::FileToContractsMap; -use foundry_compilers_artifacts::{solc::CompactContractRef, zksolc::contract::Contract}; -use semver::Version; -use serde::{Deserialize, Serialize}; -use std::{ - collections::BTreeMap, - ops::Deref, - path::{Path, PathBuf}, -}; - -/// file -> [(contract name -> Contract + solc version)] -#[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)] -#[serde(transparent)] -pub struct VersionedContracts(pub FileToContractsMap>); - -impl VersionedContracts { - /// Converts all `\\` separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - #[cfg(windows)] - { - use path_slash::PathExt; - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(path, files)| (Path::new(&path).to_slash_lossy().to_string(), files)) - .collect() - } - } - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - pub fn len(&self) -> usize { - self.0.len() - } - - /// Returns an 
iterator over all files - pub fn files(&self) -> impl Iterator + '_ { - self.0.keys() - } - - /// Finds the _first_ contract with the given name - pub fn find_first(&self, contract: impl AsRef) -> Option> { - let contract_name = contract.as_ref(); - self.contracts().find_map(|(name, contract)| { - (name == contract_name).then(|| CompactContractRef::from(contract)) - }) - } - - /// Finds the contract with matching path and name - pub fn find( - &self, - path: impl AsRef, - contract: impl AsRef, - ) -> Option> { - let contract_path = path.as_ref(); - let contract_name = contract.as_ref(); - self.contracts_with_files().find_map(|(path, name, contract)| { - (path == contract_path && name == contract_name) - .then(|| CompactContractRef::from(contract)) - }) - } - - /// Removes the _first_ contract with the given name from the set - pub fn remove_first(&mut self, contract: impl AsRef) -> Option { - let contract_name = contract.as_ref(); - self.0.values_mut().find_map(|all_contracts| { - let mut contract = None; - if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { - if !contracts.is_empty() { - contract = Some(contracts.remove(0).contract); - } - if !contracts.is_empty() { - all_contracts.insert(c, contracts); - } - } - contract - }) - } - - /// Removes the contract with matching path and name - pub fn remove( - &mut self, - path: impl AsRef, - contract: impl AsRef, - ) -> Option { - let contract_name = contract.as_ref(); - let (key, mut all_contracts) = self.0.remove_entry(path.as_ref())?; - let mut contract = None; - if let Some((c, mut contracts)) = all_contracts.remove_entry(contract_name) { - if !contracts.is_empty() { - contract = Some(contracts.remove(0).contract); - } - if !contracts.is_empty() { - all_contracts.insert(c, contracts); - } - } - - if !all_contracts.is_empty() { - self.0.insert(key, all_contracts); - } - contract - } - - /// Given the contract file's path and the contract's name, tries to return the contract's - /// bytecode, runtime bytecode, and ABI. - pub fn get( - &self, - path: impl AsRef, - contract: impl AsRef, - ) -> Option> { - let contract = contract.as_ref(); - self.0 - .get(path.as_ref()) - .and_then(|contracts| { - contracts.get(contract).and_then(|c| c.first().map(|c| &c.contract)) - }) - .map(CompactContractRef::from) - } - - /// Returns an iterator over all contracts and their names. - pub fn contracts(&self) -> impl Iterator { - self.0 - .values() - .flat_map(|c| c.iter().flat_map(|(name, c)| c.iter().map(move |c| (name, &c.contract)))) - } - - /// Returns an iterator over (`file`, `name`, `Contract`). - pub fn contracts_with_files(&self) -> impl Iterator { - self.0.iter().flat_map(|(file, contracts)| { - contracts - .iter() - .flat_map(move |(name, c)| c.iter().map(move |c| (file, name, &c.contract))) - }) - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`). - pub fn contracts_with_files_and_version( - &self, - ) -> impl Iterator { - self.0.iter().flat_map(|(file, contracts)| { - contracts.iter().flat_map(move |(name, c)| { - c.iter().map(move |c| (file, name, &c.contract, &c.version)) - }) - }) - } - - /// Returns an iterator over all contracts and their source names. 
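(Editorial aside, not part of the original patch: the `file -> name -> versioned contracts` nesting that `into_contracts` and its sibling helpers below flatten is just two levels of `BTreeMap`. This stand-alone sketch, with a hypothetical `Contract` stand-in type, shows the same `flat_map` pattern.)

use std::collections::BTreeMap;

// Hypothetical stand-in for the crate's `Contract`; for illustration only.
#[derive(Debug, Clone)]
struct Contract(String);

// file -> (contract name -> versioned contracts), mirroring the FileToContractsMap shape.
type Map = BTreeMap<String, BTreeMap<String, Vec<Contract>>>;

fn main() {
    let mut map: Map = BTreeMap::new();
    map.entry("src/A.sol".into())
        .or_default()
        .entry("A".into())
        .or_default()
        .push(Contract("0.8.20".into()));

    // The `contracts_with_files`-style flattening: (file, name, contract) triples.
    for (file, name, contract) in map
        .iter()
        .flat_map(|(f, cs)| cs.iter().flat_map(move |(n, v)| v.iter().map(move |c| (f, n, c))))
    {
        println!("{file}:{name} -> {contract:?}");
    }
}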
- pub fn into_contracts(self) -> impl Iterator { - self.0.into_values().flat_map(|c| { - c.into_iter() - .flat_map(|(name, c)| c.into_iter().map(move |c| (name.clone(), c.contract))) - }) - } - - /// Returns an iterator over (`file`, `name`, `Contract`) - pub fn into_contracts_with_files(self) -> impl Iterator { - self.0.into_iter().flat_map(|(file, contracts)| { - contracts.into_iter().flat_map(move |(name, c)| { - let file = file.clone(); - c.into_iter().map(move |c| (file.clone(), name.clone(), c.contract)) - }) - }) - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) - pub fn into_contracts_with_files_and_version( - self, - ) -> impl Iterator { - self.0.into_iter().flat_map(|(file, contracts)| { - contracts.into_iter().flat_map(move |(name, c)| { - let file = file.clone(); - c.into_iter().map(move |c| (file.clone(), name.clone(), c.contract, c.version)) - }) - }) - } - - /// Sets the contract's file paths to `root` adjoined to `self.file`. - pub fn join_all(&mut self, root: impl AsRef) -> &mut Self { - let root = root.as_ref(); - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(contract_path, contracts)| (root.join(contract_path), contracts)) - .collect(); - self - } - - /// Removes `base` from all contract paths - pub fn strip_prefix_all(&mut self, base: impl AsRef) -> &mut Self { - let base = base.as_ref(); - self.0 = std::mem::take(&mut self.0) - .into_iter() - .map(|(contract_path, contracts)| { - ( - contract_path.strip_prefix(base).unwrap_or(&contract_path).to_path_buf(), - contracts, - ) - }) - .collect(); - self - } -} - -impl AsRef>> for VersionedContracts { - fn as_ref(&self) -> &FileToContractsMap> { - &self.0 - } -} - -impl AsMut>> for VersionedContracts { - fn as_mut(&mut self) -> &mut FileToContractsMap> { - &mut self.0 - } -} - -impl Deref for VersionedContracts { - type Target = FileToContractsMap>; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl IntoIterator for VersionedContracts { - type Item = (PathBuf, BTreeMap>); - type IntoIter = - std::collections::btree_map::IntoIter>>; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -/// A contract and the compiler version used to compile it -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct VersionedContract { - pub contract: Contract, - pub version: Version, - pub build_id: String, -} diff --git a/crates/compilers/src/zksync/compile/output/mod.rs b/crates/compilers/src/zksync/compile/output/mod.rs deleted file mode 100644 index 74d502e5..00000000 --- a/crates/compilers/src/zksync/compile/output/mod.rs +++ /dev/null @@ -1,589 +0,0 @@ -use crate::{ - artifact_output::{ArtifactId, Artifacts}, - artifacts::error::Severity, - buildinfo::RawBuildInfo, - compile::output::{ - info::ContractInfoRef, - sources::{VersionedSourceFile, VersionedSourceFiles}, - }, - output::Builds, - zksync::{ - artifact_output::zk::{ZkArtifactOutput, ZkContractArtifact}, - compile::output::contracts::{VersionedContract, VersionedContracts}, - }, - ArtifactOutput, -}; -use foundry_compilers_artifacts::{ - solc::CompactContractRef, - zksolc::{contract::Contract, error::Error, CompilerOutput}, - SolcLanguage, -}; -use foundry_compilers_core::error::{SolcError, SolcIoError}; -use semver::Version; -use serde::{Deserialize, Serialize}; -use std::{ - fmt, - path::{Path, PathBuf}, -}; -use yansi::Paint; - -pub mod contracts; - -#[derive(Clone, Debug)] -pub struct ProjectCompileOutput { - /// contains the aggregated `CompilerOutput` - pub compiler_output: 
AggregatedCompilerOutput, - /// all artifact files from `output` that were freshly compiled and written - pub compiled_artifacts: Artifacts, - /// All artifacts that were read from cache - pub cached_artifacts: Artifacts, - /// errors that should be omitted - pub ignored_error_codes: Vec, - /// paths that should be omitted - pub ignored_file_paths: Vec, - /// set minimum level of severity that is treated as an error - pub compiler_severity_filter: Severity, - /// all build infos that were just compiled - pub builds: Builds, -} - -impl ProjectCompileOutput { - /// Converts all `\\` separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - self.compiler_output.slash_paths(); - self.compiled_artifacts.slash_paths(); - self.cached_artifacts.slash_paths(); - } - - /// All artifacts together with their contract file name and name `:`. - /// - /// This returns a chained iterator of both cached and recompiled contract artifacts. - pub fn artifact_ids(&self) -> impl Iterator { - let Self { cached_artifacts, compiled_artifacts, .. } = self; - cached_artifacts - .artifacts::() - .chain(compiled_artifacts.artifacts::()) - } - - /// All artifacts together with their contract file name and name `:` - /// - /// This returns a chained iterator of both cached and recompiled contract artifacts - pub fn into_artifacts(self) -> impl Iterator { - let Self { cached_artifacts, compiled_artifacts, .. } = self; - cached_artifacts - .into_artifacts::() - .chain(compiled_artifacts.into_artifacts::()) - } - - pub fn with_stripped_file_prefixes(mut self, base: impl AsRef) -> Self { - let base = base.as_ref(); - self.cached_artifacts = self.cached_artifacts.into_stripped_file_prefixes(base); - self.compiled_artifacts = self.compiled_artifacts.into_stripped_file_prefixes(base); - self.compiler_output.strip_prefix_all(base); - self - } - - /// Returns whether this type does not contain compiled contracts. - pub fn is_unchanged(&self) -> bool { - self.compiler_output.is_unchanged() - } - - /// Returns whether any errors were emitted by the compiler. - pub fn has_compiler_errors(&self) -> bool { - self.compiler_output.has_error( - &self.ignored_error_codes, - &self.ignored_file_paths, - &self.compiler_severity_filter, - ) - } - - /// Panics if any errors were emitted by the compiler. 
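(Editorial aside, not from the original source: `assert_success` below pairs a boolean check with `#[track_caller]`, so the panic is reported at the caller's call site rather than inside the method itself. A minimal self-contained sketch of that pattern:)

#[derive(Default)]
struct Output {
    errors: Vec<String>,
}

impl Output {
    fn has_compiler_errors(&self) -> bool {
        !self.errors.is_empty()
    }

    /// Panics with the caller's location in the message, like `assert_success`.
    #[track_caller]
    fn assert_success(&self) {
        assert!(!self.has_compiler_errors(), "compiler errors: {:?}", self.errors);
    }
}

fn main() {
    Output::default().assert_success(); // fine: no errors recorded
}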
- #[track_caller] - pub fn assert_success(&self) { - assert!(!self.has_compiler_errors(), "\n{self}\n"); - } - - pub fn versioned_artifacts( - &self, - ) -> impl Iterator { - self.cached_artifacts - .artifact_files() - .chain(self.compiled_artifacts.artifact_files()) - .filter_map(|artifact| { - ZkArtifactOutput::contract_name(&artifact.file) - .map(|name| (name, (&artifact.artifact, &artifact.version))) - }) - } - - pub fn artifacts(&self) -> impl Iterator { - self.versioned_artifacts().map(|(name, (artifact, _))| (name, artifact)) - } - - pub fn output(&self) -> &AggregatedCompilerOutput { - &self.compiler_output - } - - pub fn into_output(self) -> AggregatedCompilerOutput { - self.compiler_output - } - - /// Finds the artifact with matching path and name - pub fn find(&self, path: &Path, name: &str) -> Option<&ZkContractArtifact> { - if let artifact @ Some(_) = self.compiled_artifacts.find(path, name) { - return artifact; - } - self.cached_artifacts.find(path, name) - } - - /// Finds the first contract with the given name - pub fn find_first(&self, name: &str) -> Option<&ZkContractArtifact> { - if let artifact @ Some(_) = self.compiled_artifacts.find_first(name) { - return artifact; - } - self.cached_artifacts.find_first(name) - } - - /// Returns the set of `Artifacts` that were cached and got reused during - /// [`crate::Project::compile()`] - pub fn cached_artifacts(&self) -> &Artifacts { - &self.cached_artifacts - } - - /// Returns the set of `Artifacts` that were compiled with `zksolc` in - /// [`crate::Project::compile()`] - pub fn compiled_artifacts(&self) -> &Artifacts { - &self.compiled_artifacts - } - - /// Removes the artifact with matching path and name - pub fn remove(&mut self, path: &Path, name: &str) -> Option { - if let artifact @ Some(_) = self.compiled_artifacts.remove(path, name) { - return artifact; - } - self.cached_artifacts.remove(path, name) - } - - /// Removes the _first_ contract with the given name from the set - pub fn remove_first(&mut self, contract_name: impl AsRef) -> Option { - let contract_name = contract_name.as_ref(); - if let artifact @ Some(_) = self.compiled_artifacts.remove_first(contract_name) { - return artifact; - } - self.cached_artifacts.remove_first(contract_name) - } - - /// Removes the contract with matching path and name using the `:` pattern - /// where `path` is optional. 
- /// - /// If the `path` segment is `None`, then the first matching `Contract` is returned, see - /// [Self::remove_first] - pub fn remove_contract<'a>( - &mut self, - info: impl Into>, - ) -> Option { - let ContractInfoRef { path, name } = info.into(); - if let Some(path) = path { - self.remove(path[..].as_ref(), &name) - } else { - self.remove_first(&name) - } - } -} - -impl fmt::Display for ProjectCompileOutput { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.compiler_output.is_unchanged() { - f.write_str("Nothing to compile") - } else { - self.compiler_output - .diagnostics( - &self.ignored_error_codes, - &self.ignored_file_paths, - self.compiler_severity_filter, - ) - .fmt(f) - } - } -} - -/// The aggregated output of (multiple) compile jobs -/// -/// This is effectively a solc version aware `CompilerOutput` -#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] -pub struct AggregatedCompilerOutput { - /// all errors from all `CompilerOutput` - pub errors: Vec, - /// All source files combined with the solc version used to compile them - pub sources: VersionedSourceFiles, - /// All compiled contracts combined with the solc version used to compile them - pub contracts: VersionedContracts, - // All the `BuildInfo`s of zksolc invocations. - pub build_infos: Vec>, -} - -impl AggregatedCompilerOutput { - /// Converts all `\\` separators in _all_ paths to `/` - pub fn slash_paths(&mut self) { - self.sources.slash_paths(); - self.contracts.slash_paths(); - } - - /// Whether the output contains a compiler error - /// - /// This adheres to the given `compiler_severity_filter` and also considers [Error] with the - /// given [Severity] as errors. For example [Severity::Warning] will consider [Error]s with - /// [Severity::Warning] and [Severity::Error] as errors. - pub fn has_error( - &self, - ignored_error_codes: &[u64], - ignored_file_paths: &[PathBuf], - compiler_severity_filter: &Severity, - ) -> bool { - self.errors.iter().any(|err| { - if err.is_error() { - // [Severity::Error] is always treated as an error - return true; - } - // check if the filter is set to something higher than the error's severity - if compiler_severity_filter.ge(&err.severity) { - if compiler_severity_filter.is_warning() { - // skip ignored error codes and file path from warnings - return self.has_warning(ignored_error_codes, ignored_file_paths); - } - return true; - } - false - }) - } - - /// Checks if there are any compiler warnings that are not ignored by the specified error codes - /// and file paths. - pub fn has_warning(&self, ignored_error_codes: &[u64], ignored_file_paths: &[PathBuf]) -> bool { - self.errors - .iter() - .any(|error| !self.should_ignore(ignored_error_codes, ignored_file_paths, error)) - } - - pub fn should_ignore( - &self, - ignored_error_codes: &[u64], - ignored_file_paths: &[PathBuf], - error: &Error, - ) -> bool { - if !error.is_warning() { - return false; - } - - let mut ignore = false; - - if let Some(code) = error.error_code { - ignore |= ignored_error_codes.contains(&code); - if let Some(loc) = error.source_location.as_ref() { - let path = Path::new(&loc.file); - ignore |= - ignored_file_paths.iter().any(|ignored_path| path.starts_with(ignored_path)); - - // we ignore spdx and contract size warnings in test - // files. 
if we are looking at one of these warnings - // from a test file we skip - ignore |= self.is_test(path) && (code == 1878 || code == 5574); - } - } - - ignore - } - - /// Returns true if the contract is a expected to be a test - fn is_test(&self, contract_path: &Path) -> bool { - if contract_path.to_string_lossy().ends_with(".t.sol") { - return true; - } - - self.contracts.contracts_with_files().filter(|(path, _, _)| *path == contract_path).any( - |(_, _, contract)| { - contract.abi.as_ref().map_or(false, |abi| abi.functions.contains_key("IS_TEST")) - }, - ) - } - - pub fn diagnostics<'a>( - &'a self, - ignored_error_codes: &'a [u64], - ignored_file_paths: &'a [PathBuf], - compiler_severity_filter: Severity, - ) -> OutputDiagnostics<'a> { - OutputDiagnostics { - compiler_output: self, - ignored_error_codes, - ignored_file_paths, - compiler_severity_filter, - } - } - - pub fn is_empty(&self) -> bool { - self.contracts.is_empty() - } - - pub fn is_unchanged(&self) -> bool { - self.contracts.is_empty() && self.errors.is_empty() - } - - /// adds a new `CompilerOutput` to the aggregated output - pub fn extend( - &mut self, - version: Version, - build_info: RawBuildInfo, - output: CompilerOutput, - ) { - let build_id = build_info.id.clone(); - self.build_infos.push(build_info); - - let CompilerOutput { errors, sources, contracts, .. } = output; - self.errors.extend(errors); - - for (path, source_file) in sources { - let sources = self.sources.as_mut().entry(path).or_default(); - sources.push(VersionedSourceFile { - source_file, - version: version.clone(), - build_id: build_id.clone(), - }); - } - - for (file_name, new_contracts) in contracts { - let contracts = self.contracts.as_mut().entry(file_name).or_default(); - for (contract_name, contract) in new_contracts { - let versioned = contracts.entry(contract_name).or_default(); - versioned.push(VersionedContract { - contract, - version: version.clone(), - build_id: build_id.clone(), - }); - } - } - } - - /// Creates all `BuildInfo` files in the given `build_info_dir` - /// - /// There can be multiple `BuildInfo`, since we support multiple versions. - /// - /// The created files have the md5 hash `{_format,solcVersion,solcLongVersion,input}` as their - /// file name - pub fn write_build_infos(&self, build_info_dir: &Path) -> Result<(), SolcError> { - if self.build_infos.is_empty() { - return Ok(()); - } - std::fs::create_dir_all(build_info_dir) - .map_err(|err| SolcIoError::new(err, build_info_dir))?; - for build_info in &self.build_infos { - trace!("writing build info file {}", build_info.id); - let file_name = format!("{}.json", build_info.id); - let file = build_info_dir.join(file_name); - std::fs::write(&file, &serde_json::to_string(build_info)?) - .map_err(|err| SolcIoError::new(err, file))?; - } - Ok(()) - } - - /// Finds the _first_ contract with the given name - pub fn find_first(&self, contract: impl AsRef) -> Option> { - self.contracts.find_first(contract) - } - - /// Removes the _first_ contract with the given name from the set - pub fn remove_first(&mut self, contract: impl AsRef) -> Option { - self.contracts.remove_first(contract) - } - - /// Removes the contract with matching path and name - pub fn remove( - &mut self, - path: impl AsRef, - contract: impl AsRef, - ) -> Option { - self.contracts.remove(path, contract) - } - - /// Removes the contract with matching path and name using the `:` pattern - /// where `path` is optional. 
- /// - /// If the `path` segment is `None`, then the first matching `Contract` is returned, see - /// [Self::remove_first] - pub fn remove_contract<'a>( - &mut self, - info: impl Into>, - ) -> Option { - let ContractInfoRef { path, name } = info.into(); - if let Some(path) = path { - self.remove(Path::new(path.as_ref()), name) - } else { - self.remove_first(name) - } - } - - /// Iterate over all contracts and their names - pub fn contracts_iter(&self) -> impl Iterator { - self.contracts.contracts() - } - - /// Iterate over all contracts and their names - pub fn contracts_into_iter(self) -> impl Iterator { - self.contracts.into_contracts() - } - - /// Returns an iterator over (`file`, `name`, `Contract`) - pub fn contracts_with_files_iter( - &self, - ) -> impl Iterator { - self.contracts.contracts_with_files() - } - - /// Returns an iterator over (`file`, `name`, `Contract`) - pub fn contracts_with_files_into_iter( - self, - ) -> impl Iterator { - self.contracts.into_contracts_with_files() - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) - pub fn contracts_with_files_and_version_iter( - &self, - ) -> impl Iterator { - self.contracts.contracts_with_files_and_version() - } - - /// Returns an iterator over (`file`, `name`, `Contract`, `Version`) - pub fn contracts_with_files_and_version_into_iter( - self, - ) -> impl Iterator { - self.contracts.into_contracts_with_files_and_version() - } - - /// Given the contract file's path and the contract's name, tries to return the contract's - /// bytecode, runtime bytecode, and ABI. - pub fn get( - &self, - path: impl AsRef, - contract: impl AsRef, - ) -> Option> { - self.contracts.get(path, contract) - } - - /// Returns the output's source files and contracts separately, wrapped in helper types that - /// provide several helper methods - pub fn split(self) -> (VersionedSourceFiles, VersionedContracts) { - (self.sources, self.contracts) - } - - /// Joins all file path with `root` - pub fn join_all(&mut self, root: impl AsRef) -> &mut Self { - let root = root.as_ref(); - self.contracts.join_all(root); - self.sources.join_all(root); - self - } - - /// Strips the given prefix from all file paths to make them relative to the given - /// `base` argument. - /// - /// Convenience method for [Self::strip_prefix_all()] that consumes the type. 
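(Editorial aside, not from the original source: both prefix-stripping helpers here rely on `Path::strip_prefix` with a fall-back to the unmodified path. A minimal sketch of that behavior, with hypothetical paths:)

use std::path::{Path, PathBuf};

// Strip `base` from a path, falling back to the original when it is not a prefix,
// mirroring the `strip_prefix(base).unwrap_or(&path)` pattern used throughout.
fn strip(base: &Path, path: &Path) -> PathBuf {
    path.strip_prefix(base).unwrap_or(path).to_path_buf()
}

fn main() {
    let base = Path::new("/project");
    assert_eq!(strip(base, Path::new("/project/src/A.sol")), PathBuf::from("src/A.sol"));
    // Not under `base`: returned unchanged.
    assert_eq!(strip(base, Path::new("/other/B.sol")), PathBuf::from("/other/B.sol"));
}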
- pub fn with_stripped_file_prefixes(mut self, base: impl AsRef) -> Self { - let base = base.as_ref(); - self.contracts.strip_prefix_all(base); - self.sources.strip_prefix_all(base); - self - } - - /// Removes `base` from all contract paths - pub fn strip_prefix_all(&mut self, base: impl AsRef) -> &mut Self { - let base = base.as_ref(); - self.contracts.strip_prefix_all(base); - self.sources.strip_prefix_all(base); - self - } -} - -/// Helper type to implement display for solc errors -#[derive(Clone, Debug)] -pub struct OutputDiagnostics<'a> { - /// output of the compiled project - compiler_output: &'a AggregatedCompilerOutput, - /// the error codes to ignore - ignored_error_codes: &'a [u64], - /// the file paths to ignore - ignored_file_paths: &'a [PathBuf], - /// set minimum level of severity that is treated as an error - compiler_severity_filter: Severity, -} - -impl<'a> OutputDiagnostics<'a> { - /// Returns true if there is at least one error of high severity - pub fn has_error(&self) -> bool { - self.compiler_output.has_error( - self.ignored_error_codes, - self.ignored_file_paths, - &self.compiler_severity_filter, - ) - } - - /// Returns true if there is at least one warning - pub fn has_warning(&self) -> bool { - self.compiler_output.has_warning(self.ignored_error_codes, self.ignored_file_paths) - } - - /// Returns true if the contract is a expected to be a test - fn is_test>(&self, contract_path: T) -> bool { - if contract_path.as_ref().ends_with(".t.sol") { - return true; - } - - self.compiler_output.find_first(&contract_path).map_or(false, |contract| { - contract.abi.map_or(false, |abi| abi.functions.contains_key("IS_TEST")) - }) - } -} - -impl<'a> fmt::Display for OutputDiagnostics<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("Compiler run ")?; - if self.has_error() { - Paint::red("failed:") - } else if self.has_warning() { - Paint::yellow("successful with warnings:") - } else { - Paint::green("successful!") - } - .fmt(f)?; - - for err in &self.compiler_output.errors { - let mut ignored = false; - if err.severity.is_warning() { - if let Some(code) = err.error_code { - if let Some(source_location) = &err.source_location { - // we ignore spdx and contract size warnings in test - // files. 
if we are looking at one of these warnings - // from a test file we skip - ignored = - self.is_test(&source_location.file) && (code == 1878 || code == 5574); - - // we ignore warnings coming from ignored files - let source_path = Path::new(&source_location.file); - ignored |= self - .ignored_file_paths - .iter() - .any(|ignored_path| source_path.starts_with(ignored_path)); - } - - ignored |= self.ignored_error_codes.contains(&code); - } - } - - if !ignored { - f.write_str("\n")?; - err.fmt(f)?; - } - } - - Ok(()) - } -} diff --git a/crates/compilers/src/zksync/compile/project.rs b/crates/compilers/src/zksync/compile/project.rs deleted file mode 100644 index 6d0e97ea..00000000 --- a/crates/compilers/src/zksync/compile/project.rs +++ /dev/null @@ -1,400 +0,0 @@ -use crate::{ - artifact_output::Artifacts, - cache::ArtifactsCache, - compilers::{zksolc::ZkSolcCompiler, CompilerInput, CompilerSettings}, - error::Result, - filter::SparseOutputFilter, - output::Builds, - report, - resolver::{parse::SolData, GraphEdges}, - zksolc::input::ZkSolcVersionedInput, - zksync::{ - self, - artifact_output::zk::{ZkArtifactOutput, ZkContractArtifact}, - compile::output::{AggregatedCompilerOutput, ProjectCompileOutput}, - }, - Graph, Project, Sources, -}; -use foundry_compilers_artifacts::{zksolc::CompilerOutput, SolcLanguage}; -use semver::Version; -use std::{collections::HashMap, path::PathBuf, time::Instant}; - -/// A set of different Solc installations with their version and the sources to be compiled -pub(crate) type VersionedSources = HashMap>; - -#[derive(Debug)] -pub struct ProjectCompiler<'a> { - /// Contains the relationship of the source files and their imports - edges: GraphEdges, - project: &'a Project, - /// how to compile all the sources - sources: CompilerSources, -} - -impl<'a> ProjectCompiler<'a> { - /// Create a new `ProjectCompiler` to bootstrap the compilation process of the project's - /// sources. - pub fn new(project: &'a Project) -> Result { - Self::with_sources(project, project.paths.read_input_files()?) - } - - /// Bootstraps the compilation process by resolving the dependency graph of all sources and the - /// appropriate `Solc` -> `Sources` set as well as the compile mode to use (parallel, - /// sequential) - /// - /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows - /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. - pub fn with_sources( - project: &'a Project, - mut sources: Sources, - ) -> Result { - if let Some(filter) = &project.sparse_output { - sources.retain(|f, _| filter.is_match(f)) - } - let graph = Graph::resolve_sources(&project.paths, sources)?; - let (sources, edges) = graph.into_sources_by_version( - project.offline, - &project.locked_versions, - &project.compiler, - )?; - // If there are multiple different versions, and we can use multiple jobs we can compile - // them in parallel. 
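(Editorial aside, not from the original source: a sketch of the `language -> version -> sources` grouping that `VersionedSources` models, with plain string stand-ins for the crate's `SolcLanguage`, `Version`, and `Sources` types. Each bucket becomes one compiler invocation, which is what makes the per-version parallelism mentioned above possible.)

use std::collections::HashMap;

// Stand-ins for the crate's SolcLanguage/Version/Sources types (illustrative only).
type Language = &'static str;
type Version = &'static str;
type Sources = Vec<&'static str>;

fn main() {
    // language -> (version -> sources), mirroring `VersionedSources`.
    let mut sources: HashMap<Language, HashMap<Version, Sources>> = HashMap::new();
    sources.entry("Solidity").or_default().entry("0.8.20").or_default().push("src/A.sol");
    sources.entry("Solidity").or_default().entry("0.7.6").or_default().push("src/B.sol");

    // Each (language, version) bucket is an independent compile job.
    for (language, by_version) in &sources {
        for (version, files) in by_version {
            println!("job: {language} {version} -> {files:?}");
        }
    }
}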
- let sources = CompilerSources { sources }; - Ok(Self { edges, project, sources }) - } - - pub fn compile(self) -> Result { - let slash_paths = self.project.slash_paths; - - // drive the compiler statemachine to completion - let mut output = self.preprocess()?.compile()?.write_artifacts()?.write_cache()?; - - if slash_paths { - // ensures we always use `/` paths - output.slash_paths(); - } - - Ok(output) - } - - /// Does basic preprocessing - /// - sets proper source unit names - /// - check cache - fn preprocess(self) -> Result> { - trace!("preprocessing"); - let Self { edges, project, mut sources } = self; - - // convert paths on windows to ensure consistency with the `CompilerOutput` `solc` emits, - // which is unix style `/` - sources.slash_paths(); - - let mut cache = ArtifactsCache::new(project, edges)?; - // retain and compile only dirty sources and all their imports - sources.filter(&mut cache); - - Ok(PreprocessedState { sources, cache }) - } -} - -/// A series of states that comprise the [`ProjectCompiler::compile()`] state machine -/// -/// The main reason is to debug all states individually -#[derive(Debug)] -struct PreprocessedState<'a> { - /// Contains all the sources to compile. - sources: CompilerSources, - - /// Cache that holds `CacheEntry` objects if caching is enabled and the project is recompiled - cache: ArtifactsCache<'a, ZkArtifactOutput, ZkSolcCompiler>, -} - -impl<'a> PreprocessedState<'a> { - /// advance to the next state by compiling all sources - fn compile(self) -> Result> { - trace!("compiling"); - let PreprocessedState { sources, mut cache } = self; - - let mut output = sources.compile(&mut cache)?; - - // source paths get stripped before handing them over to solc, so solc never uses absolute - // paths, instead `--base-path ` is set. this way any metadata that's derived from - // data (paths) is relative to the project dir and should be independent of the current OS - // disk. 
However internally we still want to keep absolute paths, so we join the - // contracts again - output.join_all(cache.project().root()); - - Ok(CompiledState { output, cache }) - } -} - -/// Represents the state after `zksolc` was successfully invoked -#[derive(Debug)] -struct CompiledState<'a> { - output: AggregatedCompilerOutput, - cache: ArtifactsCache<'a, ZkArtifactOutput, ZkSolcCompiler>, -} - -impl<'a> CompiledState<'a> { - /// advance to the next state by handling all artifacts - /// - /// Writes all output contracts to disk if enabled in the `Project` and if the build was - /// successful - #[instrument(skip_all, name = "write-artifacts")] - fn write_artifacts(self) -> Result> { - let CompiledState { output, cache } = self; - - let project = cache.project(); - let ctx = cache.output_ctx(); - // write all artifacts via the handler but only if the build succeeded and project wasn't - // configured with `no_artifacts == true` - let compiled_artifacts = if project.no_artifacts { - project.artifacts.zksync_output_to_artifacts( - &output.contracts, - &output.sources, - ctx, - &project.paths, - ) - } else if output.has_error( - &project.ignored_error_codes, - &project.ignored_file_paths, - &project.compiler_severity_filter, - ) { - trace!("skip writing cache file due to solc errors: {:?}", output.errors); - project.artifacts.zksync_output_to_artifacts( - &output.contracts, - &output.sources, - ctx, - &project.paths, - ) - } else { - trace!( - "handling artifact output for {} contracts and {} sources", - output.contracts.len(), - output.sources.len() - ); - // this emits the artifacts via the project's artifacts handler - let artifacts = project.artifacts.zksync_on_output( - &output.contracts, - &output.sources, - &project.paths, - ctx, - )?; - - // emits all the build infos, if they exist - output.write_build_infos(project.build_info_path())?; - - artifacts - }; - - Ok(ArtifactsState { output, cache, compiled_artifacts }) - } -} - -/// Represents the state after all artifacts were written to disk -#[derive(Debug)] -struct ArtifactsState<'a> { - output: AggregatedCompilerOutput, - cache: ArtifactsCache<'a, ZkArtifactOutput, ZkSolcCompiler>, - compiled_artifacts: Artifacts, -} - -impl<'a> ArtifactsState<'a> { - /// Writes the cache file - /// - /// this concludes the [`Project::compile()`] statemachine - fn write_cache(self) -> Result { - let ArtifactsState { output, cache, compiled_artifacts } = self; - let project = cache.project(); - let ignored_error_codes = project.ignored_error_codes.clone(); - let ignored_file_paths = project.ignored_file_paths.clone(); - let compiler_severity_filter = project.compiler_severity_filter; - let has_error = - output.has_error(&ignored_error_codes, &ignored_file_paths, &compiler_severity_filter); - let skip_write_to_disk = project.no_artifacts || has_error; - trace!(has_error, project.no_artifacts, skip_write_to_disk, cache_path=?project.cache_path(),"prepare writing cache file"); - - let (cached_artifacts, cached_builds) = - cache.consume(&compiled_artifacts, &output.build_infos, !skip_write_to_disk)?; - - //project.artifacts_handler().handle_cached_artifacts(&cached_artifacts)?; - // - let builds = Builds( - output - .build_infos - .iter() - .map(|build_info| (build_info.id.clone(), build_info.build_context.clone())) - .chain(cached_builds) - .map(|(id, context)| (id, context.with_joined_paths(project.paths.root.as_path()))) - .collect(), - ); - - Ok(ProjectCompileOutput { - compiler_output: output, - compiled_artifacts, - cached_artifacts, - 
ignored_error_codes, - ignored_file_paths, - compiler_severity_filter, - builds, - }) - } -} - -/// Determines how the `solc <-> sources` pairs are executed -#[derive(Debug, Clone)] -struct CompilerSources { - sources: VersionedSources, -} - -impl CompilerSources { - /// Converts all `\\` separators to `/` - /// - /// This effectively ensures that `solc` can find imported files like `/src/Cheats.sol` in the - /// VFS (the `ZkSolcInput` as json) under `src/Cheats.sol`. - fn slash_paths(&mut self) { - #[cfg(windows)] - { - use path_slash::PathBufExt; - - self.sources.values_mut().for_each(|versioned_sources| { - versioned_sources.values_mut().for_each(|sources| { - *sources = std::mem::take(sources) - .into_iter() - .map(|(path, source)| { - (PathBuf::from(path.to_slash_lossy().as_ref()), source) - }) - .collect() - }) - }); - } - } - - /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] - fn filter(&mut self, cache: &mut ArtifactsCache<'_, ZkArtifactOutput, ZkSolcCompiler>) { - cache.remove_dirty_sources(); - for versioned_sources in self.sources.values_mut() { - for (version, sources) in versioned_sources { - trace!("Filtering {} sources for {}", sources.len(), version); - cache.filter(sources, version); - trace!( - "Detected {} sources to compile {:?}", - sources.dirty().count(), - sources.dirty_files().collect::>() - ); - } - } - } - - /// Compiles all the files with `ZkSolc` - fn compile( - self, - cache: &mut ArtifactsCache<'_, ZkArtifactOutput, ZkSolcCompiler>, - ) -> Result { - let project = cache.project(); - let graph = cache.graph(); - - let sparse_output = SparseOutputFilter::new(project.sparse_output.as_deref()); - - // Include additional paths collected during graph resolution. - let mut include_paths = project.paths.include_paths.clone(); - include_paths.extend(graph.include_paths().clone()); - - let mut jobs = Vec::new(); - for (language, versioned_sources) in self.sources { - for (version, sources) in versioned_sources { - if sources.is_empty() { - // nothing to compile - trace!("skip {} for empty sources set", version); - continue; - } - - // depending on the composition of the filtered sources, the output selection can be - // optimized - let mut opt_settings = project.settings.clone(); - let actually_dirty = - sparse_output.sparse_sources(&sources, &mut opt_settings, graph); - - if actually_dirty.is_empty() { - // nothing to compile for this particular language, all dirty files are in the - // other language set - trace!("skip {} run due to empty source set", version); - continue; - } - - trace!("calling {} with {} sources {:?}", version, sources.len(), sources.keys()); - let zksync_settings = project - .settings - .clone() - .with_base_path(&project.paths.root) - .with_allow_paths(&project.paths.allowed_paths) - .with_include_paths(&include_paths) - .with_remappings(&project.paths.remappings); - - let mut input = ZkSolcVersionedInput::build( - sources, - zksync_settings, - language, - version.clone(), - ); - - input.strip_prefix(project.paths.root.as_path()); - - jobs.push((input, actually_dirty)); - } - } - - let results = compile_sequential(&project.compiler, jobs)?; - - let mut aggregated = AggregatedCompilerOutput::default(); - - for (input, mut output, actually_dirty) in results { - let version = input.version(); - - // Mark all files as seen by the compiler - for file in &actually_dirty { - cache.compiler_seen(file); - } - - let build_info = zksync::raw_build_info_new(&input, &output, project.build_info)?; - - 
output.retain_files( - actually_dirty - .iter() - .map(|f| f.strip_prefix(project.paths.root.as_path()).unwrap_or(f)), - ); - output.join_all(project.paths.root.as_path()); - - aggregated.extend(version.clone(), build_info, output); - } - - Ok(aggregated) - } -} - -/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s -fn compile_sequential( - zksolc_compiler: &ZkSolcCompiler, - jobs: Vec<(ZkSolcVersionedInput, Vec)>, -) -> Result)>> { - jobs.into_iter() - .map(|(input, actually_dirty)| { - let zksolc = zksolc_compiler.zksolc(&input)?; - - let (compiler_name, version) = - if let Some(zk_version) = zksolc.solc_version_info.zksync_version.as_ref() { - ("zksolc and ZKsync solc".to_string(), zk_version.clone()) - } else { - (input.compiler_name().to_string(), input.version().clone()) - }; - - let start = Instant::now(); - report::compiler_spawn(&compiler_name, &version, actually_dirty.as_slice()); - - let output = zksolc.compile(&input.input)?; - - report::compiler_success(&compiler_name, &version, &start.elapsed()); - - Ok((input, output, actually_dirty)) - }) - .collect() -} diff --git a/crates/compilers/src/zksync/mod.rs b/crates/compilers/src/zksync/mod.rs deleted file mode 100644 index 4bf38ec8..00000000 --- a/crates/compilers/src/zksync/mod.rs +++ /dev/null @@ -1,182 +0,0 @@ -use std::{ - collections::{BTreeMap, HashSet}, - path::{Path, PathBuf}, -}; - -use alloy_primitives::hex; -use foundry_compilers_artifacts::{zksolc::CompilerOutput, SolcLanguage}; -use foundry_compilers_core::error::SolcError; - -use crate::{ - buildinfo::{BuildContext, RawBuildInfo, ETHERS_FORMAT_VERSION}, - error::Result, - resolver::parse::SolData, - zksolc::{ - input::{StandardJsonCompilerInput, ZkSolcVersionedInput}, - settings::ZkSolcSettings, - ZkSolcCompiler, - }, - CompilerInput, Graph, Project, Source, -}; - -use md5::Digest; - -use self::{artifact_output::zk::ZkArtifactOutput, compile::output::ProjectCompileOutput}; - -pub mod artifact_output; -pub mod compile; - -pub fn project_compile( - project: &Project, -) -> Result { - self::compile::project::ProjectCompiler::new(project)?.compile() -} - -pub fn project_compile_files( - project: &Project, - files: I, -) -> Result -where - I: IntoIterator, - P: Into, -{ - let sources = Source::read_all(files)?; - self::compile::project::ProjectCompiler::with_sources(project, sources)?.compile() -} - -pub fn project_standard_json_input( - project: &Project, - target: &Path, -) -> Result { - tracing::debug!(?target, "standard_json_input for zksync"); - let graph = Graph::::resolve(&project.paths)?; - let target_index = graph - .files() - .get(target) - .ok_or_else(|| SolcError::msg(format!("cannot resolve file at {:?}", target.display())))?; - - let mut sources = Vec::new(); - let mut unique_paths = HashSet::new(); - let (path, source) = graph.node(*target_index).unpack(); - unique_paths.insert(path.clone()); - sources.push((path, source)); - sources.extend( - graph - .all_imported_nodes(*target_index) - .map(|index| graph.node(index).unpack()) - .filter(|(p, _)| unique_paths.insert(p.to_path_buf())), - ); - - let root = project.root(); - let sources = sources - .into_iter() - .map(|(path, source)| (rebase_path(root, path), source.clone())) - .collect(); - - let mut zk_solc_settings: ZkSolcSettings = project.settings.clone(); - // strip the path to the project root from all remappings - zk_solc_settings.settings.remappings = project - .paths - .remappings - .clone() - .into_iter() - .map(|r| 
r.into_relative(project.root()).to_relative_remapping()) - .collect::>(); - - zk_solc_settings.settings.libraries.libs = zk_solc_settings - .settings - .libraries - .libs - .into_iter() - .map(|(f, libs)| (f.strip_prefix(project.root()).unwrap_or(&f).to_path_buf(), libs)) - .collect(); - - let input = StandardJsonCompilerInput::new(sources, zk_solc_settings.settings); - - Ok(input) -} - -// Copied from compilers/lib private method -fn rebase_path(base: &Path, path: &Path) -> PathBuf { - use path_slash::PathExt; - - let mut base_components = base.components(); - let mut path_components = path.components(); - - let mut new_path = PathBuf::new(); - - while let Some(path_component) = path_components.next() { - let base_component = base_components.next(); - - if Some(path_component) != base_component { - if base_component.is_some() { - new_path.extend( - std::iter::repeat(std::path::Component::ParentDir) - .take(base_components.count() + 1), - ); - } - - new_path.push(path_component); - new_path.extend(path_components); - - break; - } - } - - new_path.to_slash_lossy().into_owned().into() -} - -pub fn build_context_new( - input: &ZkSolcVersionedInput, - output: &CompilerOutput, -) -> Result> { - let mut source_id_to_path = BTreeMap::new(); - - let input_sources = input.sources().map(|(path, _)| path).collect::>(); - for (path, source) in output.sources.iter() { - if input_sources.contains(path.as_path()) { - source_id_to_path.insert(source.id, path.to_path_buf()); - } - } - - Ok(BuildContext { source_id_to_path, language: input.language() }) -} - -pub fn raw_build_info_new( - input: &ZkSolcVersionedInput, - output: &CompilerOutput, - full_build_info: bool, -) -> Result> { - // TODO: evaluate if this should be zksolc version instead - let version = input.solc_version.clone(); - let build_context = build_context_new(input, output)?; - - let mut hasher = md5::Md5::new(); - - hasher.update(ETHERS_FORMAT_VERSION); - - let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch); - hasher.update(&solc_short); - hasher.update(version.to_string()); - - let input = serde_json::to_value(input)?; - hasher.update(&serde_json::to_string(&input)?); - - // create the hash for `{_format,solcVersion,solcLongVersion,input}` - // N.B. 
this is not exactly the same as hashing the json representation of these values but - // the must efficient one - let result = hasher.finalize(); - let id = hex::encode(result); - - let mut build_info = BTreeMap::new(); - - if full_build_info { - build_info.insert("_format".to_string(), serde_json::to_value(ETHERS_FORMAT_VERSION)?); - build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?); - build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?); - build_info.insert("input".to_string(), input); - build_info.insert("output".to_string(), serde_json::to_value(output)?); - } - - Ok(RawBuildInfo { id, build_info, build_context }) -} diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml deleted file mode 100644 index d132dedc..00000000 --- a/crates/core/Cargo.toml +++ /dev/null @@ -1,43 +0,0 @@ -[package] -name = "foundry-compilers-core" -description = "Core utilities for foundry-compilers crates" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -alloy-primitives.workspace = true -cfg-if.workspace = true -dunce.workspace = true -once_cell.workspace = true -path-slash.workspace = true -regex.workspace = true -semver.workspace = true -serde_json.workspace = true -serde.workspace = true -thiserror.workspace = true -walkdir.workspace = true - -svm = { workspace = true, optional = true } -tokio = { workspace = true, optional = true } - -tempfile = { workspace = true, optional = true } -fs_extra = { version = "1.3", optional = true } - -[dev-dependencies] -tempfile.workspace = true - -[features] -project-util = ["dep:tempfile", "dep:fs_extra"] -svm-solc = ["dep:svm", "dep:tokio"] -async = ["dep:tokio"] -test-utils = ["dep:tempfile"] diff --git a/crates/core/src/error.rs b/crates/core/src/error.rs deleted file mode 100644 index d58b0f44..00000000 --- a/crates/core/src/error.rs +++ /dev/null @@ -1,130 +0,0 @@ -use semver::Version; -use std::{ - io, - path::{Path, PathBuf}, -}; -use thiserror::Error; - -pub type Result = std::result::Result; - -#[allow(unused_macros)] -#[macro_export] -macro_rules! format_err { - ($($tt:tt)*) => { - $crate::error::SolcError::msg(format!($($tt)*)) - }; -} - -#[allow(unused_macros)] -#[macro_export] -macro_rules! bail { - ($($tt:tt)*) => { return Err(format_err!($($tt)*)) }; -} - -/// Various error types -#[derive(Debug, Error)] -pub enum SolcError { - /// Errors related to the Solc executable itself. 
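-    // (Editorial note, not in the original file: with `thiserror`'s derive, each
-    // `#[error("...")]` attribute below becomes the variant's `Display` output.
-    // For example, `SolcError::msg("no pragma")` wraps the general-purpose
-    // `Message` variant declared as `#[error("{0}")]` further down, so
-    // `err.to_string()` yields exactly "no pragma".)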
- #[error("solc exited with {0}\n{1}")] - SolcError(std::process::ExitStatus, String), - #[error("invalid UTF-8 in Solc output")] - InvalidUtf8, - #[error("missing pragma from Solidity file")] - PragmaNotFound, - #[error("could not find Solc version locally or upstream")] - VersionNotFound, - #[error("checksum mismatch for {file}: expected {expected} found {detected} for {version}")] - ChecksumMismatch { version: Version, expected: String, detected: String, file: PathBuf }, - #[error("checksum not found for {version}")] - ChecksumNotFound { version: Version }, - #[error(transparent)] - SemverError(#[from] semver::Error), - /// Deserialization error - #[error(transparent)] - SerdeJson(#[from] serde_json::Error), - /// Filesystem IO error - #[error(transparent)] - Io(#[from] SolcIoError), - #[error("file could not be resolved due to broken symlink: {0}")] - ResolveBadSymlink(SolcIoError), - /// Failed to resolve a file - #[error("failed to resolve file: {0}; check configured remappings")] - Resolve(SolcIoError), - #[error("file cannot be resolved due to mismatch of file name case: {error}.\nFound existing file: {existing_file:?}\nPlease check the case of the import.")] - ResolveCaseSensitiveFileName { error: SolcIoError, existing_file: PathBuf }, - #[error( - "{0}\n\t\ - --> {1}\n\t\ - {2}" - )] - FailedResolveImport(Box, PathBuf, PathBuf), - #[cfg(feature = "svm-solc")] - #[error(transparent)] - SvmError(#[from] svm::SvmError), - #[error("no contracts found at \"{0}\"")] - NoContracts(String), - /// General purpose message. - #[error("{0}")] - Message(String), - - #[error("no artifact found for `{}:{}`", .0.display(), .1)] - ArtifactNotFound(PathBuf, String), - - #[cfg(feature = "project-util")] - #[error(transparent)] - FsExtra(#[from] fs_extra::error::Error), -} - -impl SolcError { - pub fn io(err: io::Error, path: impl Into) -> Self { - SolcIoError::new(err, path).into() - } - - /// Create an error from the Solc executable's output. - pub fn solc_output(output: &std::process::Output) -> Self { - let mut msg = String::from_utf8_lossy(&output.stderr); - let mut trimmed = msg.trim(); - if trimmed.is_empty() { - msg = String::from_utf8_lossy(&output.stdout); - trimmed = msg.trim(); - if trimmed.is_empty() { - trimmed = ""; - } - } - Self::SolcError(output.status, trimmed.into()) - } - - /// General purpose message. - pub fn msg(msg: impl std::fmt::Display) -> Self { - Self::Message(msg.to_string()) - } -} - -#[derive(Debug, Error)] -#[error("\"{}\": {io}", self.path.display())] -pub struct SolcIoError { - io: io::Error, - path: PathBuf, -} - -impl SolcIoError { - pub fn new(io: io::Error, path: impl Into) -> Self { - Self { io, path: path.into() } - } - - /// The path at which the error occurred - pub fn path(&self) -> &Path { - &self.path - } - - /// The underlying `io::Error` - pub fn source(&self) -> &io::Error { - &self.io - } -} - -impl From for io::Error { - fn from(err: SolcIoError) -> Self { - err.io - } -} diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs deleted file mode 100644 index 0137e2a4..00000000 --- a/crates/core/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! Core utilities for foundry-compilers crates. - -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -pub mod error; -pub mod utils; diff --git a/crates/core/src/utils.rs b/crates/core/src/utils.rs deleted file mode 100644 index 62441497..00000000 --- a/crates/core/src/utils.rs +++ /dev/null @@ -1,918 +0,0 @@ -//! 
diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs deleted file mode 100644 index 0137e2a4..00000000 --- a/crates/core/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! Core utilities for foundry-compilers crates. - -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -pub mod error; -pub mod utils; diff --git a/crates/core/src/utils.rs b/crates/core/src/utils.rs deleted file mode 100644 index 62441497..00000000 --- a/crates/core/src/utils.rs +++ /dev/null @@ -1,918 +0,0 @@ -//! Utility functions - -use crate::error::{SolcError, SolcIoError}; -use alloy_primitives::{hex, keccak256}; -use cfg_if::cfg_if; -use once_cell::sync::Lazy; -use regex::{Match, Regex}; -use semver::{Version, VersionReq}; -use serde::{de::DeserializeOwned, Serialize}; -use std::{ - collections::HashSet, - fs, - io::Write, - ops::Range, - path::{Component, Path, PathBuf}, -}; -use walkdir::WalkDir; - -/// A regex that matches the import path and identifier of a solidity import -/// statement with the named groups "path", "id". -// Adapted from -pub static RE_SOL_IMPORT: Lazy<Regex> = Lazy::new(|| { - Regex::new(r#"import\s+(?:(?:"(?P<p1>.*)"|'(?P<p2>.*)')(?:\s+as\s+\w+)?|(?:(?:\w+(?:\s+as\s+\w+)?|\*\s+as\s+\w+|\{\s*(?:\w+(?:\s+as\s+\w+)?(?:\s*,\s*)?)+\s*\})\s+from\s+(?:"(?P<p3>.*)"|'(?P<p4>.*)')))\s*;"#).unwrap() -}); - -/// A regex that matches an alias within an import statement -pub static RE_SOL_IMPORT_ALIAS: Lazy<Regex> = - Lazy::new(|| Regex::new(r#"(?:(?P<target>\w+)|\*|'|")\s+as\s+(?P<alias>\w+)"#).unwrap()); - -/// A regex that matches the version part of a solidity pragma -/// as follows: `pragma solidity ^0.5.2;` => `^0.5.2` -/// statement with the named group "version". -// Adapted from -pub static RE_SOL_PRAGMA_VERSION: Lazy<Regex> = - Lazy::new(|| Regex::new(r"pragma\s+solidity\s+(?P<version>.+?);").unwrap()); - -/// A regex that matches the SPDX license identifier -/// statement with the named group "license". -pub static RE_SOL_SDPX_LICENSE_IDENTIFIER: Lazy<Regex> = - Lazy::new(|| Regex::new(r"///?\s*SPDX-License-Identifier:\s*(?P<license>.+)").unwrap()); - -/// A regex used to remove extra lines in flattened files -pub static RE_THREE_OR_MORE_NEWLINES: Lazy<Regex> = Lazy::new(|| Regex::new("\n{3,}").unwrap()); - -/// A regex that matches a version pragma in a Vyper file -pub static RE_VYPER_VERSION: Lazy<Regex> = - Lazy::new(|| Regex::new(r"#(?:pragma version|@version)\s+(?P<version>.+)").unwrap()); -
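// Illustrative sketch (not from the original file): the two most commonly used
// regexes above in action, extracting a pragma version and an import path.
fn demo_regexes() {
    let source = "pragma solidity ^0.8.10;\nimport \"./lib/Math.sol\";";
    let version = RE_SOL_PRAGMA_VERSION.captures(source).and_then(|c| c.name("version"));
    assert_eq!(version.map(|m| m.as_str()), Some("^0.8.10"));
    let import = RE_SOL_IMPORT.captures(source).and_then(|c| c.name("p1"));
    assert_eq!(import.map(|m| m.as_str()), Some("./lib/Math.sol"));
}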
-/// Extensions acceptable by solc compiler. -pub const SOLC_EXTENSIONS: &[&str] = &["sol", "yul"]; - -/// Support for configuring the EVM version -/// -pub const BYZANTIUM_SOLC: Version = Version::new(0, 4, 21); - -/// Bug fix for configuring the EVM version with Constantinople -/// -pub const CONSTANTINOPLE_SOLC: Version = Version::new(0, 4, 22); - -/// Petersburg support -/// -pub const PETERSBURG_SOLC: Version = Version::new(0, 5, 5); - -/// Istanbul support -/// -pub const ISTANBUL_SOLC: Version = Version::new(0, 5, 14); - -/// Berlin support -/// -pub const BERLIN_SOLC: Version = Version::new(0, 8, 5); - -/// London support -/// -pub const LONDON_SOLC: Version = Version::new(0, 8, 7); - -/// Paris support -/// -pub const PARIS_SOLC: Version = Version::new(0, 8, 18); - -/// Shanghai support -/// -pub const SHANGHAI_SOLC: Version = Version::new(0, 8, 20); - -/// Cancun support -/// -pub const CANCUN_SOLC: Version = Version::new(0, 8, 24); - -/// Prague support -/// -pub const PRAGUE_SOLC: Version = Version::new(0, 8, 27); - -// `--base-path` was introduced in 0.6.9 -pub static SUPPORTS_BASE_PATH: Lazy<VersionReq> = - Lazy::new(|| VersionReq::parse(">=0.6.9").unwrap()); - -// `--include-path` was introduced in 0.8.8 -pub static SUPPORTS_INCLUDE_PATH: Lazy<VersionReq> = - Lazy::new(|| VersionReq::parse(">=0.8.8").unwrap()); - -/// Create a regex that matches any library or contract name inside a file -pub fn create_contract_or_lib_name_regex(name: &str) -> Regex { - Regex::new(&format!(r#"(?:using\s+(?P<n1>{name})\s+|is\s+(?:\w+\s*,\s*)*(?P<n2>{name})(?:\s*,\s*\w+)*|(?:(?P<ignore>(?:function|error|as)\s+|\n[^\n]*(?:"([^"\n]|\\")*|'([^'\n]|\\')*))|\W+)(?P<n3>{name})(?:\.|\(| ))"#)).unwrap() -} - -/// Move a range by a specified offset -pub fn range_by_offset(range: &Range<usize>, offset: isize) -> Range<usize> { - Range { - start: offset.saturating_add(range.start as isize) as usize, - end: offset.saturating_add(range.end as isize) as usize, - } -} - -/// Returns all path parts from any solidity import statement in a string, -/// `import "./contracts/Contract.sol";` -> `"./contracts/Contract.sol"`. -/// -/// See also -pub fn find_import_paths(contract: &str) -> impl Iterator<Item = Match<'_>> { - RE_SOL_IMPORT.captures_iter(contract).filter_map(|cap| { - cap.name("p1") - .or_else(|| cap.name("p2")) - .or_else(|| cap.name("p3")) - .or_else(|| cap.name("p4")) - }) -} - -/// Returns the solidity version pragma from the given input: -/// `pragma solidity ^0.5.2;` => `^0.5.2` -pub fn find_version_pragma(contract: &str) -> Option<Match<'_>> { - RE_SOL_PRAGMA_VERSION.captures(contract)?.name("version") -} - -/// Returns an iterator that yields all solidity/yul files found under the given root path or the -/// `root` itself, if it is a sol/yul file -/// -/// This also follows symlinks. -pub fn source_files_iter<'a>( - root: &Path, - extensions: &'a [&'a str], -) -> impl Iterator<Item = PathBuf> + 'a { - WalkDir::new(root) - .follow_links(true) - .into_iter() - .filter_map(Result::ok) - .filter(|e| e.file_type().is_file()) - .filter(|e| { - e.path().extension().map(|ext| extensions.iter().any(|e| ext == *e)).unwrap_or_default() - }) - .map(|e| e.path().into()) -} -
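// Illustrative sketch (not from the original file): how the version constants and
// requirements above are typically consulted when driving the solc CLI.
fn demo_version_gates(version: &Version) {
    let cancun_ok = *version >= CANCUN_SOLC;
    let base_path_ok = SUPPORTS_BASE_PATH.matches(version);
    let _ = (cancun_ok, base_path_ok);
}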
-/// Returns a list of absolute paths to all the solidity files under the root, or the file itself, -/// if the path is a solidity file. -/// -/// This also follows symlinks. -/// -/// NOTE: this does not resolve imports from other locations -/// -/// # Examples -/// -/// ```no_run -/// use foundry_compilers_core::utils; -/// let sources = utils::source_files("./contracts".as_ref(), &utils::SOLC_EXTENSIONS); -/// ``` -pub fn source_files(root: &Path, extensions: &[&str]) -> Vec<PathBuf> { - source_files_iter(root, extensions).collect() -} - -/// Same as [source_files] but only returns files acceptable by Solc compiler. -pub fn sol_source_files(root: &Path) -> Vec<PathBuf> { - source_files(root, SOLC_EXTENSIONS) -} - -/// Returns a list of _unique_ paths to all folders under `root` that contain at least one solidity -/// file (`*.sol`). -/// -/// # Examples -/// -/// ```no_run -/// use foundry_compilers_core::utils; -/// let dirs = utils::solidity_dirs("./lib".as_ref()); -/// ``` -/// -/// For the following layout this will return -/// `["lib/ds-token/src", "lib/ds-token/src/test", "lib/ds-token/lib/ds-math/src", ...]` -/// -/// ```text -/// lib -/// └── ds-token -/// ├── lib -/// │ ├── ds-math -/// │ │ └── src/Contract.sol -/// │ ├── ds-stop -/// │ │ └── src/Contract.sol -/// │ ├── ds-test -/// │ └── src/Contract.sol -/// └── src -/// ├── base.sol -/// ├── test -/// │ ├── base.t.sol -/// └── token.sol -/// ``` -pub fn solidity_dirs(root: &Path) -> Vec<PathBuf> { - let sources = sol_source_files(root); - sources - .iter() - .filter_map(|p| p.parent()) - .collect::<HashSet<_>>() - .into_iter() - .map(|p| p.to_path_buf()) - .collect() -} - -/// Returns the source name for the given source path, i.e. the path stripped of the ancestors of -/// the root path. -/// -/// `/Users/project/sources/contract.sol` -> `sources/contract.sol` -pub fn source_name<'a>(source: &'a Path, root: &Path) -> &'a Path { - strip_prefix(source, root) -} - -/// Strips `root` from `source` and returns the relative path. -pub fn strip_prefix<'a>(source: &'a Path, root: &Path) -> &'a Path { - source.strip_prefix(root).unwrap_or(source) -} - -/// Strips `root` from `source` and returns the relative path. -pub fn strip_prefix_owned(source: PathBuf, root: &Path) -> PathBuf { - source.strip_prefix(root).map(Path::to_path_buf).unwrap_or(source) -} - -/// Attempts to determine if the given source is a local, relative import. -pub fn is_local_source_name(libs: &[impl AsRef<Path>], source: impl AsRef<Path>) -> bool { - resolve_library(libs, source.as_ref()).is_none() -} - -/// Canonicalize the path, platform-agnostic. -/// -/// On windows this will ensure the path only consists of `/` separators. -pub fn canonicalize(path: impl AsRef<Path>) -> Result<PathBuf, SolcIoError> { - let path = path.as_ref(); - let res = dunce::canonicalize(path); - #[cfg(windows)] - let res = res.map(|p| { - use path_slash::PathBufExt; - PathBuf::from(p.to_slash_lossy().as_ref()) - }); - res.map_err(|err| SolcIoError::new(err, path)) -} -
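// Illustrative sketch (not from the original file): `source_name` is a thin
// wrapper over `strip_prefix`, mapping an absolute source path to a root-relative one.
fn demo_source_name() {
    let root = Path::new("/Users/project");
    let source = Path::new("/Users/project/sources/contract.sol");
    assert_eq!(source_name(source, root), Path::new("sources/contract.sol"));
}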
-/// Returns a normalized Solidity file path for the given import path based on the specified -/// directory. -/// -/// This function resolves `./` and `../`, but, unlike [`canonicalize`], it does not resolve -/// symbolic links. -/// -/// The function returns an error if the normalized path does not exist in the file system. -/// -/// See also: -pub fn normalize_solidity_import_path( - directory: &Path, - import_path: &Path, -) -> Result<PathBuf, SolcIoError> { - let original = directory.join(import_path); - let cleaned = clean_solidity_path(&original); - - // this is to align the behavior with `canonicalize` - use path_slash::PathExt; - let normalized = PathBuf::from(dunce::simplified(&cleaned).to_slash_lossy().as_ref()); - - // checks if the path exists without reading its content and obtains an io error if it doesn't. - normalized.metadata().map(|_| normalized).map_err(|err| SolcIoError::new(err, original)) -} - -// This function lexically cleans the given path. -// -// It performs the following transformations for the path: -// -// * Resolves references (current directories (`.`) and parent (`..`) directories). -// * Reduces repeated separators to a single separator (e.g., from `//` to `/`). -// -// This transformation is lexical, not involving the file system, which means it does not account -// for symlinks. This approach has a caveat. For example, consider a filesystem-accessible path -// `a/b/../c.sol` passed to this function. It returns `a/c.sol`. However, if `b` is a symlink, -// `a/c.sol` might not be accessible in the filesystem in some environments. Despite this, it's -// unlikely that this will pose a problem for our intended use. -// -// # How it works -// -// The function splits the given path into components, where each component roughly corresponds to a -// string between separators. It then iterates over these components (starting from the leftmost -// part of the path) to reconstruct the path. The following steps are applied to each component: -// -// * If the component is a current directory, it's removed. -// * If the component is a parent directory, the following rules are applied: -// * If the preceding component is a normal component, then both the preceding normal component -// and the parent directory component are removed. (Examples of normal components include `a` and `b` -// in `a/b`.) -// * Otherwise (if there is no preceding component, or if the preceding component is a parent, -// root, or prefix), it remains untouched. -// * Otherwise, the component remains untouched. -// -// Finally, the processed components are reassembled into a path. -fn clean_solidity_path(original_path: &Path) -> PathBuf { - let mut new_path = Vec::new(); - - for component in original_path.components() { - match component { - Component::Prefix(..) | Component::RootDir | Component::Normal(..) => { - new_path.push(component); - } - Component::CurDir => {} - Component::ParentDir => { - if let Some(Component::Normal(..)) = new_path.last() { - new_path.pop(); - } else { - new_path.push(component); - } - } - } - } - - new_path.iter().collect() -} - -/// Returns the same path config but with canonicalized paths. -/// -/// This will take care of potential symbolic linked directories. -/// For example, the tempdir library is creating directories hosted under `/var/`, which in OS X -/// is a symbolic link to `/private/var/`. So when we try to resolve imports and a path is -/// rooted in a symbolic directory we might end up with different paths for the same file, like -/// `private/var/.../Dapp.sol` and `/var/.../Dapp.sol` -/// -/// This canonicalizes all the paths but does not treat non-existing dirs as an error -pub fn canonicalized(path: impl Into<PathBuf>) -> PathBuf { - let path = path.into(); - canonicalize(&path).unwrap_or(path) -} -
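// Illustrative sketch (not from the original file): resolving a relative import
// lexically against the importing file's directory; unlike `canonicalize`, any
// symlinks along the way are preserved.
fn demo_normalize(project_src: &Path) -> Result<PathBuf, SolcIoError> {
    // e.g. for an `import "./common/Burnable.sol";` inside `project_src/Token.sol`
    normalize_solidity_import_path(project_src, "./common/Burnable.sol".as_ref())
}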
-/// Returns the path to the library if the source path is in fact determined to be a library path, -/// and it exists. -/// Note: this does not handle relative imports or remappings. -pub fn resolve_library(libs: &[impl AsRef<Path>], source: impl AsRef<Path>) -> Option<PathBuf> { - let source = source.as_ref(); - let comp = source.components().next()?; - match comp { - Component::Normal(first_dir) => { - // attempt to verify that the root component of this source exists under a library - // folder - for lib in libs { - let lib = lib.as_ref(); - let contract = lib.join(source); - if contract.exists() { - // contract exists in <lib>/<source> - return Some(contract); - } - // check for <lib>/<first_dir>/src/<name>.sol - let contract = lib - .join(first_dir) - .join("src") - .join(source.strip_prefix(first_dir).expect("is first component")); - if contract.exists() { - return Some(contract); - } - } - None - } - Component::RootDir => Some(source.into()), - _ => None, - } -} - -/// Tries to find an absolute import like `src/interfaces/IConfig.sol` in `cwd`, moving up the path -/// until the `root` is reached. -/// -/// If an existing file under `root` is found, this returns the path up to the `import` path and the -/// normalized `import` path itself: -/// -/// For example, for the following layout: -/// -/// ```text -/// /mydependency/ -/// ├── src (`cwd`) -/// │ ├── interfaces -/// │ │ ├── IConfig.sol -/// ``` -/// and `import` as `src/interfaces/IConfig.sol` and `cwd` as `src` this will return -/// (`/mydependency/`, `/mydependency/src/interfaces/IConfig.sol`) -pub fn resolve_absolute_library( - root: &Path, - cwd: &Path, - import: &Path, -) -> Option<(PathBuf, PathBuf)> { - let mut parent = cwd.parent()?; - while parent != root { - if let Ok(import) = normalize_solidity_import_path(parent, import) { - return Some((parent.to_path_buf(), import)); - } - parent = parent.parent()?; - } - None -} - -/// Reads the list of Solc versions that have been installed on the machine. -/// -/// The version list is sorted in ascending order. -/// -/// Checks for installed solc versions under the given path as `<root>/<version>`, -/// (e.g.: `~/.svm/0.8.10`) and returns them sorted in ascending order. -pub fn installed_versions(root: &Path) -> Result<Vec<Version>, SolcError> { - let mut versions: Vec<_> = walkdir::WalkDir::new(root) - .max_depth(1) - .into_iter() - .filter_map(std::result::Result::ok) - .filter(|e| e.file_type().is_dir()) - .filter_map(|e: walkdir::DirEntry| { - e.path().file_name().and_then(|v| Version::parse(v.to_string_lossy().as_ref()).ok()) - }) - .collect(); - versions.sort(); - Ok(versions) -} -
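// Illustrative sketch (not from the original file): `resolve_library` probes a
// dapptools-style layout, first `<lib>/<source>` and then `<lib>/<first_dir>/src/...`.
fn demo_resolve_library(root: &Path) -> Option<PathBuf> {
    // with libs = [root/lib], "ds-test/test.sol" resolves to e.g.
    // root/lib/ds-test/test.sol or root/lib/ds-test/src/test.sol
    resolve_library(&[root.join("lib")], Path::new("ds-test/test.sol"))
}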
-/// Returns the 36 character (deprecated) fully qualified name placeholder -/// -/// If the name is longer than 36 characters, then the name gets truncated, -/// If the name is shorter than 36 characters, then the name is filled with trailing `_` -pub fn library_fully_qualified_placeholder(name: &str) -> String { - name.chars().chain(std::iter::repeat('_')).take(36).collect() -} - -/// Returns the library hash placeholder as `$hex(library_hash(name))$` -pub fn library_hash_placeholder(name: impl AsRef<[u8]>) -> String { - let mut s = String::with_capacity(34 + 2); - s.push('$'); - s.push_str(hex::Buffer::<17, false>::new().format(&library_hash(name))); - s.push('$'); - s -} - -/// Returns the library placeholder for the given name -/// The placeholder is a 34 character prefix of the hex encoding of the keccak256 hash of the fully -/// qualified library name. -/// -/// See also -pub fn library_hash(name: impl AsRef<[u8]>) -> [u8; 17] { - let hash = keccak256(name); - hash[..17].try_into().unwrap() -} - -/// Find the common ancestor, if any, between the given paths -/// -/// # Examples -/// -/// ``` -/// use foundry_compilers_core::utils::common_ancestor_all; -/// use std::path::{Path, PathBuf}; -/// -/// let baz = Path::new("/foo/bar/baz"); -/// let bar = Path::new("/foo/bar/bar"); -/// let foo = Path::new("/foo/bar/foo"); -/// let common = common_ancestor_all([baz, bar, foo]).unwrap(); -/// assert_eq!(common, Path::new("/foo/bar").to_path_buf()); -/// ``` -pub fn common_ancestor_all<I, P>(paths: I) -> Option<PathBuf> -where - I: IntoIterator<Item = P>, - P: AsRef<Path>, -{ - let mut iter = paths.into_iter(); - let mut ret = iter.next()?.as_ref().to_path_buf(); - for path in iter { - if let Some(r) = common_ancestor(&ret, path.as_ref()) { - ret = r; - } else { - return None; - } - } - Some(ret) -} - -/// Finds the common ancestor of both paths -/// -/// # Examples -/// -/// ``` -/// use foundry_compilers_core::utils::common_ancestor; -/// use std::path::{Path, PathBuf}; -/// -/// let foo = Path::new("/foo/bar/foo"); -/// let bar = Path::new("/foo/bar/bar"); -/// let ancestor = common_ancestor(foo, bar).unwrap(); -/// assert_eq!(ancestor, Path::new("/foo/bar")); -/// ``` -pub fn common_ancestor(a: &Path, b: &Path) -> Option<PathBuf> { - let a = a.components(); - let b = b.components(); - let mut ret = PathBuf::new(); - let mut found = false; - for (c1, c2) in a.zip(b) { - if c1 == c2 { - ret.push(c1); - found = true; - } else { - break; - } - } - if found { - Some(ret) - } else { - None - } -} - -/// Returns the right subpath in a dir -/// -/// Returns `<root>/<fave>` if it exists or `<root>/<alt>` does not exist, -/// Returns `<root>/<alt>` if it exists and `<root>/<fave>` does not exist. -pub fn find_fave_or_alt_path(root: &Path, fave: &str, alt: &str) -> PathBuf { - let p = root.join(fave); - if !p.exists() { - let alt = root.join(alt); - if alt.exists() { - return alt; - } - } - p -} - -/// Attempts to find a file with different case that exists next to the `non_existing` file -pub fn find_case_sensitive_existing_file(non_existing: &Path) -> Option<PathBuf> { - let non_existing_file_name = non_existing.file_name()?; - let parent = non_existing.parent()?; - WalkDir::new(parent) - .max_depth(1) - .into_iter() - .filter_map(Result::ok) - .filter(|e| e.file_type().is_file()) - .find_map(|e| { - let existing_file_name = e.path().file_name()?; - if existing_file_name.eq_ignore_ascii_case(non_existing_file_name) - && existing_file_name != non_existing_file_name - { - return Some(e.path().to_path_buf()); - } - None - }) -} - -cfg_if! { - if #[cfg(any(feature = "async", feature = "svm-solc"))] { - use tokio::runtime::{Handle, Runtime}; - - #[derive(Debug)] - pub enum RuntimeOrHandle { - Runtime(Runtime), - Handle(Handle), - } - - impl Default for RuntimeOrHandle { - fn default() -> Self { - Self::new() - } - } - - impl RuntimeOrHandle { - pub fn new() -> Self { - match Handle::try_current() { - Ok(handle) => Self::Handle(handle), - Err(_) => Self::Runtime(Runtime::new().expect("Failed to start runtime")), - } - } - - pub fn block_on<F: std::future::Future>(&self, f: F) -> F::Output { - match &self { - Self::Runtime(runtime) => runtime.block_on(f), - Self::Handle(handle) => tokio::task::block_in_place(|| handle.block_on(f)), - } - } - } - } -} -
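// Illustrative sketch (not from the original file) of the two placeholder formats
// above, as they appear in unlinked bytecode.
fn demo_placeholders() {
    let name = "src/libraries/Math.sol:Math";
    // exactly 36 characters, truncated or padded with trailing `_`
    let _fully_qualified = library_fully_qualified_placeholder(name);
    // `$` + 34 hex characters of keccak256(name) + `$`
    let _hashed = library_hash_placeholder(name);
}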
-/// Creates a new named tempdir. -#[cfg(any(test, feature = "project-util", feature = "test-utils"))] -pub fn tempdir(name: &str) -> Result<tempfile::TempDir, SolcIoError> { - tempfile::Builder::new().prefix(name).tempdir().map_err(|err| SolcIoError::new(err, name)) -} - -/// Reads the json file and deserialize it into the provided type. -pub fn read_json_file<T: DeserializeOwned>(path: &Path) -> Result<T, SolcError> { - // See: https://github.com/serde-rs/json/issues/160 - let s = fs::read_to_string(path).map_err(|err| SolcError::io(err, path))?; - serde_json::from_str(&s).map_err(Into::into) -} - -/// Serializes the provided value to JSON and writes it to a file. -pub fn write_json_file<T: Serialize>( - value: &T, - path: &Path, - capacity: usize, -) -> Result<(), SolcError> { - let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?; - let mut writer = std::io::BufWriter::with_capacity(capacity, file); - serde_json::to_writer(&mut writer, value)?; - writer.flush().map_err(|e| SolcError::io(e, path)) -} - -/// Creates the parent directory of the `file` and all its ancestors if it does not exist. -/// -/// See [`fs::create_dir_all()`]. -pub fn create_parent_dir_all(file: &Path) -> Result<(), SolcError> { - if let Some(parent) = file.parent() { - fs::create_dir_all(parent).map_err(|err| { - SolcError::msg(format!( - "Failed to create artifact parent folder \"{}\": {}", - parent.display(), - err - )) - })?; - } - Ok(()) -} - -/// Given the regex and the target string, find all occurrences of named groups within the string. -/// -/// This method returns the tuple of matches `(a, b)` where `a` is the match for the entire regex -/// and `b` is the match for the first named group. -/// -/// NOTE: This method will return the match for the first named group, so the order of passed named -/// groups matters. -pub fn capture_outer_and_inner<'a>( - content: &'a str, - regex: &regex::Regex, - names: &[&str], -) -> Vec<(regex::Match<'a>, regex::Match<'a>)> { - regex - .captures_iter(content) - .filter_map(|cap| { - let cap_match = names.iter().find_map(|name| cap.name(name)); - cap_match.and_then(|m| cap.get(0).map(|outer| (outer.to_owned(), m))) - }) - .collect() -} - -#[cfg(any(test, feature = "test-utils"))] -// -pub fn touch(path: &std::path::Path) -> std::io::Result<()> { - match std::fs::OpenOptions::new().create(true).write(true).truncate(false).open(path) { - Ok(_) => Ok(()), - Err(e) => Err(e), - } -} - -#[cfg(any(test, feature = "test-utils"))] -pub fn mkdir_or_touch(tmp: &std::path::Path, paths: &[&str]) { - for path in paths { - if let Some(parent) = Path::new(path).parent() { - std::fs::create_dir_all(tmp.join(parent)).unwrap(); - } - if path.ends_with(".sol") { - let path = tmp.join(path); - touch(&path).unwrap(); - } else { - let path: PathBuf = tmp.join(path); - std::fs::create_dir_all(path).unwrap(); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs::{create_dir_all, File}; - - #[test] - fn can_find_different_case() { - let tmp_dir = tempdir("out").unwrap(); - let path = tmp_dir.path().join("forge-std"); - create_dir_all(&path).unwrap(); - let existing = path.join("Test.sol"); - let non_existing = path.join("test.sol"); - fs::write(&existing, b"").unwrap(); - - #[cfg(target_os = "linux")] - assert!(!non_existing.exists()); - - let found = find_case_sensitive_existing_file(&non_existing).unwrap(); - assert_eq!(found, existing); - } - - #[test] - fn can_create_parent_dirs_with_ext() { - let tmp_dir = tempdir("out").unwrap(); - let path = tmp_dir.path().join("IsolationModeMagic.sol/IsolationModeMagic.json"); - create_parent_dir_all(&path).unwrap(); -
assert!(path.parent().unwrap().is_dir()); - } - - #[test] - fn can_create_parent_dirs_versioned() { - let tmp_dir = tempdir("out").unwrap(); - let path = tmp_dir.path().join("IVersioned.sol/IVersioned.0.8.16.json"); - create_parent_dir_all(&path).unwrap(); - assert!(path.parent().unwrap().is_dir()); - let path = tmp_dir.path().join("IVersioned.sol/IVersioned.json"); - create_parent_dir_all(&path).unwrap(); - assert!(path.parent().unwrap().is_dir()); - } - - #[test] - fn can_determine_local_paths() { - assert!(is_local_source_name(&[""], "./local/contract.sol")); - assert!(is_local_source_name(&[""], "../local/contract.sol")); - assert!(!is_local_source_name(&[""], "/ds-test/test.sol")); - - let tmp_dir = tempdir("contracts").unwrap(); - let dir = tmp_dir.path().join("ds-test"); - create_dir_all(&dir).unwrap(); - File::create(dir.join("test.sol")).unwrap(); - - assert!(!is_local_source_name(&[tmp_dir.path()], "ds-test/test.sol")); - } - - #[test] - fn can_find_solidity_sources() { - let tmp_dir = tempdir("contracts").unwrap(); - - let file_a = tmp_dir.path().join("a.sol"); - let file_b = tmp_dir.path().join("a.sol"); - let nested = tmp_dir.path().join("nested"); - let file_c = nested.join("c.sol"); - let nested_deep = nested.join("deep"); - let file_d = nested_deep.join("d.sol"); - File::create(&file_a).unwrap(); - File::create(&file_b).unwrap(); - create_dir_all(nested_deep).unwrap(); - File::create(&file_c).unwrap(); - File::create(&file_d).unwrap(); - - let files: HashSet<_> = sol_source_files(tmp_dir.path()).into_iter().collect(); - let expected: HashSet<_> = [file_a, file_b, file_c, file_d].into(); - assert_eq!(files, expected); - } - - #[test] - fn can_parse_curly_bracket_imports() { - let s = - r#"import {ReentrancyGuard} from "@openzeppelin/contracts/utils/ReentrancyGuard.sol";"#; - let imports: Vec<_> = find_import_paths(s).map(|m| m.as_str()).collect(); - assert_eq!(imports, vec!["@openzeppelin/contracts/utils/ReentrancyGuard.sol"]) - } - - #[test] - fn can_find_single_quote_imports() { - let content = r" -// SPDX-License-Identifier: MIT -pragma solidity 0.8.6; - -import '@openzeppelin/contracts/access/Ownable.sol'; -import '@openzeppelin/contracts/utils/Address.sol'; - -import './../interfaces/IJBDirectory.sol'; -import './../libraries/JBTokens.sol'; - "; - let imports: Vec<_> = find_import_paths(content).map(|m| m.as_str()).collect(); - - assert_eq!( - imports, - vec![ - "@openzeppelin/contracts/access/Ownable.sol", - "@openzeppelin/contracts/utils/Address.sol", - "./../interfaces/IJBDirectory.sol", - "./../libraries/JBTokens.sol", - ] - ); - } - - #[test] - fn can_find_import_paths() { - let s = r#"//SPDX-License-Identifier: Unlicense -pragma solidity ^0.8.0; -import "hardhat/console.sol"; -import "../contract/Contract.sol"; -import { T } from "../Test.sol"; -import { T } from '../Test2.sol'; -"#; - assert_eq!( - vec!["hardhat/console.sol", "../contract/Contract.sol", "../Test.sol", "../Test2.sol"], - find_import_paths(s).map(|m| m.as_str()).collect::>() - ); - } - #[test] - fn can_find_version() { - let s = r"//SPDX-License-Identifier: Unlicense -pragma solidity ^0.8.0; -"; - assert_eq!(Some("^0.8.0"), find_version_pragma(s).map(|s| s.as_str())); - } - - #[test] - fn can_normalize_solidity_import_path() { - let dir = tempfile::tempdir().unwrap(); - let dir_path = dir.path(); - - // File structure: - // - // `dir_path` - // └── src (`cwd`) - // ├── Token.sol - // └── common - // └── Burnable.sol - - fs::create_dir_all(dir_path.join("src/common")).unwrap(); - 
fs::write(dir_path.join("src/Token.sol"), "").unwrap(); - fs::write(dir_path.join("src/common/Burnable.sol"), "").unwrap(); - - // assume that the import path is specified in Token.sol - let cwd = dir_path.join("src"); - - assert_eq!( - normalize_solidity_import_path(&cwd, "./common/Burnable.sol".as_ref()).unwrap(), - dir_path.join("src/common/Burnable.sol"), - ); - - assert!(normalize_solidity_import_path(&cwd, "./common/Pausable.sol".as_ref()).is_err()); - } - - // This test is exclusive to unix because creating a symlink is a privileged action on Windows. - // https://doc.rust-lang.org/std/os/windows/fs/fn.symlink_dir.html#limitations - #[test] - #[cfg(unix)] - fn can_normalize_solidity_import_path_symlink() { - let dir = tempfile::tempdir().unwrap(); - let dir_path = dir.path(); - - // File structure: - // - // `dir_path` - // ├── dependency - // │   └── Math.sol - // └── project - // ├── node_modules - // │   └── dependency -> symlink to actual 'dependency' directory - // └── src (`cwd`) - // └── Token.sol - - fs::create_dir_all(dir_path.join("project/src")).unwrap(); - fs::write(dir_path.join("project/src/Token.sol"), "").unwrap(); - fs::create_dir(dir_path.join("project/node_modules")).unwrap(); - - fs::create_dir(dir_path.join("dependency")).unwrap(); - fs::write(dir_path.join("dependency/Math.sol"), "").unwrap(); - - std::os::unix::fs::symlink( - dir_path.join("dependency"), - dir_path.join("project/node_modules/dependency"), - ) - .unwrap(); - - // assume that the import path is specified in Token.sol - let cwd = dir_path.join("project/src"); - - assert_eq!( - normalize_solidity_import_path(&cwd, "../node_modules/dependency/Math.sol".as_ref()) - .unwrap(), - dir_path.join("project/node_modules/dependency/Math.sol"), - ); - } - - #[test] - fn can_clean_solidity_path() { - let clean_solidity_path = |s: &str| clean_solidity_path(s.as_ref()); - assert_eq!(clean_solidity_path("a"), PathBuf::from("a")); - assert_eq!(clean_solidity_path("./a"), PathBuf::from("a")); - assert_eq!(clean_solidity_path("../a"), PathBuf::from("../a")); - assert_eq!(clean_solidity_path("/a/"), PathBuf::from("/a")); - assert_eq!(clean_solidity_path("//a"), PathBuf::from("/a")); - assert_eq!(clean_solidity_path("a/b"), PathBuf::from("a/b")); - assert_eq!(clean_solidity_path("a//b"), PathBuf::from("a/b")); - assert_eq!(clean_solidity_path("/a/b"), PathBuf::from("/a/b")); - assert_eq!(clean_solidity_path("a/./b"), PathBuf::from("a/b")); - assert_eq!(clean_solidity_path("a/././b"), PathBuf::from("a/b")); - assert_eq!(clean_solidity_path("/a/../b"), PathBuf::from("/b")); - assert_eq!(clean_solidity_path("a/./../b/."), PathBuf::from("b")); - assert_eq!(clean_solidity_path("a/b/c"), PathBuf::from("a/b/c")); - assert_eq!(clean_solidity_path("a/b/../c"), PathBuf::from("a/c")); - assert_eq!(clean_solidity_path("a/b/../../c"), PathBuf::from("c")); - assert_eq!(clean_solidity_path("a/b/../../../c"), PathBuf::from("../c")); - assert_eq!( - clean_solidity_path("a/../b/../../c/./Token.sol"), - PathBuf::from("../c/Token.sol") - ); - } - - #[test] - fn can_find_ancestor() { - let a = Path::new("/foo/bar/bar/test.txt"); - let b = Path::new("/foo/bar/foo/example/constract.sol"); - let expected = Path::new("/foo/bar"); - assert_eq!(common_ancestor(a, b).unwrap(), expected.to_path_buf()) - } - - #[test] - fn no_common_ancestor_path() { - let a = Path::new("/foo/bar"); - let b = Path::new("./bar/foo"); - assert!(common_ancestor(a, b).is_none()); - } - - #[test] - fn can_find_all_ancestor() { - let a = 
Path::new("/foo/bar/foo/example.txt"); - let b = Path::new("/foo/bar/foo/test.txt"); - let c = Path::new("/foo/bar/bar/foo/bar"); - let expected = Path::new("/foo/bar"); - let paths = vec![a, b, c]; - assert_eq!(common_ancestor_all(paths).unwrap(), expected.to_path_buf()) - } -} diff --git a/src/artifacts/mod.rs b/src/artifacts/mod.rs new file mode 100644 index 00000000..0156730a --- /dev/null +++ b/src/artifacts/mod.rs @@ -0,0 +1 @@ +pub mod zksolc; diff --git a/crates/artifacts/zksolc/src/contract.rs b/src/artifacts/zksolc/contract.rs similarity index 94% rename from crates/artifacts/zksolc/src/contract.rs rename to src/artifacts/zksolc/contract.rs index 39d58677..75035c9e 100644 --- a/crates/artifacts/zksolc/src/contract.rs +++ b/src/artifacts/zksolc/contract.rs @@ -1,5 +1,5 @@ //! Contract related types. -use crate::EraVM; +use crate::artifacts::zksolc::EraVM; use alloy_json_abi::JsonAbi; use foundry_compilers_artifacts_solc::{ Bytecode, BytecodeObject, CompactBytecode, CompactContractBytecode, CompactContractBytecodeCow, @@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize}; use std::{borrow::Cow, collections::BTreeMap}; /// Represents a compiled solidity contract -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)] +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct Contract { pub abi: Option, @@ -23,7 +23,7 @@ pub struct Contract { #[serde(default, skip_serializing_if = "Option::is_none")] pub ir_optimized: Option, /// The contract storage layout. - #[serde(default, skip_serializing_if = "StorageLayout::is_empty")] + #[serde(default, skip_serializing_if = "storage_layout_is_empty")] pub storage_layout: StorageLayout, /// The contract EraVM bytecode hash. #[serde(default, skip_serializing_if = "Option::is_none")] @@ -39,6 +39,10 @@ pub struct Contract { pub missing_libraries: Vec, } +fn storage_layout_is_empty(storage_layout: &StorageLayout) -> bool { + storage_layout.storage.is_empty() && storage_layout.types.is_empty() +} + impl Contract { pub fn is_unlinked(&self) -> bool { self.hash.is_none() || !self.missing_libraries.is_empty() diff --git a/crates/artifacts/zksolc/src/error.rs b/src/artifacts/zksolc/error.rs similarity index 100% rename from crates/artifacts/zksolc/src/error.rs rename to src/artifacts/zksolc/error.rs diff --git a/crates/artifacts/zksolc/src/lib.rs b/src/artifacts/zksolc/mod.rs similarity index 99% rename from crates/artifacts/zksolc/src/lib.rs rename to src/artifacts/zksolc/mod.rs index 0c868e74..4f7bd237 100644 --- a/crates/artifacts/zksolc/src/lib.rs +++ b/src/artifacts/zksolc/mod.rs @@ -18,7 +18,7 @@ use self::{contract::Contract, error::Error}; /// file -> (contract name -> Contract) pub type Contracts = FileToContractsMap; -/// Output type `solc` produces +/// Output type `zksolc` produces #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Default)] pub struct CompilerOutput { #[serde(default, skip_serializing_if = "Vec::is_empty")] diff --git a/crates/artifacts/zksolc/src/output_selection.rs b/src/artifacts/zksolc/output_selection.rs similarity index 100% rename from crates/artifacts/zksolc/src/output_selection.rs rename to src/artifacts/zksolc/output_selection.rs diff --git a/src/compilers/compilers/mod.rs b/src/compilers/compilers/mod.rs new file mode 100644 index 00000000..0156730a --- /dev/null +++ b/src/compilers/compilers/mod.rs @@ -0,0 +1 @@ +pub mod zksolc; diff --git a/crates/compilers/src/compilers/zksolc/input.rs b/src/compilers/compilers/zksolc/input.rs similarity index 99% 
rename from crates/compilers/src/compilers/zksolc/input.rs rename to src/compilers/compilers/zksolc/input.rs index 3d6f5861..3b96219e 100644 --- a/crates/compilers/src/compilers/zksolc/input.rs +++ b/src/compilers/compilers/zksolc/input.rs @@ -2,7 +2,7 @@ use super::{ settings::{ZkSolcError, ZkSolcSettings, ZkSolcWarning}, ZkSettings, }; -use crate::{ +use foundry_compilers::{ compilers::{solc::SolcLanguage, CompilerInput}, solc, }; @@ -14,6 +14,7 @@ use std::{ collections::HashSet, path::{Path, PathBuf}, }; +use tracing::warn; #[derive(Debug, Clone, Serialize)] pub struct ZkSolcVersionedInput { diff --git a/crates/compilers/src/compilers/zksolc/mod.rs b/src/compilers/compilers/zksolc/mod.rs similarity index 91% rename from crates/compilers/src/compilers/zksolc/mod.rs rename to src/compilers/compilers/zksolc/mod.rs index 3739d882..da94baa8 100644 --- a/crates/compilers/src/compilers/zksolc/mod.rs +++ b/src/compilers/compilers/zksolc/mod.rs @@ -1,14 +1,16 @@ use self::input::{ZkSolcInput, ZkSolcVersionedInput}; -use crate::{ +use crate::artifacts::zksolc::{ + contract::Contract, error::Error, CompilerOutput as ZkCompilerOutput, +}; +use alloy_json_abi::JsonAbi; +use foundry_compilers::{ error::{Result, SolcError}, resolver::parse::SolData, solc::SolcCompiler, - CompilationError, Compiler, CompilerVersion, + CompilationError, Compiler, CompilerContract, CompilerOutput, CompilerVersion, }; use foundry_compilers_artifacts::{ - solc::error::SourceLocation, - zksolc::{error::Error, CompilerOutput}, - Severity, SolcLanguage, + solc::error::SourceLocation, BytecodeObject, Severity, SolcLanguage, }; use itertools::Itertools; @@ -21,14 +23,13 @@ use std::{ str::FromStr, }; -#[cfg(feature = "async")] use std::{ fs::{self, create_dir_all, set_permissions, File}, io::Write, }; +use tracing::{debug, instrument, trace}; #[cfg(target_family = "unix")] -#[cfg(feature = "async")] use std::os::unix::fs::PermissionsExt; pub mod input; @@ -39,6 +40,42 @@ pub const ZKSOLC: &str = "zksolc"; pub const ZKSYNC_SOLC_RELEASE: Version = Version::new(1, 0, 1); pub const ZKSOLC_VERSION: Version = Version::new(1, 5, 7); +#[cfg(test)] +macro_rules! 
take_solc_installer_lock { + ($lock:ident) => { + let lock_path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join(".lock"); + let lock_file = std::fs::OpenOptions::new() + .read(true) + .write(true) + .create(true) + .truncate(false) + .open(lock_path) + .unwrap(); + let mut lock = fd_lock::RwLock::new(lock_file); + let $lock = lock.write().unwrap(); + }; +} + +impl CompilerContract for Contract { + fn abi_ref(&self) -> Option<&JsonAbi> { + self.abi.as_ref() + } + fn bin_ref(&self) -> Option<&BytecodeObject> { + if let Some(ref eravm) = self.eravm { + eravm.bytecode.as_ref() + } else { + None + } + } + fn bin_runtime_ref(&self) -> Option<&BytecodeObject> { + if let Some(ref eravm) = self.eravm { + eravm.bytecode.as_ref() + } else { + None + } + } +} + #[derive(Debug, Clone, Serialize)] enum ZkSolcOS { LinuxAMD64, @@ -99,19 +136,23 @@ impl Default for ZkSolcCompiler { impl Compiler for ZkSolcCompiler { type Input = ZkSolcVersionedInput; type CompilationError = Error; + type CompilerContract = Contract; type ParsedSource = SolData; type Settings = ZkSolcSettings; type Language = SolcLanguage; fn compile( &self, - _input: &Self::Input, - ) -> Result> { - // This method cannot be implemented until CompilerOutput is decoupled from - // evm Contract - panic!( - "`Compiler::compile` not supported for `ZkSolcCompiler`, should call ZkSolc::compile()" - ); + input: &Self::Input, + ) -> Result> { + let zksolc = self.zksolc(&input)?; + + let zk_output = zksolc.compile(&input.input)?; + Ok(CompilerOutput { + sources: zk_output.sources, + errors: zk_output.errors, + contracts: zk_output.contracts, + }) } // NOTE: This is used in the context of matching source files to compiler version so @@ -154,7 +195,7 @@ impl ZkSolcCompiler { SolcCompiler::Specific(solc) => Some(solc.solc.clone()), SolcCompiler::AutoDetect => { #[cfg(test)] - crate::take_solc_installer_lock!(_lock); + take_solc_installer_lock!(_lock); let solc_version_without_metadata = format!( "{}.{}.{}", @@ -165,7 +206,6 @@ impl ZkSolcCompiler { if let Some(solc) = maybe_solc { Some(solc) } else { - #[cfg(feature = "async")] { let installed_solc_path = ZkSolc::solc_blocking_install(&solc_version_without_metadata)?; @@ -295,13 +335,13 @@ impl ZkSolc { } /// Compiles with `--standard-json` and deserializes the output as [`CompilerOutput`]. - pub fn compile(&self, input: &ZkSolcInput) -> Result { + pub fn compile(&self, input: &ZkSolcInput) -> Result { // If solc is zksync solc, override the returned version to put the complete zksolc one let output = self.compile_output(input)?; // Only run UTF-8 validation once. 
let output = std::str::from_utf8(&output).map_err(|_| SolcError::InvalidUtf8)?; - let mut compiler_output: CompilerOutput = serde_json::from_str(output)?; + let mut compiler_output: ZkCompilerOutput = serde_json::from_str(output)?; // Add zksync version so that there's some way to identify if zksync solc was used // by looking at build info @@ -415,7 +455,6 @@ impl ZkSolc { } /// Install zksolc version and block the thread - #[cfg(feature = "async")] pub fn blocking_install(version: &Version) -> Result { let os = get_operating_system()?; let compiler_prefix = os.get_zksolc_prefix(); @@ -457,7 +496,6 @@ impl ZkSolc { } /// Install zksync solc version and block the thread - #[cfg(feature = "async")] pub fn solc_blocking_install(version_str: &str) -> Result { let os = get_operating_system()?; let solc_os_namespace = os.get_solc_prefix(); @@ -556,7 +594,6 @@ impl CompilationError for Error { } } -#[cfg(feature = "async")] fn compiler_blocking_install( compiler_path: PathBuf, lock_path: PathBuf, @@ -622,7 +659,6 @@ fn compiler_blocking_install( } /// Creates the file and locks it exclusively, this will block if the file is currently locked -#[cfg(feature = "async")] fn try_lock_file(lock_path: PathBuf) -> Result { use fs4::FileExt; let _lock_file = std::fs::OpenOptions::new() @@ -637,13 +673,11 @@ fn try_lock_file(lock_path: PathBuf) -> Result { } /// Represents a lockfile that's removed once dropped -#[cfg(feature = "async")] struct LockFile { _lock_file: File, lock_path: PathBuf, } -#[cfg(feature = "async")] impl Drop for LockFile { fn drop(&mut self) { let _ = fs::remove_file(&self.lock_path); @@ -651,7 +685,6 @@ impl Drop for LockFile { } /// Returns the lockfile to use for a specific file -#[cfg(feature = "async")] fn lock_file_path(compiler: &str, version: &str) -> PathBuf { ZkSolc::compilers_dir() .expect("could not detect zksolc compilers directory") @@ -662,7 +695,7 @@ fn lock_file_path(compiler: &str, version: &str) -> PathBuf { mod tests { use similar_asserts::assert_eq; - use crate::solc::Solc; + use foundry_compilers::solc::Solc; use super::*; @@ -670,7 +703,7 @@ mod tests { let zksolc_path = ZkSolc::get_path_for_version(&ZKSOLC_VERSION).unwrap(); let solc_version = "0.8.27"; - crate::take_solc_installer_lock!(_lock); + take_solc_installer_lock!(_lock); let maybe_solc = ZkSolc::find_solc_installed_version(solc_version).unwrap(); let solc_path = if let Some(solc) = maybe_solc { solc @@ -714,7 +747,7 @@ mod tests { #[test] fn zksolc_compile_works() { - let input = include_str!("../../../../../test-data/zksync/in/compiler-in-1.json"); + let input = include_str!("../../../../test-data/zksync/in/compiler-in-1.json"); let input: ZkSolcInput = serde_json::from_str(input).unwrap(); let out = zksolc().compile(&input).unwrap(); assert!(!out.has_error()); @@ -723,7 +756,7 @@ mod tests { #[test] fn zksolc_can_compile_with_remapped_links() { let input: ZkSolcInput = serde_json::from_str(include_str!( - "../../../../../test-data/zksync/library-remapping-in.json" + "../../../../test-data/zksync/library-remapping-in.json" )) .unwrap(); let out = zksolc().compile(&input).unwrap(); @@ -736,7 +769,7 @@ mod tests { #[test] fn zksolc_can_compile_with_remapped_links_temp_dir() { let input: ZkSolcInput = serde_json::from_str(include_str!( - "../../../../../test-data/zksync/library-remapping-in-2.json" + "../../../../test-data/zksync/library-remapping-in-2.json" )) .unwrap(); let out = zksolc().compile(&input).unwrap(); diff --git a/crates/compilers/src/compilers/zksolc/settings.rs 
b/src/compilers/compilers/zksolc/settings.rs similarity index 98% rename from crates/compilers/src/compilers/zksolc/settings.rs rename to src/compilers/compilers/zksolc/settings.rs index 76c228c5..52b82160 100644 --- a/crates/compilers/src/compilers/zksolc/settings.rs +++ b/src/compilers/compilers/zksolc/settings.rs @@ -1,11 +1,10 @@ -use crate::{ +use crate::artifacts::zksolc::output_selection::OutputSelection as ZkOutputSelection; +use foundry_compilers::{ artifacts::{serde_helpers, EvmVersion, Libraries}, compilers::CompilerSettings, - solc, OutputSelection, -}; -use foundry_compilers_artifacts::{ - remappings::Remapping, zksolc::output_selection::OutputSelection as ZkOutputSelection, + solc, }; +use foundry_compilers_artifacts::{remappings::Remapping, solc::output_selection::OutputSelection}; use semver::Version; use serde::{Deserialize, Serialize}; use std::{ diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs new file mode 100644 index 00000000..ab2d1208 --- /dev/null +++ b/src/compilers/mod.rs @@ -0,0 +1,2 @@ +pub mod compilers; +pub mod zksync; diff --git a/crates/compilers/tests/mocked.rs b/src/compilers/tests/mocked.rs similarity index 100% rename from crates/compilers/tests/mocked.rs rename to src/compilers/tests/mocked.rs diff --git a/crates/compilers/tests/project.rs b/src/compilers/tests/project.rs similarity index 100% rename from crates/compilers/tests/project.rs rename to src/compilers/tests/project.rs diff --git a/crates/compilers/tests/zksync.rs b/src/compilers/tests/zksync.rs similarity index 100% rename from crates/compilers/tests/zksync.rs rename to src/compilers/tests/zksync.rs diff --git a/crates/compilers/src/zksync/artifact_output/mod.rs b/src/compilers/zksync/artifact_output/mod.rs similarity index 100% rename from crates/compilers/src/zksync/artifact_output/mod.rs rename to src/compilers/zksync/artifact_output/mod.rs diff --git a/src/compilers/zksync/artifact_output/zk.rs b/src/compilers/zksync/artifact_output/zk.rs new file mode 100644 index 00000000..66a3c776 --- /dev/null +++ b/src/compilers/zksync/artifact_output/zk.rs @@ -0,0 +1,146 @@ +use crate::artifacts::zksolc::contract::Contract; +use alloy_json_abi::JsonAbi; +use foundry_compilers::{ + artifacts::{DevDoc, SourceFile, StorageLayout, UserDoc}, + sources::VersionedSourceFile, + ArtifactOutput, +}; +use foundry_compilers_artifacts::solc::{ + CompactBytecode, CompactContract, CompactContractBytecode, CompactContractBytecodeCow, + CompactDeployedBytecode, +}; +use serde::{Deserialize, Serialize}; +use std::{borrow::Cow, collections::BTreeMap, path::Path}; + +mod bytecode; +pub use bytecode::ZkArtifactBytecode; + +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct ZkContractArtifact { + pub abi: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub bytecode: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub assembly: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub metadata: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub storage_layout: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub userdoc: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub devdoc: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub ir_optimized: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub hash: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub 
factory_dependencies: Option<BTreeMap<String, String>>, + /// The identifier of the source file + #[serde(default, skip_serializing_if = "Option::is_none")] + pub id: Option<u32>, +} + +impl ZkContractArtifact { + pub fn missing_libraries(&self) -> Option<&Vec<String>> { + self.bytecode.as_ref().map(|bc| &bc.missing_libraries) + } +} + +// CompactContract variants +// TODO: for zkEvm, the distinction between bytecode and deployed_bytecode makes little sense, +// and there are some fields that the output doesn't provide (e.g.: source_map) +// However, we implement these because we get the Artifact trait and can reuse lots of +// the crate's helpers without needing to duplicate everything. Maybe there's a way +// we can get all these without having to add the same bytecode twice on each struct. +// Ideally the Artifacts trait would not be coupled to a specific Contract type +impl<'a> From<&'a ZkContractArtifact> for CompactContractBytecodeCow<'a> { + fn from(artifact: &'a ZkContractArtifact) -> Self { + // TODO: artifact.abi might be None; we need to get this field from solc_metadata + CompactContractBytecodeCow { + abi: artifact.abi.as_ref().map(Cow::Borrowed), + bytecode: artifact.bytecode.clone().map(|b| Cow::Owned(CompactBytecode::from(b))), + deployed_bytecode: artifact + .bytecode + .clone() + .map(|b| Cow::Owned(CompactDeployedBytecode::from(b))), + } + } +} + +impl From<ZkContractArtifact> for CompactContractBytecode { + fn from(c: ZkContractArtifact) -> Self { + Self { + abi: c.abi.map(Into::into), + deployed_bytecode: c.bytecode.clone().map(|b| b.into()), + bytecode: c.bytecode.clone().map(|b| b.into()), + } + } +} + +impl From<ZkContractArtifact> for CompactContract { + fn from(c: ZkContractArtifact) -> Self { + // TODO: c.abi might be None; we need to get this field from solc_metadata + Self { + bin: c.bytecode.clone().map(|b| b.object), + bin_runtime: c.bytecode.clone().map(|b| b.object), + abi: c.abi, + } + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq, Default)] +pub struct ZkArtifactOutput(); + +impl ArtifactOutput for ZkArtifactOutput { + type Artifact = ZkContractArtifact; + type CompilerContract = Contract; + + fn contract_to_artifact( + &self, + _file: &Path, + _name: &str, + contract: Self::CompilerContract, + source_file: Option<&SourceFile>, + ) -> Self::Artifact { + let Contract { + abi, + metadata, + userdoc, + devdoc, + storage_layout, + eravm, + ir_optimized, + hash, + factory_dependencies, + missing_libraries, + } = contract; + + let (bytecode, assembly) = + eravm.map(|eravm| (eravm.bytecode, eravm.assembly)).unwrap_or_else(|| (None, None)); + let bytecode = bytecode.map(|object| ZkArtifactBytecode { object, missing_libraries }); + + ZkContractArtifact { + abi, + hash, + factory_dependencies, + storage_layout: Some(storage_layout), + bytecode, + assembly, + metadata, + userdoc: Some(userdoc), + devdoc: Some(devdoc), + ir_optimized, + id: source_file.as_ref().map(|s| s.id), + } + } + + fn standalone_source_file_to_artifact( + &self, + _path: &Path, + _file: &VersionedSourceFile, + ) -> Option<Self::Artifact> { + None + } +} diff --git a/crates/compilers/src/zksync/artifact_output/zk/bytecode.rs b/src/compilers/zksync/artifact_output/zk/bytecode.rs similarity index 90% rename from crates/compilers/src/zksync/artifact_output/zk/bytecode.rs rename to src/compilers/zksync/artifact_output/zk/bytecode.rs index 852c674c..c2b18065 100644 --- a/crates/compilers/src/zksync/artifact_output/zk/bytecode.rs +++ b/src/compilers/zksync/artifact_output/zk/bytecode.rs @@ -1,7 +1,8 @@ use std::collections::BTreeMap; +use crate::artifacts::zksolc::contract::Contract; use
foundry_compilers_artifacts::{ - zksolc::contract::Contract, BytecodeObject, CompactBytecode, CompactDeployedBytecode, Offsets, + BytecodeObject, CompactBytecode, CompactDeployedBytecode, Offsets, }; use serde::{Deserialize, Serialize}; diff --git a/src/compilers/zksync/mod.rs b/src/compilers/zksync/mod.rs new file mode 100644 index 00000000..cc928097 --- /dev/null +++ b/src/compilers/zksync/mod.rs @@ -0,0 +1 @@ +pub mod artifact_output; diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 00000000..2af8d8a0 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,2 @@ +pub mod artifacts; +pub mod compilers; diff --git a/test-data/zksync/yul-sample/SimpleStore.yul b/test-data/zksync/yul-sample/SimpleStore.yul deleted file mode 100644 index cef5d9bc..00000000 --- a/test-data/zksync/yul-sample/SimpleStore.yul +++ /dev/null @@ -1,11 +0,0 @@ -object "SimpleStore" { - code { - datacopy(0, dataoffset("SimpleStore_deployed"), datasize("SimpleStore_deployed")) - return(0, datasize("SimpleStore_deployed")) - } - object "SimpleStore_deployed" { - code { - calldatacopy(0, 0, 36) // write calldata to memory - } - } -} diff --git a/test-data/zksync/yul-sample/SimpleStore.yul.json b/test-data/zksync/yul-sample/SimpleStore.yul.json new file mode 100644 index 00000000..efe0e56e --- /dev/null +++ b/test-data/zksync/yul-sample/SimpleStore.yul.json @@ -0,0 +1 @@ +{"abi":null,"bytecode":{"object":"00000001002001900000000c0000c13d000000002101043c000000000010043f000000200100043d0000000701100197000000000202043b0000000802200197000000000112019f000000200010043f0000000001000019000000000001042d000000240000043f0000002001000039000001000010044300000120000004430000000601000041000000130001042e0000001200000432000000130001042e0000001400010430000000000000000000000000000000000000000000000000000000020000000000000000000000000000004000000100000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007398c50ed69a6e38b1620032291e0aa1c6263ff4cba1019dacb3863c23a72d6d"},"methodIdentifiers":{},"storageLayout":{"storage":[],"types":{}},"userdoc":{},"devdoc":{},"hash":"0100000b1f6d7fad6174398c18c808e0bf41101c231ce71955dd9a823462ed8e","factoryDependencies":{},"id":0} \ No newline at end of file From e2219d5a7473001ac58c42e4adbfefd389c414a1 Mon Sep 17 00:00:00 2001 From: elfedy Date: Sat, 16 Nov 2024 13:47:38 -0300 Subject: [PATCH 2/2] Make tests work + impl placeholder restrictions --- Cargo.toml | 17 +- src/compilers/compilers/zksolc/mod.rs | 21 +- src/compilers/compilers/zksolc/settings.rs | 18 +- src/compilers/tests/mocked.rs | 132 - src/compilers/tests/project.rs | 4026 ----------------- test-data/zksync/yul-sample/SimpleStore.yul | 11 + .../tests/zksync.rs => tests/zksync_test.rs | 132 +- 7 files changed, 124 insertions(+), 4233 deletions(-) delete mode 100644 src/compilers/tests/mocked.rs delete mode 100644 src/compilers/tests/project.rs create mode 100644 test-data/zksync/yul-sample/SimpleStore.yul rename src/compilers/tests/zksync.rs => tests/zksync_test.rs (81%) diff --git a/Cargo.toml b/Cargo.toml index e9c520bd..7d1cdaca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,10 +28,10 @@ unused-must-use = "deny" all = "warn" [dependencies] -foundry-compilers = { path = "../foundry-upstream/compilers/crates/compilers", features = ["svm-solc"] } -foundry-compilers-artifacts = { path = "../foundry-upstream/compilers/crates/artifacts/artifacts", version = "0.11.6" } 
-foundry-compilers-artifacts-solc = { path = "../foundry-upstream/compilers/crates/artifacts/solc", version = "0.11.6" } -foundry-compilers-core = { path = "../foundry-upstream/compilers/crates/core", version = "0.11.6" } +foundry-compilers = { git = "https://github.com/elfedy/compilers.git", branch = "elfedy-compiler-output", features = ["svm-solc"] } +foundry-compilers-artifacts = { git = "https://github.com/elfedy/compilers.git", branch = "elfedy-compiler-output" } +foundry-compilers-artifacts-solc = { git = "https://github.com/elfedy/compilers.git", branch = "elfedy-compiler-output" } +foundry-compilers-core = { git = "https://github.com/elfedy/compilers.git", branch = "elfedy-compiler-output" } alloy-json-abi = { version = "0.8", features = ["serde_json"] } alloy-primitives = { version = "0.8", features = ["serde", "rand"] } @@ -70,7 +70,14 @@ reqwest = "0.12.9" fd-lock = "4.0.2" [features] - +# TODO: see how tests are run without this as a default feature +default = ["project-util"] project-util = [ "foundry-compilers-core/project-util", + "foundry-compilers/project-util", ] + +[[test]] +name = "zksync_test" +path = "tests/zksync_test.rs" +required-features = ["project-util"] diff --git a/src/compilers/compilers/zksolc/mod.rs b/src/compilers/compilers/zksolc/mod.rs index da94baa8..792a8b73 100644 --- a/src/compilers/compilers/zksolc/mod.rs +++ b/src/compilers/compilers/zksolc/mod.rs @@ -17,7 +17,7 @@ use itertools::Itertools; use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::BTreeSet, + collections::{BTreeMap, BTreeSet}, path::{Path, PathBuf}, process::{Command, Output, Stdio}, str::FromStr, @@ -145,13 +145,28 @@ impl Compiler for ZkSolcCompiler { &self, input: &Self::Input, ) -> Result> { - let zksolc = self.zksolc(&input)?; + let zksolc = self.zksolc(input)?; + + let mut zk_output = zksolc.compile(&input.input)?; + let mut metadata = BTreeMap::new(); + if let Some(solc_version) = zk_output.version.take() { + metadata.insert("solcVersion".to_string(), solc_version); + } + if let Some(solc_long_version) = zk_output.long_version.take() { + metadata.insert("solcLongVersion".to_string(), solc_long_version); + } + if let Some(zk_version) = zk_output.zk_version.take() { + metadata.insert("zksolcVersion".to_string(), zk_version); + } + if let Some(zksync_solc_version) = zk_output.zksync_solc_version { + metadata.insert("zksyncSolcVersion".to_string(), zksync_solc_version.to_string()); + } - let zk_output = zksolc.compile(&input.input)?; Ok(CompilerOutput { sources: zk_output.sources, errors: zk_output.errors, contracts: zk_output.contracts, + metadata, }) } diff --git a/src/compilers/compilers/zksolc/settings.rs b/src/compilers/compilers/zksolc/settings.rs index 52b82160..23a7734d 100644 --- a/src/compilers/compilers/zksolc/settings.rs +++ b/src/compilers/compilers/zksolc/settings.rs @@ -2,7 +2,7 @@ use crate::artifacts::zksolc::output_selection::OutputSelection as ZkOutputSelec use foundry_compilers::{ artifacts::{serde_helpers, EvmVersion, Libraries}, compilers::CompilerSettings, - solc, + solc, CompilerSettingsRestrictions, }; use foundry_compilers_artifacts::{remappings::Remapping, solc::output_selection::OutputSelection}; use semver::Version; @@ -209,7 +209,18 @@ impl Default for ZkSettings { } } +#[derive(Debug, Clone, Copy, Default)] +pub struct ZkSolcRestrictions(); + +impl CompilerSettingsRestrictions for ZkSolcRestrictions { + fn merge(self, _other: Self) -> Option { + None + } +} + impl CompilerSettings for ZkSolcSettings { + type Restrictions = 
ZkSolcRestrictions; + fn update_output_selection(&mut self, _f: impl FnOnce(&mut OutputSelection) + Copy) { // TODO: see how to support this, noop for now //f(&mut self.output_selection) @@ -273,6 +284,11 @@ impl CompilerSettings for ZkSolcSettings { self.cli_settings.include_paths.clone_from(include_paths); self } + + fn satisfies_restrictions(&self, _restrictions: &Self::Restrictions) -> bool { + // TODO + true + } } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] diff --git a/src/compilers/tests/mocked.rs b/src/compilers/tests/mocked.rs deleted file mode 100644 index 34cf8027..00000000 --- a/src/compilers/tests/mocked.rs +++ /dev/null @@ -1,132 +0,0 @@ -//! mocked project tests - -use foundry_compilers::{ - compilers::multi::MultiCompiler, - project_util::{ - mock::{MockProjectGenerator, MockProjectSettings, MockProjectSkeleton}, - TempProject, - }, -}; -use foundry_compilers_core::error::Result; - -// default version to use -const DEFAULT_VERSION: &str = "^0.8.10"; - -struct MockSettings { - settings: MockProjectSettings, - version: &'static str, -} - -impl From for MockSettings { - fn from(settings: MockProjectSettings) -> Self { - Self { settings, version: DEFAULT_VERSION } - } -} -impl From<(MockProjectSettings, &'static str)> for MockSettings { - fn from(input: (MockProjectSettings, &'static str)) -> Self { - Self { settings: input.0, version: input.1 } - } -} - -/// Helper function to run a test and report the used generator if the closure failed. -fn run_mock( - settings: impl Into, - f: impl FnOnce(&mut TempProject, &MockProjectGenerator) -> Result<()>, -) -> TempProject { - let MockSettings { settings, version } = settings.into(); - let gen = MockProjectGenerator::new(&settings); - let mut project = TempProject::dapptools().unwrap(); - let remappings = gen.remappings_at(project.root()); - project.paths_mut().remappings.extend(remappings); - project.mock(&gen, version).unwrap(); - - if let Err(err) = f(&mut project, &gen) { - panic!( - "mock failed: `{}` with mock settings:\n {}", - err, - serde_json::to_string(&gen).unwrap() - ); - } - - project -} - -/// Runs a basic set of tests for the given settings -fn run_basic(settings: impl Into) { - let settings = settings.into(); - let version = settings.version; - run_mock(settings, |project, _| { - project.ensure_no_errors_recompile_unchanged()?; - project.add_basic_source("Dummy", version)?; - project.ensure_changed()?; - Ok(()) - }); -} - -#[test] -fn can_compile_mocked_random() { - run_basic(MockProjectSettings::random()); -} - -// compile a bunch of random projects -#[test] -fn can_compile_mocked_multi() { - for _ in 0..10 { - run_basic(MockProjectSettings::random()); - } -} - -#[test] -fn can_compile_mocked_large() { - run_basic(MockProjectSettings::large()) -} - -#[test] -fn can_compile_mocked_modified() { - run_mock(MockProjectSettings::random(), |project, gen| { - project.ensure_no_errors_recompile_unchanged()?; - // modify a random file - gen.modify_file(gen.used_file_ids().count() / 2, project.paths(), DEFAULT_VERSION)?; - project.ensure_changed()?; - project.artifacts_snapshot()?.assert_artifacts_essentials_present(); - Ok(()) - }); -} - -#[test] -fn can_compile_mocked_modified_all() { - run_mock(MockProjectSettings::random(), |project, gen| { - project.ensure_no_errors_recompile_unchanged()?; - // modify a random file - for id in gen.used_file_ids() { - gen.modify_file(id, project.paths(), DEFAULT_VERSION)?; - project.ensure_changed()?; - project.artifacts_snapshot()?.assert_artifacts_essentials_present(); 
diff --git a/src/compilers/tests/mocked.rs b/src/compilers/tests/mocked.rs deleted file mode 100644 index 34cf8027..00000000 --- a/src/compilers/tests/mocked.rs +++ /dev/null @@ -1,132 +0,0 @@ -//! mocked project tests - -use foundry_compilers::{ - compilers::multi::MultiCompiler, - project_util::{ - mock::{MockProjectGenerator, MockProjectSettings, MockProjectSkeleton}, - TempProject, - }, -}; -use foundry_compilers_core::error::Result; - -// default version to use -const DEFAULT_VERSION: &str = "^0.8.10"; - -struct MockSettings { - settings: MockProjectSettings, - version: &'static str, -} - -impl From<MockProjectSettings> for MockSettings { - fn from(settings: MockProjectSettings) -> Self { - Self { settings, version: DEFAULT_VERSION } - } -} -impl From<(MockProjectSettings, &'static str)> for MockSettings { - fn from(input: (MockProjectSettings, &'static str)) -> Self { - Self { settings: input.0, version: input.1 } - } -} - -/// Helper function to run a test and report the used generator if the closure failed. -fn run_mock( - settings: impl Into<MockSettings>, - f: impl FnOnce(&mut TempProject, &MockProjectGenerator) -> Result<()>, -) -> TempProject { - let MockSettings { settings, version } = settings.into(); - let gen = MockProjectGenerator::new(&settings); - let mut project = TempProject::dapptools().unwrap(); - let remappings = gen.remappings_at(project.root()); - project.paths_mut().remappings.extend(remappings); - project.mock(&gen, version).unwrap(); - - if let Err(err) = f(&mut project, &gen) { - panic!( - "mock failed: `{}` with mock settings:\n {}", - err, - serde_json::to_string(&gen).unwrap() - ); - } - - project -} - -/// Runs a basic set of tests for the given settings -fn run_basic(settings: impl Into<MockSettings>) { - let settings = settings.into(); - let version = settings.version; - run_mock(settings, |project, _| { - project.ensure_no_errors_recompile_unchanged()?; - project.add_basic_source("Dummy", version)?; - project.ensure_changed()?; - Ok(()) - }); - } - -#[test] -fn can_compile_mocked_random() { - run_basic(MockProjectSettings::random()); -} - -// compile a bunch of random projects -#[test] -fn can_compile_mocked_multi() { - for _ in 0..10 { - run_basic(MockProjectSettings::random()); - } -} - -#[test] -fn can_compile_mocked_large() { - run_basic(MockProjectSettings::large()) -} - -#[test] -fn can_compile_mocked_modified() { - run_mock(MockProjectSettings::random(), |project, gen| { - project.ensure_no_errors_recompile_unchanged()?; - // modify a random file - gen.modify_file(gen.used_file_ids().count() / 2, project.paths(), DEFAULT_VERSION)?; - project.ensure_changed()?; - project.artifacts_snapshot()?.assert_artifacts_essentials_present(); - Ok(()) - }); -} - -#[test] -fn can_compile_mocked_modified_all() { - run_mock(MockProjectSettings::random(), |project, gen| { - project.ensure_no_errors_recompile_unchanged()?; - // modify a random file - for id in gen.used_file_ids() { - gen.modify_file(id, project.paths(), DEFAULT_VERSION)?; - project.ensure_changed()?; - project.artifacts_snapshot()?.assert_artifacts_essentials_present(); - } - Ok(()) - }); -} - -// a test useful to manually debug a serialized skeleton -#[test] -fn can_compile_skeleton() { - let mut project = TempProject::<MultiCompiler>::dapptools().unwrap(); - let s = r#"{"files":[{"id":0,"name":"SourceFile0","imports":[{"External":[0,1]},{"External":[3,4]}],"lib_id":null,"emit_artifacts":true},{"id":1,"name":"SourceFile1","imports":[],"lib_id":0,"emit_artifacts":true},{"id":2,"name":"SourceFile2","imports":[],"lib_id":1,"emit_artifacts":true},{"id":3,"name":"SourceFile3","imports":[],"lib_id":2,"emit_artifacts":true},{"id":4,"name":"SourceFile4","imports":[],"lib_id":3,"emit_artifacts":true}],"libraries":[{"name":"Lib0","id":0,"offset":1,"num_files":1},{"name":"Lib1","id":1,"offset":2,"num_files":1},{"name":"Lib2","id":2,"offset":3,"num_files":1},{"name":"Lib3","id":3,"offset":4,"num_files":1}]}"#; - let gen: MockProjectGenerator = serde_json::from_str::<MockProjectSkeleton>(s).unwrap().into(); - let remappings = gen.remappings_at(project.root()); - project.paths_mut().remappings.extend(remappings); - project.mock(&gen, DEFAULT_VERSION).unwrap(); - - // mattsse: helper to show what's being generated - // gen.write_to(&foundry_compilers::ProjectPathsConfig::dapptools("./skeleton").unwrap(), - // DEFAULT_VERSION).unwrap(); - - let compiled = project.compile().unwrap(); - compiled.assert_success(); - assert!(!compiled.is_unchanged()); - for id in gen.used_file_ids() { - gen.modify_file(id, project.paths(), DEFAULT_VERSION).unwrap(); - project.ensure_changed().unwrap(); - project.artifacts_snapshot().unwrap().assert_artifacts_essentials_present(); - } -}
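The helpers above make new cases cheap to add; as a sketch of one more (hypothetical, but built only from items defined in this file, with the tuple `From` impl pairing settings with a custom version string):

    #[test]
    fn can_compile_mocked_large_with_custom_version() {
        // a large generated project pinned to a newer pragma than DEFAULT_VERSION
        run_basic((MockProjectSettings::large(), "^0.8.20"));
    }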
diff --git a/src/compilers/tests/project.rs b/src/compilers/tests/project.rs deleted file mode 100644 index 7a89c463..00000000 --- a/src/compilers/tests/project.rs +++ /dev/null @@ -1,4026 +0,0 @@ -//! project tests - -use alloy_primitives::{Address, Bytes}; -use foundry_compilers::{ - buildinfo::BuildInfo, - cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, - compilers::{ - multi::{ - MultiCompiler, MultiCompilerLanguage, MultiCompilerParsedSource, MultiCompilerSettings, - }, - solc::{Solc, SolcCompiler, SolcLanguage}, - vyper::{Vyper, VyperLanguage, VyperSettings}, - CompilerOutput, - }, - flatten::Flattener, - info::ContractInfo, - project_util::*, - solc::SolcSettings, - take_solc_installer_lock, Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, - ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, TestFileFilter, -}; -use foundry_compilers_artifacts::{ - output_selection::OutputSelection, remappings::Remapping, BytecodeHash, DevDoc, Error, - ErrorDoc, EventDoc, Libraries, MethodDoc, ModelCheckerEngine::CHC, ModelCheckerSettings, - Settings, Severity, SolcInput, UserDoc, UserDocNotice, -}; -use foundry_compilers_core::{ - error::SolcError, - utils::{self, canonicalize, RuntimeOrHandle}, -}; -use once_cell::sync::Lazy; -use semver::Version; -use similar_asserts::assert_eq; -use std::{ - collections::{BTreeMap, BTreeSet, HashMap, HashSet}, - fs::{self}, - io, - path::{Path, PathBuf, MAIN_SEPARATOR}, - str::FromStr, -}; -use svm::{platform, Platform}; - -pub static VYPER: Lazy<Vyper> = Lazy::new(|| { - RuntimeOrHandle::new().block_on(async { - #[cfg(target_family = "unix")] - use std::{fs::Permissions, os::unix::fs::PermissionsExt}; - - take_solc_installer_lock!(_lock); - let path = std::env::temp_dir().join("vyper"); - - if path.exists() { - return Vyper::new(&path).unwrap(); - } - - let url = match platform() { - Platform::MacOsAarch64 => "https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10+commit.91361694.darwin", - Platform::LinuxAmd64 => "https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10+commit.91361694.linux", - Platform::WindowsAmd64 => "https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10+commit.91361694.windows.exe", - _ => panic!("unsupported") - }; - - let res = reqwest::Client::builder().build().unwrap().get(url).send().await.unwrap(); - - assert!(res.status().is_success()); - - let bytes = res.bytes().await.unwrap(); - - std::fs::write(&path, bytes).unwrap(); - - #[cfg(target_family = "unix")] - std::fs::set_permissions(&path, Permissions::from_mode(0o755)).unwrap(); - - Vyper::new(&path).unwrap() - }) -}); - -#[test] -fn can_get_versioned_linkrefs() { - let root = - Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/test-versioned-linkrefs"); - let paths = ProjectPathsConfig::builder() - .sources(root.join("src")) - .lib(root.join("lib")) - .build() - .unwrap(); - - let project = Project::builder() - .paths(paths) - .ephemeral() - .no_artifacts() - .build(Default::default()) - .unwrap(); - project.compile().unwrap().assert_success(); -} - -#[test] -fn can_compile_hardhat_sample() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample"); - let paths = ProjectPathsConfig::builder() - .sources(root.join("contracts")) - .lib(root.join("node_modules")); - let project = TempProject::<MultiCompiler>::new(paths).unwrap(); - - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Greeter").is_some()); - assert!(compiled.find_first("console").is_some()); - compiled.assert_success(); - - // nothing to compile - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Greeter").is_some()); -
assert!(compiled.find_first("console").is_some()); - assert!(compiled.is_unchanged()); - - // delete artifacts - std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Greeter").is_some()); - assert!(compiled.find_first("console").is_some()); - assert!(!compiled.is_unchanged()); -} - -#[test] -fn can_compile_dapp_sample() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::<MultiCompiler>::new(paths).unwrap(); - - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - compiled.assert_success(); - - // nothing to compile - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - assert!(compiled.is_unchanged()); - - let cache = CompilerCache::<SolcSettings>::read(project.cache_path()).unwrap(); - - // delete artifacts - std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - assert!(!compiled.is_unchanged()); - - let updated_cache = CompilerCache::<SolcSettings>::read(project.cache_path()).unwrap(); - assert_eq!(cache, updated_cache); -} - -#[test] -fn can_compile_yul_sample() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/yul-sample"); - let paths = ProjectPathsConfig::builder().sources(root); - let project = TempProject::<MultiCompiler>::new(paths).unwrap(); - - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - assert!(compiled.find_first("SimpleStore").is_some()); - compiled.assert_success(); - - // nothing to compile - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - assert!(compiled.find_first("SimpleStore").is_some()); - assert!(compiled.is_unchanged()); - - let cache = CompilerCache::<SolcSettings>::read(project.cache_path()).unwrap(); - - // delete artifacts - std::fs::remove_dir_all(&project.paths().artifacts).unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - assert!(compiled.find_first("SimpleStore").is_some()); - assert!(!compiled.is_unchanged()); - - let updated_cache = CompilerCache::<SolcSettings>::read(project.cache_path()).unwrap(); - assert_eq!(cache, updated_cache); -} - -#[test] -fn can_compile_configured() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - - let handler = ConfigurableArtifacts { - additional_values: ExtraOutputValues { - metadata: true, - ir: true, - ir_optimized: true, - opcodes: true, - legacy_assembly: true, - ..Default::default() - }, - ..Default::default() - }; - - let settings = handler.solc_settings(); - let project = TempProject::with_artifacts(paths, handler).unwrap().with_solc_settings(settings); - let compiled = project.compile().unwrap(); - let artifact = compiled.find_first("Dapp").unwrap(); - assert!(artifact.metadata.is_some()); - assert!(artifact.raw_metadata.is_some()); - assert!(artifact.ir.is_some()); - assert!(artifact.ir_optimized.is_some()); - assert!(artifact.opcodes.is_some()); - assert!(artifact.legacy_assembly.is_some()); -} - -#[test] -fn can_compile_dapp_detect_changes_in_libs() { - let mut project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let
remapping = project.paths().libraries[0].join("remapping"); - project - .paths_mut() - .remappings - .push(Remapping::from_str(&format!("remapping/={}/", remapping.display())).unwrap()); - - let src = project - .add_source( - "Foo", - r#" - pragma solidity ^0.8.10; - import "remapping/Bar.sol"; - - contract Foo {} - "#, - ) - .unwrap(); - - let lib = project - .add_lib( - "remapping/Bar", - r" - pragma solidity ^0.8.10; - - contract Bar {} - ", - ) - .unwrap(); - - let graph = Graph::<MultiCompilerParsedSource>::resolve(project.paths()).unwrap(); - assert_eq!(graph.files().len(), 2); - assert_eq!(graph.files().clone(), HashMap::from([(src, 0), (lib, 1),])); - - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Foo").is_some()); - assert!(compiled.find_first("Bar").is_some()); - compiled.assert_success(); - - // nothing to compile - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Foo").is_some()); - assert!(compiled.is_unchanged()); - - let cache = CompilerCache::<SolcSettings>::read(&project.paths().cache).unwrap(); - assert_eq!(cache.files.len(), 2); - - // overwrite lib - project - .add_lib( - "remapping/Bar", - r" - pragma solidity ^0.8.10; - - // changed lib - contract Bar {} - ", - ) - .unwrap(); - - let graph = Graph::<MultiCompilerParsedSource>::resolve(project.paths()).unwrap(); - assert_eq!(graph.files().len(), 2); - - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Foo").is_some()); - assert!(compiled.find_first("Bar").is_some()); - // ensure change is detected - assert!(!compiled.is_unchanged()); -}
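The import graph driving this change detection can also be inspected directly; a minimal sketch using only the accessors exercised in these tests (output format illustrative):

    let graph = Graph::<MultiCompilerParsedSource>::resolve(project.paths()).unwrap();
    // Each resolved file gets a node index; edges follow its imports.
    for (path, idx) in graph.files() {
        println!("{} imports nodes {:?}", path.display(), graph.imported_nodes(*idx));
    }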
 -#[test] -fn can_compile_dapp_detect_changes_in_sources() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let src = project - .add_source( - "DssSpell.t", - r#" - pragma solidity ^0.8.10; - import "./DssSpell.t.base.sol"; - - contract DssSpellTest is DssSpellTestBase { } - "#, - ) - .unwrap(); - - let base = project - .add_source( - "DssSpell.t.base", - r" - pragma solidity ^0.8.10; - - contract DssSpellTestBase { - address deployed_spell; - function setUp() public { - deployed_spell = address(0xA867399B43aF7790aC800f2fF3Fa7387dc52Ec5E); - } - } - ", - ) - .unwrap(); - - let graph = Graph::<MultiCompilerParsedSource>::resolve(project.paths()).unwrap(); - assert_eq!(graph.files().len(), 2); - assert_eq!(graph.files().clone(), HashMap::from([(base, 0), (src, 1),])); - assert_eq!(graph.imported_nodes(1).to_vec(), vec![0]); - - let compiled = project.compile().unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("DssSpellTest").is_some()); - assert!(compiled.find_first("DssSpellTestBase").is_some()); - - // nothing to compile - let compiled = project.compile().unwrap(); - assert!(compiled.is_unchanged()); - assert!(compiled.find_first("DssSpellTest").is_some()); - assert!(compiled.find_first("DssSpellTestBase").is_some()); - - let cache = CompilerCache::<SolcSettings>::read(&project.paths().cache).unwrap(); - assert_eq!(cache.files.len(), 2); - - let artifacts = compiled.into_artifacts().collect::<BTreeMap<_, _>>(); - - // overwrite import - let _ = project - .add_source( - "DssSpell.t.base", - r" - pragma solidity ^0.8.10; - - contract DssSpellTestBase { - address deployed_spell; - function setUp() public { - deployed_spell = address(0); - } - } - ", - ) - .unwrap(); - let graph = Graph::<MultiCompilerParsedSource>::resolve(project.paths()).unwrap(); - assert_eq!(graph.files().len(), 2); - - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("DssSpellTest").is_some()); - assert!(compiled.find_first("DssSpellTestBase").is_some()); - // ensure change is detected - assert!(!compiled.is_unchanged()); - - // and all recompiled artifacts are different - for (p, artifact) in compiled.into_artifacts() { - let other = artifacts - .iter() - .find(|(id, _)| id.name == p.name && id.version == p.version && id.source == p.source) - .unwrap() - .1; - assert_ne!(artifact, *other); - } -} - -#[test] -fn can_emit_build_info() { - let mut project = TempProject::<MultiCompiler>::dapptools().unwrap(); - project.project_mut().build_info = true; - project - .add_source( - "A", - r#" -pragma solidity ^0.8.10; -import "./B.sol"; -contract A { } -"#, - ) - .unwrap(); - - project - .add_source( - "B", - r" -pragma solidity ^0.8.10; -contract B { } -", - ) - .unwrap(); - - let compiled = project.compile().unwrap(); - compiled.assert_success(); - - let info_dir = project.project().build_info_path(); - assert!(info_dir.exists()); - - let mut build_info_count = 0; - for entry in fs::read_dir(info_dir).unwrap() { - let _info = - BuildInfo::>::read(&entry.unwrap().path()).unwrap(); - build_info_count += 1; - } - assert_eq!(build_info_count, 1); -} - -#[test] -fn can_clean_build_info() { - let mut project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project.project_mut().build_info = true; - project.project_mut().paths.build_infos = project.project_mut().paths.root.join("build-info"); - project - .add_source( - "A", - r#" -pragma solidity ^0.8.10; -import "./B.sol"; -contract A { } -"#, - ) - .unwrap(); - - project - .add_source( - "B", - r" -pragma solidity ^0.8.10; -contract B { } -", - ) - .unwrap(); - - let compiled = project.compile().unwrap(); - compiled.assert_success(); - - let info_dir = project.project().build_info_path(); - assert!(info_dir.exists()); - - let mut build_info_count = 0; - for entry in fs::read_dir(info_dir).unwrap() { - let _info = - BuildInfo::>::read(&entry.unwrap().path()).unwrap(); - build_info_count += 1; - } - assert_eq!(build_info_count, 1); - - project.project().cleanup().unwrap(); - - assert!(!project.project().build_info_path().exists()); -} - -#[test] -fn can_compile_dapp_sample_with_cache() { - let tmp_dir = tempfile::tempdir().unwrap(); - let root = tmp_dir.path(); - let cache = root.join("cache").join(SOLIDITY_FILES_CACHE_FILENAME); - let artifacts = root.join("out"); - - let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR")); - let orig_root = manifest_dir.join("../../test-data/dapp-sample"); - let cache_testdata_dir = manifest_dir.join("../../test-data/cache-sample/"); - copy_dir_all(&orig_root, tmp_dir.path()).unwrap(); - let paths = ProjectPathsConfig::builder() - .cache(cache) - .sources(root.join("src")) - .artifacts(artifacts) - .lib(root.join("lib")) - .root(root) - .build() - .unwrap(); - - // first compile - let project = Project::builder().paths(paths).build(Default::default()).unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - compiled.assert_success(); - - // cache is used when nothing to compile - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - assert!(compiled.is_unchanged()); - - // deleted artifacts cause recompile even with cache - std::fs::remove_dir_all(project.artifacts_path()).unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); - assert!(!compiled.is_unchanged()); - - // new file is compiled even with partial cache - std::fs::copy(cache_testdata_dir.join("NewContract.sol"), root.join("src/NewContract.sol")) - .unwrap(); - let compiled = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_some()); -
assert!(compiled.find_first("NewContract").is_some()); - assert!(!compiled.is_unchanged()); - assert_eq!( - compiled.into_artifacts().map(|(artifact_id, _)| artifact_id.name).collect::<HashSet<_>>(), - HashSet::from([ - "Dapp".to_string(), - "DappTest".to_string(), - "DSTest".to_string(), - "NewContract".to_string(), - ]) - ); - - // old cached artifact is not taken from the cache - std::fs::copy(cache_testdata_dir.join("Dapp.sol"), root.join("src/Dapp.sol")).unwrap(); - let compiled = project.compile().unwrap(); - assert_eq!( - compiled.into_artifacts().map(|(artifact_id, _)| artifact_id.name).collect::<HashSet<_>>(), - HashSet::from([ - "DappTest".to_string(), - "NewContract".to_string(), - "DSTest".to_string(), - "Dapp".to_string(), - ]) - ); - - // deleted artifact is not taken from the cache - std::fs::remove_file(project.paths.sources.join("Dapp.sol")).unwrap(); - let compiled: ProjectCompileOutput<_> = project.compile().unwrap(); - assert!(compiled.find_first("Dapp").is_none()); -} - -fn copy_dir_all(src: &Path, dst: &Path) -> io::Result<()> { - std::fs::create_dir_all(dst)?; - for entry in std::fs::read_dir(src)? { - let entry = entry?; - let ty = entry.file_type()?; - if ty.is_dir() { - copy_dir_all(&entry.path(), &dst.join(entry.file_name()))?; - } else { - std::fs::copy(entry.path(), dst.join(entry.file_name()))?; - } - } - Ok(()) -} - -// Runs both `flatten` implementations, asserts that their outputs match and runs additional checks -// against the output. -fn test_flatteners(project: &TempProject, target: &Path, additional_checks: fn(&str)) { - let target = canonicalize(target).unwrap(); - let result = - project.project().paths.clone().with_language::<SolcLanguage>().flatten(&target).unwrap(); - let solc_result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - - assert_eq!(result, solc_result); - - additional_checks(&result); -} - -#[test] -fn can_flatten_file_with_external_lib() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample"); - let paths = ProjectPathsConfig::builder() - .sources(root.join("contracts")) - .lib(root.join("node_modules")); - let project = TempProject::<MultiCompiler>::new(paths).unwrap(); - - let target = root.join("contracts").join("Greeter.sol"); - - test_flatteners(&project, &target, |result| { - assert!(!result.contains("import")); - assert!(result.contains("library console")); - assert!(result.contains("contract Greeter")); - }); -} - -#[test] -fn can_flatten_file_in_dapp_sample() { - let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample"); - let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::<MultiCompiler>::new(paths).unwrap(); - - let target = root.join("src/Dapp.t.sol"); - - test_flatteners(&project, &target, |result| { - assert!(!result.contains("import")); - assert!(result.contains("contract DSTest")); - assert!(result.contains("contract Dapp")); - assert!(result.contains("contract DappTest")); - }); -} - -#[test] -fn can_flatten_unique() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let target = project - .add_source( - "A", - r#" -pragma solidity ^0.8.10; -import "./C.sol"; -import "./B.sol"; -contract A { } -"#, - ) - .unwrap(); - - project - .add_source( - "B", - r#" -pragma solidity ^0.8.10; -import "./C.sol"; -contract B { } -"#, - ) - .unwrap(); - - project - .add_source( - "C", - r#" -pragma solidity ^0.8.10; -import "./A.sol"; -contract C { } -"#, - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!(
- result, - r#"pragma solidity ^0.8.10; - -// src/B.sol - -contract B { } - -// src/C.sol - -contract C { } - -// src/A.sol - -contract A { } -"# - ); - }); -} - -#[test] -fn can_flatten_experimental_pragma() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let target = project - .add_source( - "A", - r#" -pragma solidity ^0.8.10; -pragma experimental ABIEncoderV2; -import "./C.sol"; -import "./B.sol"; -contract A { } -"#, - ) - .unwrap(); - - project - .add_source( - "B", - r#" -pragma solidity ^0.8.10; -pragma experimental ABIEncoderV2; -import "./C.sol"; -contract B { } -"#, - ) - .unwrap(); - - project - .add_source( - "C", - r#" -pragma solidity ^0.8.10; -pragma experimental ABIEncoderV2; -import "./A.sol"; -contract C { } -"#, - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r"pragma solidity ^0.8.10; -pragma experimental ABIEncoderV2; - -// src/B.sol - -contract B { } - -// src/C.sol - -contract C { } - -// src/A.sol - -contract A { } -" - ); - }); -} - -#[test] -fn can_flatten_on_solang_failure() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "Lib", - r#"// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.10; - -library Lib {} -"#, - ) - .unwrap(); - - let target = project - .add_source( - "Contract", - r#"// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.10; - -import { Lib } from "./Lib.sol"; - -// Intentionally erroneous code -contract Contract { - failure(); -} -"#, - ) - .unwrap(); - - let result = project.paths().clone().with_language::<SolcLanguage>().flatten(target.as_path()); - assert!(result.is_ok()); - - let result = result.unwrap(); - assert_eq!( - result, - r"// SPDX-License-Identifier: UNLICENSED -pragma solidity ^0.8.10; - -// src/Lib.sol - -library Lib {} - -// src/Contract.sol - -// Intentionally erroneous code -contract Contract { - failure(); -} -" - ); -} - -#[test] -fn can_flatten_multiline() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let target = project - .add_source( - "A", - r#" -pragma solidity ^0.8.10; -import "./C.sol"; -import { - IllegalArgument, - IllegalState -} from "./Errors.sol"; -contract A { } -"#, - ) - .unwrap(); - - project - .add_source( - "Errors", - r" -pragma solidity ^0.8.10; -error IllegalArgument(); -error IllegalState(); -", - ) - .unwrap(); - - project - .add_source( - "C", - r" -pragma solidity ^0.8.10; -contract C { } -", - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/C.sol - -contract C { } - -// src/Errors.sol - -error IllegalArgument(); -error IllegalState(); - -// src/A.sol - -contract A { } -" - ); - }); -} - -#[test] -fn can_flatten_remove_extra_spacing() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let target = project - .add_source( - "A", - r#"pragma solidity ^0.8.10; -import "./C.sol"; -import "./B.sol"; -contract A { } -"#, - ) - .unwrap(); - - project - .add_source( - "B", - r#"// This is a B Contract -pragma solidity ^0.8.10; - -import "./C.sol"; - -contract B { } -"#, - ) - .unwrap(); - - project - .add_source( - "C", - r"pragma solidity ^0.8.10; -contract C { } -", - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/C.sol - -contract C { } - -// src/B.sol -// This is a B Contract - -contract B { } - -// src/A.sol - -contract A { } -" - ); - }); -} - -#[test] -fn can_flatten_with_alias() { - let project =
TempProject::<MultiCompiler>::dapptools().unwrap(); - - let target = project - .add_source( - "Contract", - r#"pragma solidity ^0.8.10; - -import { ParentContract as Parent } from "./Parent.sol"; -import { AnotherParentContract as AnotherParent } from "./AnotherParent.sol"; -import { PeerContract as Peer } from "./Peer.sol"; -import { MathLibrary as Math } from "./Math.sol"; -import * as Lib from "./SomeLib.sol"; - -contract Contract is Parent, - AnotherParent { - using Math for uint256; - - string public usingString = "using Math for uint256;"; - string public inheritanceString = "\"Contract is Parent {\""; - string public castString = 'Peer(smth) '; - string public methodString = '\' Math.max()'; - - Peer public peer; - - constructor(address _peer) { - peer = Peer(_peer); - peer = new Peer(); - uint256 x = Math.minusOne(Math.max()); - } -} -"#, - ) - .unwrap(); - - project - .add_source( - "Parent", - r"pragma solidity ^0.8.10; - -contract ParentContract { } -", - ) - .unwrap(); - - project - .add_source( - "AnotherParent", - r"pragma solidity ^0.8.10; - -contract AnotherParentContract { } -", - ) - .unwrap(); - - project - .add_source( - "Peer", - r"pragma solidity ^0.8.10; - -contract PeerContract { } -", - ) - .unwrap(); - - project - .add_source( - "Math", - r"pragma solidity ^0.8.10; - -library MathLibrary { - function minusOne(uint256 val) internal returns (uint256) { - return val - 1; - } - - function max() internal returns (uint256) { - return type(uint256).max; - } - - function diffMax(uint256 value) internal returns (uint256) { - return type(uint256).max - value; - } -} -", - ) - .unwrap(); - - project - .add_source( - "SomeLib", - r"pragma solidity ^0.8.10; - -library SomeLib { } -", - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r#"pragma solidity ^0.8.10; - -// src/AnotherParent.sol - -contract AnotherParentContract { } - -// src/Math.sol - -library MathLibrary { - function minusOne(uint256 val) internal returns (uint256) { - return val - 1; - } - - function max() internal returns (uint256) { - return type(uint256).max; - } - - function diffMax(uint256 value) internal returns (uint256) { - return type(uint256).max - value; - } -} - -// src/Parent.sol - -contract ParentContract { } - -// src/Peer.sol - -contract PeerContract { } - -// src/SomeLib.sol - -library SomeLib { } - -// src/Contract.sol - -contract Contract is ParentContract, - AnotherParentContract { - using MathLibrary for uint256; - - string public usingString = "using Math for uint256;"; - string public inheritanceString = "\"Contract is Parent {\""; - string public castString = 'Peer(smth) '; - string public methodString = '\' Math.max()'; - - PeerContract public peer; - - constructor(address _peer) { - peer = PeerContract(_peer); - peer = new PeerContract(); - uint256 x = MathLibrary.minusOne(MathLibrary.max()); - } -} -"# - ); - }); -} - -#[test] -fn can_flatten_with_version_pragma_after_imports() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let target = project - .add_source( - "A", - r#" -pragma solidity ^0.8.10; - -import * as B from "./B.sol"; - -contract A { } -"#, - ) - .unwrap(); - - project - .add_source( - "B", - r#" -import {D} from "./D.sol"; -pragma solidity ^0.8.10; -import * as C from "./C.sol"; -contract B { } -"#, - ) - .unwrap(); - - project - .add_source( - "C", - r" -pragma solidity ^0.8.10; -contract C { } -", - ) - .unwrap(); - - project - .add_source( - "D", - r" -pragma solidity ^0.8.10; -contract D { } -", - ) - .unwrap(); - -
test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r#"pragma solidity ^0.8.10; - -// src/C.sol - -contract C { } - -// src/D.sol - -contract D { } - -// src/B.sol - -contract B { } - -// src/A.sol - -contract A { } -"# - ); - }); -} - -#[test] -fn can_flatten_with_duplicates() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "Foo.sol", - r#" -pragma solidity ^0.8.10; - -contract Foo { - function foo() public pure returns (uint256) { - return 1; - } -} - -contract Bar is Foo {} -"#, - ) - .unwrap(); - - let target = project - .add_source( - "Bar.sol", - r#" -pragma solidity ^0.8.10; -import {Foo} from "./Foo.sol"; - -contract Bar is Foo {} -"#, - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/Foo.sol - -contract Foo { - function foo() public pure returns (uint256) { - return 1; - } -} - -contract Bar_0 is Foo {} - -// src/Bar.sol - -contract Bar_1 is Foo {} -" - ); -} - -#[test] -fn can_flatten_complex_aliases_setup_with_duplicates() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "A.sol", - r#" -pragma solidity ^0.8.10; - -contract A { - type SomeCustomValue is uint256; - - struct SomeStruct { - uint256 field; - } - - enum SomeEnum { VALUE1, VALUE2 } - - function foo() public pure returns (uint256) { - return 1; - } -} -"#, - ) - .unwrap(); - - project - .add_source( - "B.sol", - r#" -pragma solidity ^0.8.10; -import "./A.sol" as A_File; - -contract A is A_File.A {} -"#, - ) - .unwrap(); - - project - .add_source( - "C.sol", - r#" -pragma solidity ^0.8.10; -import "./B.sol" as B_File; - -contract A is B_File.A_File.A {} -"#, - ) - .unwrap(); - - let target = project - .add_source( - "D.sol", - r#" -pragma solidity ^0.8.10; -import "./C.sol" as C_File; - -C_File.B_File.A_File.A.SomeCustomValue constant fileLevelValue = C_File.B_File.A_File.A.SomeCustomValue.wrap(1); - -contract D is C_File.B_File.A_File.A { - C_File.B_File.A_File.A.SomeStruct public someStruct; - C_File.B_File.A_File.A.SomeEnum public someEnum = C_File.B_File.A_File.A.SomeEnum.VALUE1; - - constructor() C_File.B_File.A_File.A() { - someStruct = C_File.B_File.A_File.A.SomeStruct(1); - someEnum = C_File.B_File.A_File.A.SomeEnum.VALUE2; - } - - function getSelector() public pure returns (bytes4) { - return C_File.B_File.A_File.A.foo.selector; - } - - function getEnumValue1() public pure returns (C_File.B_File.A_File.A.SomeEnum) { - return C_File.B_File.A_File.A.SomeEnum.VALUE1; - } - - function getStruct() public pure returns (C_File.B_File.A_File.A.SomeStruct memory) { - return C_File.B_File.A_File.A.SomeStruct(1); - } -} -"#,).unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/A.sol - -contract A_0 { - type SomeCustomValue is uint256; - - struct SomeStruct { - uint256 field; - } - - enum SomeEnum { VALUE1, VALUE2 } - - function foo() public pure returns (uint256) { - return 1; - } -} - -// src/B.sol - -contract A_1 is A_0 {} - -// src/C.sol - -contract A_2 is A_0 {} - -// src/D.sol - -A_0.SomeCustomValue constant fileLevelValue = A_0.SomeCustomValue.wrap(1); - -contract D is A_0 { - A_0.SomeStruct public someStruct; - A_0.SomeEnum public someEnum = A_0.SomeEnum.VALUE1; - - constructor() A_0() { - someStruct = A_0.SomeStruct(1); - someEnum = A_0.SomeEnum.VALUE2; - } - - function getSelector() public pure returns
(bytes4) { - return A_0.foo.selector; - } - - function getEnumValue1() public pure returns (A_0.SomeEnum) { - return A_0.SomeEnum.VALUE1; - } - - function getStruct() public pure returns (A_0.SomeStruct memory) { - return A_0.SomeStruct(1); - } -} -" - ); -} - -// https://github.com/foundry-rs/compilers/issues/34 -#[test] -fn can_flatten_34_repro() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - let target = project - .add_source( - "FlieA.sol", - r#"pragma solidity ^0.8.10; -import {B} from "./FileB.sol"; - -interface FooBar { - function foo() external; -} -contract A { - function execute() external { - FooBar(address(0)).foo(); - } -}"#, - ) - .unwrap(); - - project - .add_source( - "FileB.sol", - r#"pragma solidity ^0.8.10; - -interface FooBar { - function bar() external; -} -contract B { - function execute() external { - FooBar(address(0)).bar(); - } -}"#, - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r#"pragma solidity ^0.8.10; - -// src/FileB.sol - -interface FooBar_0 { - function bar() external; -} -contract B { - function execute() external { - FooBar_0(address(0)).bar(); - } -} - -// src/FlieA.sol - -interface FooBar_1 { - function foo() external; -} -contract A { - function execute() external { - FooBar_1(address(0)).foo(); - } -} -"# - ); -} - -#[test] -fn can_flatten_experimental_in_other_file() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "A.sol", - r#" -pragma solidity 0.6.12; -pragma experimental ABIEncoderV2; - -contract A {} -"#, - ) - .unwrap(); - - let target = project - .add_source( - "B.sol", - r#" -pragma solidity 0.6.12; - -import "./A.sol"; - -contract B is A {} -"#, - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity =0.6.12; -pragma experimental ABIEncoderV2; - -// src/A.sol - -contract A {} - -// src/B.sol - -contract B is A {} -" - ); -} - -#[test] -fn can_detect_type_error() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "Contract", - r#" - pragma solidity ^0.8.10; - - contract Contract { - function xyz() public { - require(address(0), "Error"); - } - } - "#, - ) - .unwrap(); - - let compiled = project.compile().unwrap(); - assert!(compiled.has_compiler_errors()); -} - -#[test] -fn can_flatten_aliases_with_pragma_and_license_after_source() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "A", - r#"pragma solidity ^0.8.10; -contract A { } -"#, - ) - .unwrap(); - - let target = project - .add_source( - "B", - r#"contract B is AContract {} -import {A as AContract} from "./A.sol"; -pragma solidity ^0.8.10;"#, - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/A.sol - -contract A { } - -// src/B.sol -contract B is A {} -" - ); - }); -} - -#[test] -fn can_flatten_rename_inheritdocs() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "DuplicateA", - r#"pragma solidity ^0.8.10; -contract A {} -"#, - ) - .unwrap(); - - project - .add_source( - "A", - r#"pragma solidity ^0.8.10; -import {A as OtherName} from "./DuplicateA.sol"; - -contract A { - /// Documentation - function foo() public virtual {} -} -"#, - ) - .unwrap(); - - let target = project - .add_source( - "B", - r#"pragma solidity ^0.8.10; -import {A} from "./A.sol"; - -contract B is A { - /// @inheritdoc
A - function foo() public override {} -}"#, - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/DuplicateA.sol - -contract A_0 {} - -// src/A.sol - -contract A_1 { - /// Documentation - function foo() public virtual {} -} - -// src/B.sol - -contract B is A_1 { - /// @inheritdoc A_1 - function foo() public override {} -} -" - ); -} - -#[test] -fn can_flatten_rename_inheritdocs_alias() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "A", - r#"pragma solidity ^0.8.10; - -contract A { - /// Documentation - function foo() public virtual {} -} -"#, - ) - .unwrap(); - - let target = project - .add_source( - "B", - r#"pragma solidity ^0.8.10; -import {A as Alias} from "./A.sol"; - -contract B is Alias { - /// @inheritdoc Alias - function foo() public override {} -}"#, - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/A.sol - -contract A { - /// Documentation - function foo() public virtual {} -} - -// src/B.sol - -contract B is A { - /// @inheritdoc A - function foo() public override {} -} -" - ); -} - -#[test] -fn can_flatten_rename_user_defined_functions() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "CustomUint", - r" -pragma solidity ^0.8.10; - -type CustomUint is uint256; - -function mul(CustomUint a, CustomUint b) pure returns(CustomUint) { - return CustomUint.wrap(CustomUint.unwrap(a) * CustomUint.unwrap(b)); -} - -using {mul} for CustomUint global;", - ) - .unwrap(); - - project - .add_source( - "CustomInt", - r"pragma solidity ^0.8.10; - -type CustomInt is int256; - -function mul(CustomInt a, CustomInt b) pure returns(CustomInt) { - return CustomInt.wrap(CustomInt.unwrap(a) * CustomInt.unwrap(b)); -} - -using {mul} for CustomInt global;", - ) - .unwrap(); - - let target = project - .add_source( - "Target", - r"pragma solidity ^0.8.10; - -import {CustomInt} from './CustomInt.sol'; -import {CustomUint} from './CustomUint.sol'; - -contract Foo { - function mul(CustomUint a, CustomUint b) public returns(CustomUint) { - return a.mul(b); - } - - function mul(CustomInt a, CustomInt b) public returns(CustomInt) { - return a.mul(b); - } -}", - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/CustomInt.sol - -type CustomInt is int256; - -function mul_0(CustomInt a, CustomInt b) pure returns(CustomInt) { - return CustomInt.wrap(CustomInt.unwrap(a) * CustomInt.unwrap(b)); -} - -using {mul_0} for CustomInt global; - -// src/CustomUint.sol - -type CustomUint is uint256; - -function mul_1(CustomUint a, CustomUint b) pure returns(CustomUint) { - return CustomUint.wrap(CustomUint.unwrap(a) * CustomUint.unwrap(b)); -} - -using {mul_1} for CustomUint global; - -// src/Target.sol - -contract Foo { - function mul(CustomUint a, CustomUint b) public returns(CustomUint) { - return a.mul_1(b); - } - - function mul(CustomInt a, CustomInt b) public returns(CustomInt) { - return a.mul_0(b); - } -} -" - ); -} - -#[test] -fn can_flatten_rename_global_functions() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "func1", - r"pragma solidity ^0.8.10; - -function func() view {}", - ) - .unwrap(); - - project - .add_source( - "func2", - r"pragma solidity ^0.8.10; -
-function func(uint256 x) view returns(uint256) { - return x + 1; -}", - ) - .unwrap(); - - let target = project - .add_source( - "Target", - r"pragma solidity ^0.8.10; - -import {func as func1} from './func1.sol'; -import {func as func2} from './func2.sol'; - -contract Foo { - constructor(uint256 x) { - func1(); - func2(x); - } -}", - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/func1.sol - -function func_0() view {} - -// src/func2.sol - -function func_1(uint256 x) view returns(uint256) { - return x + 1; -} - -// src/Target.sol - -contract Foo { - constructor(uint256 x) { - func_0(); - func_1(x); - } -} -" - ); -} - -#[test] -fn can_flatten_rename_in_assembly() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "A", - r"pragma solidity ^0.8.10; - -uint256 constant a = 1;", - ) - .unwrap(); - - project - .add_source( - "B", - r"pragma solidity ^0.8.10; - -uint256 constant a = 2;", - ) - .unwrap(); - - let target = project - .add_source( - "Target", - r"pragma solidity ^0.8.10; - -import {a as a1} from './A.sol'; -import {a as a2} from './B.sol'; - -contract Foo { - function test() public returns(uint256 x) { - assembly { - x := mul(a1, a2) - } - } -}", - ) - .unwrap(); - - let result = Flattener::new(project.project().clone(), &target).unwrap().flatten(); - assert_eq!( - result, - r"pragma solidity ^0.8.10; - -// src/A.sol - -uint256 constant a_0 = 1; - -// src/B.sol - -uint256 constant a_1 = 2; - -// src/Target.sol - -contract Foo { - function test() public returns(uint256 x) { - assembly { - x := mul(a_0, a_1) - } - } -} -" - ); -} - -#[test] -fn can_flatten_combine_pragmas() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - project - .add_source( - "A", - r"pragma solidity >=0.5.0; - -contract A {}", - ) - .unwrap(); - - let target = project - .add_source( - "B", - r"pragma solidity <0.9.0; -import './A.sol'; - -contract B {}", - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r"pragma solidity <0.9.0 >=0.5.0; - -// src/A.sol - -contract A {} - -// src/B.sol - -contract B {} -" - ); - }); -} - -#[test] -fn can_flatten_with_assembly_reference_suffix() { - let project = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let target = project - .add_source( - "A", - r"pragma solidity >=0.5.0; - -contract A { - uint256 val; - - function useSuffix() public { - bytes32 slot; - assembly { - slot := val.slot - } - } -}", - ) - .unwrap(); - - test_flatteners(&project, &target, |result| { - assert_eq!( - result, - r"pragma solidity >=0.5.0; - -// src/A.sol - -contract A { - uint256 val; - - function useSuffix() public { - bytes32 slot; - assembly { - slot := val.slot - } - } -} -" - ); - }); -} - -#[test] -fn can_compile_single_files() { - let tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let f = tmp - .add_contract( - "examples/Foo", - r" - pragma solidity ^0.8.10; - - contract Foo {} - ", - ) - .unwrap(); - - let compiled = tmp.project().compile_file(f.clone()).unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("Foo").is_some()); - - let bar = tmp - .add_contract( - "examples/Bar", - r" - pragma solidity ^0.8.10; - - contract Bar {} - ", - ) - .unwrap(); - - let compiled = tmp.project().compile_files(vec![f, bar]).unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("Foo").is_some()); - assert!(compiled.find_first("Bar").is_some()); -} - -#[test] -fn
consistent_bytecode() { - let tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - tmp.add_source( - "LinkTest", - r" -// SPDX-License-Identifier: MIT -library LibTest { - function foobar(uint256 a) public view returns (uint256) { - return a * 100; - } -} -contract LinkTest { - function foo() public returns (uint256) { - return LibTest.foobar(1); - } -} -", - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - let contract = compiled.find_first("LinkTest").unwrap(); - let bytecode = &contract.bytecode.as_ref().unwrap().object; - assert!(bytecode.is_unlinked()); - let s = bytecode.as_str().unwrap(); - assert!(!s.starts_with("0x")); - - let s = serde_json::to_string(&bytecode).unwrap(); - assert_eq!(bytecode.clone(), serde_json::from_str(&s).unwrap()); -} - -#[test] -fn can_apply_libraries() { - let mut tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - tmp.add_source( - "LinkTest", - r#" -// SPDX-License-Identifier: MIT -import "./MyLib.sol"; -contract LinkTest { - function foo() public returns (uint256) { - return MyLib.foobar(1); - } -} -"#, - ) - .unwrap(); - - let lib = tmp - .add_source( - "MyLib", - r" -// SPDX-License-Identifier: MIT -library MyLib { - function foobar(uint256 a) public view returns (uint256) { - return a * 100; - } -} -", - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - assert!(compiled.find_first("MyLib").is_some()); - let contract = compiled.find_first("LinkTest").unwrap(); - let bytecode = &contract.bytecode.as_ref().unwrap().object; - assert!(bytecode.is_unlinked()); - - // provide the library settings to let solc link - tmp.project_mut().settings.solc.libraries = BTreeMap::from([( - lib, - BTreeMap::from([("MyLib".to_string(), format!("{:?}", Address::ZERO))]), - )]) - .into(); - tmp.project_mut().settings.solc.libraries.slash_paths(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - assert!(compiled.find_first("MyLib").is_some()); - let contract = compiled.find_first("LinkTest").unwrap(); - let bytecode = &contract.bytecode.as_ref().unwrap().object; - assert!(!bytecode.is_unlinked()); - - let libs = Libraries::parse(&[format!("./src/MyLib.sol:MyLib:{:?}", Address::ZERO)]).unwrap(); - // provide the library settings to let solc link - tmp.project_mut().settings.solc.libraries = - libs.apply(|libs| tmp.paths().apply_lib_remappings(libs)); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - assert!(compiled.find_first("MyLib").is_some()); - let contract = compiled.find_first("LinkTest").unwrap(); - let bytecode = &contract.bytecode.as_ref().unwrap().object; - assert!(!bytecode.is_unlinked()); -}
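A caller consuming these artifacts would typically gate deployment on the link status; a minimal sketch using only the accessors exercised above (the error message is illustrative):

    let contract = compiled.find_first("LinkTest").unwrap();
    let object = &contract.bytecode.as_ref().unwrap().object;
    // Unlinked bytecode still contains library placeholders and cannot be
    // deployed as-is, so refuse it up front.
    assert!(!object.is_unlinked(), "library placeholders still present");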
 -#[test] -fn can_ignore_warning_from_paths() { - let setup_and_compile = |ignore_paths: Option<Vec<PathBuf>>| { - let tmp = match ignore_paths { - Some(paths) => TempProject::dapptools_with_ignore_paths(paths).unwrap(), - None => TempProject::<MultiCompiler>::dapptools().unwrap(), - }; - - tmp.add_source( - "LinkTest", - r#" - // SPDX-License-Identifier: MIT - import "./MyLib.sol"; - contract LinkTest { - function foo() public returns (uint256) { - } - } - "#, - ) - .unwrap(); - - tmp.add_source( - "MyLib", - r" - // SPDX-License-Identifier: MIT - library MyLib { - function foobar(uint256 a) public view returns (uint256) { - return a * 100; - } - } - ", - ) - .unwrap(); - - tmp.compile().unwrap() - }; - - // Test without ignoring paths - let compiled_without_ignore = setup_and_compile(None); - compiled_without_ignore.assert_success(); - assert!(compiled_without_ignore.has_compiler_warnings()); - - // Test with ignoring paths - let paths_to_ignore = vec![Path::new("src").to_path_buf()]; - let compiled_with_ignore = setup_and_compile(Some(paths_to_ignore)); - compiled_with_ignore.assert_success(); - assert!(!compiled_with_ignore.has_compiler_warnings()); -} -#[test] -fn can_apply_libraries_with_remappings() { - let mut tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let remapping = tmp.paths().libraries[0].join("remapping"); - tmp.paths_mut() - .remappings - .push(Remapping::from_str(&format!("remapping/={}/", remapping.display())).unwrap()); - - tmp.add_source( - "LinkTest", - r#" -// SPDX-License-Identifier: MIT -import "remapping/MyLib.sol"; -contract LinkTest { - function foo() public returns (uint256) { - return MyLib.foobar(1); - } -} -"#, - ) - .unwrap(); - - tmp.add_lib( - "remapping/MyLib", - r" -// SPDX-License-Identifier: MIT -library MyLib { - function foobar(uint256 a) public view returns (uint256) { - return a * 100; - } -} -", - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - assert!(compiled.find_first("MyLib").is_some()); - let contract = compiled.find_first("LinkTest").unwrap(); - let bytecode = &contract.bytecode.as_ref().unwrap().object; - assert!(bytecode.is_unlinked()); - - let libs = - Libraries::parse(&[format!("remapping/MyLib.sol:MyLib:{:?}", Address::ZERO)]).unwrap(); // provide the library settings to let solc link - tmp.project_mut().settings.solc.libraries = - libs.apply(|libs| tmp.paths().apply_lib_remappings(libs)); - tmp.project_mut().settings.solc.libraries.slash_paths(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - assert!(compiled.find_first("MyLib").is_some()); - let contract = compiled.find_first("LinkTest").unwrap(); - let bytecode = &contract.bytecode.as_ref().unwrap().object; - assert!(!bytecode.is_unlinked()); -} - -#[test] -fn can_detect_invalid_version() { - let tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - let content = r" - pragma solidity ^0.100.10; - contract A {} - "; - tmp.add_source("A", content).unwrap(); - - let out = tmp.compile().unwrap_err(); - match out { - SolcError::Message(err) => { - assert_eq!(err, format!("Encountered invalid solc version in src{MAIN_SEPARATOR}A.sol: No solc version exists that matches the version requirement: ^0.100.10")); - } - _ => { - unreachable!() - } - } -} - -#[test] -fn test_severity_warnings() { - let mut tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - // also treat warnings as error - tmp.project_mut().compiler_severity_filter = Severity::Warning; - - let content = r" - pragma solidity =0.8.13; - contract A {} - "; - tmp.add_source("A", content).unwrap(); - - let out = tmp.compile().unwrap(); - assert!(out.output().has_error(&[], &[], &Severity::Warning)); - - let content = r" - // SPDX-License-Identifier: MIT OR Apache-2.0 - pragma solidity =0.8.13; - contract A {} - "; - tmp.add_source("A", content).unwrap(); - - let out = tmp.compile().unwrap(); - assert!(!out.output().has_error(&[], &[], &Severity::Warning)); - - let content = r" - // SPDX-License-Identifier: MIT OR Apache-2.0 - pragma solidity =0.8.13; - contract A { - function id(uint111 value) external pure returns (uint256) { - return 0; - } - } - "; - tmp.add_source("A", content).unwrap(); - - let out = tmp.compile().unwrap(); - assert!(out.output().has_error(&[], &[], &Severity::Warning)); -} - -#[test] -fn can_recompile_with_changes() { - let mut tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); -
tmp.project_mut().paths.allowed_paths = BTreeSet::from([tmp.root().join("modules")]); - - let content = r#" - pragma solidity ^0.8.10; - import "../modules/B.sol"; - contract A {} - "#; - tmp.add_source("A", content).unwrap(); - - tmp.add_contract( - "modules/B", - r" - pragma solidity ^0.8.10; - contract B {} - ", - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("A").is_some()); - assert!(compiled.find_first("B").is_some()); - - let compiled = tmp.compile().unwrap(); - assert!(compiled.find_first("A").is_some()); - assert!(compiled.find_first("B").is_some()); - assert!(compiled.is_unchanged()); - - // modify A.sol - tmp.add_source("A", format!("{content}\n")).unwrap(); - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert!(!compiled.is_unchanged()); - assert!(compiled.find_first("A").is_some()); - assert!(compiled.find_first("B").is_some()); -} - -#[test] -fn can_recompile_with_lowercase_names() { - let tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - tmp.add_source( - "deployProxy.sol", - r" - pragma solidity =0.8.12; - contract DeployProxy {} - ", - ) - .unwrap(); - - let upgrade = r#" - pragma solidity =0.8.12; - import "./deployProxy.sol"; - import "./ProxyAdmin.sol"; - contract UpgradeProxy {} - "#; - tmp.add_source("upgradeProxy.sol", upgrade).unwrap(); - - tmp.add_source( - "ProxyAdmin.sol", - r" - pragma solidity =0.8.12; - contract ProxyAdmin {} - ", - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("DeployProxy").is_some()); - assert!(compiled.find_first("UpgradeProxy").is_some()); - assert!(compiled.find_first("ProxyAdmin").is_some()); - - let artifacts = tmp.artifacts_snapshot().unwrap(); - assert_eq!(artifacts.artifacts.as_ref().len(), 3); - artifacts.assert_artifacts_essentials_present(); - - let compiled = tmp.compile().unwrap(); - assert!(compiled.find_first("DeployProxy").is_some()); - assert!(compiled.find_first("UpgradeProxy").is_some()); - assert!(compiled.find_first("ProxyAdmin").is_some()); - assert!(compiled.is_unchanged()); - - // modify upgradeProxy.sol - tmp.add_source("upgradeProxy.sol", format!("{upgrade}\n")).unwrap(); - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert!(!compiled.is_unchanged()); - assert!(compiled.find_first("DeployProxy").is_some()); - assert!(compiled.find_first("UpgradeProxy").is_some()); - assert!(compiled.find_first("ProxyAdmin").is_some()); - - let artifacts = tmp.artifacts_snapshot().unwrap(); - assert_eq!(artifacts.artifacts.as_ref().len(), 3); - artifacts.assert_artifacts_essentials_present(); -} - -#[test] -fn can_recompile_unchanged_with_empty_files() { - let tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - tmp.add_source( - "A", - r#" - pragma solidity ^0.8.10; - import "./B.sol"; - contract A {} - "#, - ) - .unwrap(); - - tmp.add_source( - "B", - r#" - pragma solidity ^0.8.10; - import "./C.sol"; - "#, - ) - .unwrap(); - - let c = r" - pragma solidity ^0.8.10; - contract C {} - "; - tmp.add_source("C", c).unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("A").is_some()); - assert!(compiled.find_first("C").is_some()); - - let compiled = tmp.compile().unwrap(); - assert!(compiled.find_first("A").is_some()); - assert!(compiled.find_first("C").is_some()); - assert!(compiled.is_unchanged()); - - // modify C.sol - tmp.add_source("C", format!("{c}\n")).unwrap(); - let compiled =
tmp.compile().unwrap(); - compiled.assert_success(); - assert!(!compiled.is_unchanged()); - assert!(compiled.find_first("A").is_some()); - assert!(compiled.find_first("C").is_some()); -} - -#[test] -fn can_emit_empty_artifacts() { - let tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let top_level = tmp - .add_source( - "top_level", - r" - function test() {} - ", - ) - .unwrap(); - - tmp.add_source( - "Contract", - r#" -// SPDX-License-Identifier: UNLICENSED -pragma solidity 0.8.10; - -import "./top_level.sol"; - -contract Contract { - function a() public{ - test(); - } -} - "#, - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("Contract").is_some()); - assert!(compiled.find_first("top_level").is_some()); - let mut artifacts = tmp.artifacts_snapshot().unwrap(); - - assert_eq!(artifacts.artifacts.as_ref().len(), 2); - - let mut top_level = artifacts.artifacts.as_mut().remove(&top_level).unwrap(); - - assert_eq!(top_level.len(), 1); - - let artifact = top_level.remove("top_level").unwrap().remove(0); - assert!(artifact.artifact.ast.is_some()); - - // recompile - let compiled = tmp.compile().unwrap(); - assert!(compiled.is_unchanged()); - - // modify standalone file - - tmp.add_source( - "top_level", - r" - error MyError(); - function test() {} - ", - ) - .unwrap(); - let compiled = tmp.compile().unwrap(); - assert!(!compiled.is_unchanged()); -} - -#[test] -fn can_detect_contract_def_source_files() { - let tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - let mylib = tmp - .add_source( - "MyLib", - r" - pragma solidity 0.8.10; - library MyLib { - } - ", - ) - .unwrap(); - - let myinterface = tmp - .add_source( - "MyInterface", - r" - pragma solidity 0.8.10; - interface MyInterface {} - ", - ) - .unwrap(); - - let mycontract = tmp - .add_source( - "MyContract", - r" - pragma solidity 0.8.10; - contract MyContract {} - ", - ) - .unwrap(); - - let myabstract_contract = tmp - .add_source( - "MyAbstractContract", - r" - pragma solidity 0.8.10; - contract MyAbstractContract {} - ", - ) - .unwrap(); - - let myerr = tmp - .add_source( - "MyError", - r" - pragma solidity 0.8.10; - error MyError(); - ", - ) - .unwrap(); - - let myfunc = tmp - .add_source( - "MyFunction", - r" - pragma solidity 0.8.10; - function abc(){} - ", - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - let mut sources = compiled.into_output().sources; - let myfunc = sources.remove_by_path(&myfunc).unwrap(); - assert!(!myfunc.contains_contract_definition()); - - let myerr = sources.remove_by_path(&myerr).unwrap(); - assert!(!myerr.contains_contract_definition()); - - let mylib = sources.remove_by_path(&mylib).unwrap(); - assert!(mylib.contains_contract_definition()); - - let myabstract_contract = sources.remove_by_path(&myabstract_contract).unwrap(); - assert!(myabstract_contract.contains_contract_definition()); - - let myinterface = sources.remove_by_path(&myinterface).unwrap(); - assert!(myinterface.contains_contract_definition()); - - let mycontract = sources.remove_by_path(&mycontract).unwrap(); - assert!(mycontract.contains_contract_definition()); -} - -#[test] -fn can_compile_sparse_with_link_references() { - let mut tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - - tmp.add_source( - "ATest.t.sol", - r#" - pragma solidity =0.8.12; - import {MyLib} from "./mylib.sol"; - contract ATest { - function test_mylib() public returns (uint256) { - return MyLib.doStuff(); - } - } - "#, - ) - .unwrap(); - - let my_lib_path = tmp -
.add_source( - "mylib.sol", - r" - pragma solidity =0.8.12; - library MyLib { - function doStuff() external pure returns (uint256) {return 1337;} - } - ", - ) - .unwrap(); - - tmp.project_mut().sparse_output = Some(Box::<TestFileFilter>::default()); - let mut compiled = tmp.compile().unwrap(); - compiled.assert_success(); - - let mut output = compiled.clone().into_output(); - - assert!(compiled.find_first("ATest").is_some()); - assert!(compiled.find_first("MyLib").is_some()); - let lib = compiled.remove_first("MyLib").unwrap(); - assert!(lib.bytecode.is_some()); - let lib = compiled.remove_first("MyLib"); - assert!(lib.is_none()); - - let mut dup = output.clone(); - let lib = dup.remove_first("MyLib"); - assert!(lib.is_some()); - let lib = dup.remove_first("MyLib"); - assert!(lib.is_none()); - - dup = output.clone(); - let lib = dup.remove(&my_lib_path, "MyLib"); - assert!(lib.is_some()); - let lib = dup.remove(&my_lib_path, "MyLib"); - assert!(lib.is_none()); - - #[cfg(not(windows))] - let info = ContractInfo::new(&format!("{}:{}", my_lib_path.display(), "MyLib")); - #[cfg(windows)] - let info = { - use path_slash::PathBufExt; - ContractInfo { - path: Some(my_lib_path.to_slash_lossy().to_string()), - name: "MyLib".to_string(), - } - }; - let lib = output.remove_contract(&info); - assert!(lib.is_some()); - let lib = output.remove_contract(&info); - assert!(lib.is_none()); -} - -#[test] -fn can_sanitize_bytecode_hash() { - let mut tmp = TempProject::<MultiCompiler>::dapptools().unwrap(); - tmp.project_mut().settings.solc.metadata = Some(BytecodeHash::Ipfs.into()); - - tmp.add_source( - "A", - r" - pragma solidity =0.5.17; - contract A {} - ", - ) - .unwrap(); - - let compiled = tmp.compile().unwrap(); - compiled.assert_success(); - assert!(compiled.find_first("A").is_some()); -}
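The metadata override used above generalizes to the other hash variants of the imported `BytecodeHash` enum; a short sketch (only `Ipfs` is exercised by the test, the `None` line is an assumed-but-standard alternative for reproducible bytecode):

    // Strip the metadata hash entirely, e.g. for byte-identical builds:
    tmp.project_mut().settings.solc.metadata = Some(BytecodeHash::None.into());
    // Or keep the default ipfs-hashed metadata, as the test above does:
    tmp.project_mut().settings.solc.metadata = Some(BytecodeHash::Ipfs.into());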
 -// https://github.com/foundry-rs/foundry/issues/5307 -#[test] -fn can_create_standard_json_input_with_external_file() { - // File structure: - // . - // ├── verif - // │   └── src - // │   └── Counter.sol - // └── remapped - // ├── Child.sol - // └── Parent.sol - - let dir = tempfile::tempdir().unwrap(); - let verif_dir = utils::canonicalize(dir.path()).unwrap().join("verif"); - let remapped_dir = utils::canonicalize(dir.path()).unwrap().join("remapped"); - fs::create_dir_all(verif_dir.join("src")).unwrap(); - fs::create_dir(&remapped_dir).unwrap(); - - let mut verif_project = ProjectBuilder::<MultiCompiler>::new(Default::default()) - .paths(ProjectPathsConfig::dapptools(&verif_dir).unwrap()) - .build(Default::default()) - .unwrap(); - - verif_project.paths.remappings.push(Remapping { - context: None, - name: "@remapped/".into(), - path: "../remapped/".into(), - }); - verif_project.paths.allowed_paths.insert(remapped_dir.clone()); - - fs::write(remapped_dir.join("Parent.sol"), "pragma solidity >=0.8.0; import './Child.sol';") - .unwrap(); - fs::write(remapped_dir.join("Child.sol"), "pragma solidity >=0.8.0;").unwrap(); - fs::write( - verif_dir.join("src/Counter.sol"), - "pragma solidity >=0.8.0; import '@remapped/Parent.sol'; contract Counter {}", - ) - .unwrap(); - - // solc compiles using the host file system; therefore, this setup is considered valid - let compiled = verif_project.compile().unwrap(); - compiled.assert_success(); - - // can create project root based paths - let std_json = verif_project.standard_json_input(&verif_dir.join("src/Counter.sol")).unwrap(); - assert_eq!( - std_json.sources.iter().map(|(path, _)| path.clone()).collect::<Vec<_>>(), - vec![ - PathBuf::from("src/Counter.sol"), - PathBuf::from("../remapped/Parent.sol"), - PathBuf::from("../remapped/Child.sol") - ] - ); - - let solc = Solc::find_or_install(&Version::new(0, 8, 24)).unwrap(); - - // can compile using the created json - let compiler_errors = solc - .compile(&std_json) - .unwrap() - .errors - .into_iter() - .filter_map(|e| if e.severity.is_error() { Some(e.message) } else { None }) - .collect::<Vec<_>>(); - assert!(compiler_errors.is_empty(), "{compiler_errors:?}"); -} - -#[test] -fn can_compile_std_json_input() { - let tmp = TempProject::<MultiCompiler>::dapptools_init().unwrap(); - tmp.assert_no_errors(); - let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap(); - let input = tmp.project().standard_json_input(&source).unwrap(); - - assert!(input.settings.remappings.contains(&"ds-test/=lib/ds-test/src/".parse().unwrap())); - let input: SolcInput = input.into(); - assert!(input.sources.contains_key(Path::new("lib/ds-test/src/test.sol"))); - - // should be installed - if let Ok(solc) = Solc::find_or_install(&Version::new(0, 8, 24)) { - let out = solc.compile(&input).unwrap(); - assert!(out.errors.is_empty()); - assert!(out.sources.contains_key(Path::new("lib/ds-test/src/test.sol"))); - } -} - -// This test is exclusive to unix because creating a symlink is a privileged action on windows.
-// https://doc.rust-lang.org/std/os/windows/fs/fn.symlink_dir.html#limitations
-#[test]
-#[cfg(unix)]
-fn can_create_standard_json_input_with_symlink() {
-    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
-    let dependency = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    // File structure:
-    //
-    // project
-    // ├── node_modules
-    // │   └── dependency -> symlink to actual 'dependency' directory
-    // └── src
-    //     └── A.sol
-    //
-    // dependency
-    // └── src
-    //     ├── B.sol
-    //     └── C.sol
-
-    fs::create_dir(project.root().join("node_modules")).unwrap();
-
-    std::os::unix::fs::symlink(dependency.root(), project.root().join("node_modules/dependency"))
-        .unwrap();
-    project.project_mut().paths.remappings.push(Remapping {
-        context: None,
-        name: "@dependency/".into(),
-        path: "node_modules/dependency/".into(),
-    });
-
-    project
-        .add_source(
-            "A",
-            r"pragma solidity >=0.8.0; import '@dependency/src/B.sol'; contract A is B {}",
-        )
-        .unwrap();
-    dependency
-        .add_source("B", r"pragma solidity >=0.8.0; import './C.sol'; contract B is C {}")
-        .unwrap();
-    dependency.add_source("C", r"pragma solidity >=0.8.0; contract C {}").unwrap();
-
-    // solc compiles using the host file system; therefore, this setup is considered valid
-    project.assert_no_errors();
-
-    // can create project root based paths
-    let std_json =
-        project.project().standard_json_input(&project.sources_path().join("A.sol")).unwrap();
-    assert_eq!(
-        std_json.sources.iter().map(|(path, _)| path.clone()).collect::<Vec<_>>(),
-        vec![
-            PathBuf::from("src/A.sol"),
-            PathBuf::from("node_modules/dependency/src/B.sol"),
-            PathBuf::from("node_modules/dependency/src/C.sol")
-        ]
-    );
-
-    let solc = Solc::find_or_install(&Version::new(0, 8, 24)).unwrap();
-
-    // can compile using the created json
-    let compiler_errors = solc
-        .compile(&std_json)
-        .unwrap()
-        .errors
-        .into_iter()
-        .filter_map(|e| if e.severity.is_error() { Some(e.message) } else { None })
-        .collect::<Vec<_>>();
-    assert!(compiler_errors.is_empty(), "{compiler_errors:?}");
-}
-
-#[test]
-fn can_compile_model_checker_sample() {
-    let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/model-checker-sample");
-    let paths = ProjectPathsConfig::builder().sources(root);
-
-    let mut project = TempProject::<MultiCompiler>::new(paths).unwrap();
-    project.project_mut().settings.solc.settings.model_checker = Some(ModelCheckerSettings {
-        engine: Some(CHC),
-        timeout: Some(10000),
-        ..Default::default()
-    });
-    let compiled = project.compile().unwrap();
-
-    assert!(compiled.find_first("Assert").is_some());
-    compiled.assert_success();
-    assert!(compiled.has_compiler_warnings());
-}
-
-#[test]
-fn test_compiler_severity_filter() {
-    fn gen_test_data_warning_path() -> ProjectPathsConfig {
-        let root =
-            Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/test-contract-warnings");
-
-        ProjectPathsConfig::builder().sources(root).build().unwrap()
-    }
-
-    let project = Project::builder()
-        .no_artifacts()
-        .paths(gen_test_data_warning_path())
-        .ephemeral()
-        .build(Default::default())
-        .unwrap();
-    let compiled = project.compile().unwrap();
-    assert!(compiled.has_compiler_warnings());
-    compiled.assert_success();
-
-    let project = Project::builder()
-        .no_artifacts()
-        .paths(gen_test_data_warning_path())
-        .ephemeral()
-        .set_compiler_severity_filter(foundry_compilers_artifacts::Severity::Warning)
-        .build(Default::default())
-        .unwrap();
-    let compiled = project.compile().unwrap();
-    assert!(compiled.has_compiler_warnings());
-    assert!(compiled.has_compiler_errors());
-}
-
-fn gen_test_data_licensing_warning() -> ProjectPathsConfig {
-    let root = canonicalize(
-        Path::new(env!("CARGO_MANIFEST_DIR"))
-            .join("../../test-data/test-contract-warnings/LicenseWarning.sol"),
-    )
-    .unwrap();
-
-    ProjectPathsConfig::builder().sources(root).build().unwrap()
-}
-
-fn compile_project_with_options(
-    severity_filter: Option<foundry_compilers_artifacts::Severity>,
-    ignore_paths: Option<Vec<PathBuf>>,
-    ignore_error_code: Option<u64>,
-) -> ProjectCompileOutput {
-    let mut builder =
-        Project::builder().no_artifacts().paths(gen_test_data_licensing_warning()).ephemeral();
-
-    if let Some(paths) = ignore_paths {
-        builder = builder.ignore_paths(paths);
-    }
-    if let Some(code) = ignore_error_code {
-        builder = builder.ignore_error_code(code);
-    }
-    if let Some(severity) = severity_filter {
-        builder = builder.set_compiler_severity_filter(severity);
-    }
-
-    let project = builder.build(Default::default()).unwrap();
-    project.compile().unwrap()
-}
-
-#[test]
-fn test_compiler_ignored_file_paths() {
-    let compiled = compile_project_with_options(None, None, None);
-    // no ignored paths set, so the warning should be present
-    assert!(compiled.has_compiler_warnings());
-    compiled.assert_success();
-
-    let testdata =
-        canonicalize(Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data")).unwrap();
-    let compiled = compile_project_with_options(
-        Some(foundry_compilers_artifacts::Severity::Warning),
-        Some(vec![testdata]),
-        None,
-    );
-
-    // ignored paths set, so the warning shouldn't be present
-    assert!(!compiled.has_compiler_warnings());
-    compiled.assert_success();
-}
-
-#[test]
-fn test_compiler_severity_filter_and_ignored_error_codes() {
-    let missing_license_error_code = 1878;
-
-    let compiled = compile_project_with_options(None, None, None);
-    assert!(compiled.has_compiler_warnings());
-
-    let compiled = compile_project_with_options(None, None, Some(missing_license_error_code));
-    assert!(!compiled.has_compiler_warnings());
-    compiled.assert_success();
-
-    let compiled = compile_project_with_options(
-        Some(foundry_compilers_artifacts::Severity::Warning),
-        None,
-        Some(missing_license_error_code),
-    );
-    assert!(!compiled.has_compiler_warnings());
-    compiled.assert_success();
-}
-
-fn remove_solc_if_exists(version: &Version) {
-    if Solc::find_svm_installed_version(version).unwrap().is_some() {
-        svm::remove_version(version).expect("failed to remove version")
-    }
-}
-
-#[test]
-fn can_install_solc_and_compile_version() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-    let version = Version::new(0, 8, 10);
-
-    project
-        .add_source(
-            "Contract",
-            format!(
-                r#"
-pragma solidity {version};
-contract Contract {{ }}
-"#
-            ),
-        )
-        .unwrap();
-
-    remove_solc_if_exists(&version);
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-}
-
-#[tokio::test(flavor = "multi_thread")]
-async fn can_install_solc_and_compile_std_json_input_async() {
-    let tmp = TempProject::<MultiCompiler>::dapptools_init().unwrap();
-    tmp.assert_no_errors();
-    let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap();
-    let input = tmp.project().standard_json_input(&source).unwrap();
-    let solc = Solc::find_or_install(&Version::new(0, 8, 24)).unwrap();
-
-    assert!(input.settings.remappings.contains(&"ds-test/=lib/ds-test/src/".parse().unwrap()));
-    let input: SolcInput = input.into();
-    assert!(input.sources.contains_key(Path::new("lib/ds-test/src/test.sol")));
-
-    let out = solc.async_compile(&input).await.unwrap();
-    assert!(!out.has_error());
-    assert!(out.sources.contains_key(&PathBuf::from("lib/ds-test/src/test.sol")));
-}
-
-#[test]
-fn can_purge_obsolete_artifacts() {
-    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
-    project.set_solc("0.8.10");
-    project
-        .add_source(
-            "Contract",
-            r"
-    pragma solidity >=0.8.10;
-
-    contract Contract {
-        function xyz() public {
-        }
-    }
-    ",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(!compiled.is_unchanged());
-    assert_eq!(compiled.into_artifacts().count(), 1);
-
-    project.set_solc("0.8.13");
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(!compiled.is_unchanged());
-    assert_eq!(compiled.into_artifacts().count(), 1);
-}
-
-#[test]
-fn can_parse_notice() {
-    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
-    project.project_mut().artifacts.additional_values.userdoc = true;
-    project.project_mut().settings.solc.settings = project.project_mut().artifacts.solc_settings();
-
-    let contract = r"
-    pragma solidity $VERSION;
-
-    contract Contract {
-        string greeting;
-
-        /**
-         * @notice hello
-         */
-        constructor(string memory _greeting) public {
-            greeting = _greeting;
-        }
-
-        /**
-         * @notice hello
-         */
-        function xyz() public {
-        }
-
-        /// @notice hello
-        function abc() public {
-        }
-    }
-    ";
-    project.add_source("Contract", contract.replace("$VERSION", "=0.5.17")).unwrap();
-
-    let mut compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(!compiled.is_unchanged());
-    assert!(compiled.find_first("Contract").is_some());
-    let userdoc = compiled.remove_first("Contract").unwrap().userdoc;
-
-    assert_eq!(
-        userdoc,
-        Some(UserDoc {
-            version: None,
-            kind: None,
-            methods: BTreeMap::from([
-                ("abc()".to_string(), UserDocNotice::Notice { notice: "hello".to_string() }),
-                ("xyz()".to_string(), UserDocNotice::Notice { notice: "hello".to_string() }),
-                ("constructor".to_string(), UserDocNotice::Constructor("hello".to_string())),
-            ]),
-            events: BTreeMap::new(),
-            errors: BTreeMap::new(),
-            notice: None
-        })
-    );
-
-    project.add_source("Contract", contract.replace("$VERSION", "^0.8.10")).unwrap();
-
-    let mut compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(!compiled.is_unchanged());
-    assert!(compiled.find_first("Contract").is_some());
-    let userdoc = compiled.remove_first("Contract").unwrap().userdoc;
-
-    assert_eq!(
-        userdoc,
-        Some(UserDoc {
-            version: Some(1),
-            kind: Some("user".to_string()),
-            methods: BTreeMap::from([
-                ("abc()".to_string(), UserDocNotice::Notice { notice: "hello".to_string() }),
-                ("xyz()".to_string(), UserDocNotice::Notice { notice: "hello".to_string() }),
-                ("constructor".to_string(), UserDocNotice::Notice { notice: "hello".to_string() }),
-            ]),
-            events: BTreeMap::new(),
-            errors: BTreeMap::new(),
-            notice: None
-        })
-    );
-}
-
-#[test]
-fn can_parse_doc() {
-    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
-    project.project_mut().artifacts.additional_values.userdoc = true;
-    project.project_mut().artifacts.additional_values.devdoc = true;
-    project.project_mut().settings.solc.settings = project.project_mut().artifacts.solc_settings();
-
-    let contract = r"
-// SPDX-License-Identifier: GPL-3.0-only
-pragma solidity 0.8.17;
-
-/// @title Not an ERC20.
-/// @author Notadev
-/// @notice Do not use this.
-/// @dev This is not an ERC20 implementation.
-/// @custom:experimental This is an experimental contract.
-interface INotERC20 {
-    /// @notice Transfer tokens.
-    /// @dev Transfer `amount` tokens to account `to`.
-    /// @param to Target account.
-    /// @param amount Transfer amount.
-    /// @return A boolean value indicating whether the operation succeeded.
-    function transfer(address to, uint256 amount) external returns (bool);
-
-    /// @notice Transfer some tokens.
-    /// @dev Emitted when transfer.
-    /// @param from Source account.
-    /// @param to Target account.
-    /// @param value Transfer amount.
-    event Transfer(address indexed from, address indexed to, uint256 value);
-
-    /// @notice Insufficient balance for transfer.
-    /// @dev Needed `required` but only `available` available.
-    /// @param available Balance available.
-    /// @param required Requested amount to transfer.
-    error InsufficientBalance(uint256 available, uint256 required);
-}
-
-contract NotERC20 is INotERC20 {
-    /// @inheritdoc INotERC20
-    function transfer(address to, uint256 amount) external returns (bool) {
-        return false;
-    }
-}
-    ";
-    project.add_source("Contract", contract).unwrap();
-
-    let mut compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(!compiled.is_unchanged());
-
-    assert!(compiled.find_first("INotERC20").is_some());
-    let contract = compiled.remove_first("INotERC20").unwrap();
-    assert_eq!(
-        contract.userdoc,
-        Some(UserDoc {
-            version: Some(1),
-            kind: Some("user".to_string()),
-            notice: Some("Do not use this.".to_string()),
-            methods: BTreeMap::from([(
-                "transfer(address,uint256)".to_string(),
-                UserDocNotice::Notice { notice: "Transfer tokens.".to_string() }
-            ),]),
-            events: BTreeMap::from([(
-                "Transfer(address,address,uint256)".to_string(),
-                UserDocNotice::Notice { notice: "Transfer some tokens.".to_string() }
-            ),]),
-            errors: BTreeMap::from([(
-                "InsufficientBalance(uint256,uint256)".to_string(),
-                vec![UserDocNotice::Notice {
-                    notice: "Insufficient balance for transfer.".to_string()
-                }]
-            ),]),
-        })
-    );
-    assert_eq!(
-        contract.devdoc,
-        Some(DevDoc {
-            version: Some(1),
-            kind: Some("dev".to_string()),
-            author: Some("Notadev".to_string()),
-            details: Some("This is not an ERC20 implementation.".to_string()),
-            custom_experimental: Some("This is an experimental contract.".to_string()),
-            methods: BTreeMap::from([(
-                "transfer(address,uint256)".to_string(),
-                MethodDoc {
-                    details: Some("Transfer `amount` tokens to account `to`.".to_string()),
-                    params: BTreeMap::from([
-                        ("to".to_string(), "Target account.".to_string()),
-                        ("amount".to_string(), "Transfer amount.".to_string())
-                    ]),
-                    returns: BTreeMap::from([(
-                        "_0".to_string(),
-                        "A boolean value indicating whether the operation succeeded.".to_string()
-                    ),])
-                }
-            ),]),
-            events: BTreeMap::from([(
-                "Transfer(address,address,uint256)".to_string(),
-                EventDoc {
-                    details: Some("Emitted when transfer.".to_string()),
-                    params: BTreeMap::from([
-                        ("from".to_string(), "Source account.".to_string()),
-                        ("to".to_string(), "Target account.".to_string()),
-                        ("value".to_string(), "Transfer amount.".to_string()),
-                    ]),
-                }
-            ),]),
-            errors: BTreeMap::from([(
-                "InsufficientBalance(uint256,uint256)".to_string(),
-                vec![ErrorDoc {
-                    details: Some("Needed `required` but only `available` available.".to_string()),
-                    params: BTreeMap::from([
-                        ("available".to_string(), "Balance available.".to_string()),
-                        ("required".to_string(), "Requested amount to transfer.".to_string())
-                    ]),
-                }]
-            ),]),
-            title: Some("Not an ERC20.".to_string())
-        })
-    );
-
-    assert!(compiled.find_first("NotERC20").is_some());
-    let contract = compiled.remove_first("NotERC20").unwrap();
-    assert_eq!(
-        contract.userdoc,
-        Some(UserDoc {
-            version: Some(1),
-            kind: Some("user".to_string()),
-            notice: None,
-            methods: BTreeMap::from([(
-                "transfer(address,uint256)".to_string(),
-                UserDocNotice::Notice { notice: "Transfer tokens.".to_string() }
-            ),]),
-            events: BTreeMap::from([(
-                "Transfer(address,address,uint256)".to_string(),
-                UserDocNotice::Notice { notice: "Transfer some tokens.".to_string() }
-            ),]),
-            errors: BTreeMap::from([(
-                "InsufficientBalance(uint256,uint256)".to_string(),
-                vec![UserDocNotice::Notice {
-                    notice: "Insufficient balance for transfer.".to_string()
-                }]
-            ),]),
-        })
-    );
-    assert_eq!(
-        contract.devdoc,
-        Some(DevDoc {
-            version: Some(1),
-            kind: Some("dev".to_string()),
-            author: None,
-            details: None,
-            custom_experimental: None,
-            methods: BTreeMap::from([(
-                "transfer(address,uint256)".to_string(),
-                MethodDoc {
-                    details: Some("Transfer `amount` tokens to account `to`.".to_string()),
-                    params: BTreeMap::from([
-                        ("to".to_string(), "Target account.".to_string()),
-                        ("amount".to_string(), "Transfer amount.".to_string())
-                    ]),
-                    returns: BTreeMap::from([(
-                        "_0".to_string(),
-                        "A boolean value indicating whether the operation succeeded.".to_string()
-                    ),])
-                }
-            ),]),
-            events: BTreeMap::new(),
-            errors: BTreeMap::from([(
-                "InsufficientBalance(uint256,uint256)".to_string(),
-                vec![ErrorDoc {
-                    details: Some("Needed `required` but only `available` available.".to_string()),
-                    params: BTreeMap::from([
-                        ("available".to_string(), "Balance available.".to_string()),
-                        ("required".to_string(), "Requested amount to transfer.".to_string())
-                    ]),
-                }]
-            ),]),
-            title: None
-        })
-    );
-}
-
-#[test]
-fn test_relative_cache_entries() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-    let _a = project
-        .add_source(
-            "A",
-            r"
-pragma solidity ^0.8.10;
-contract A { }
-",
-        )
-        .unwrap();
-    let _b = project
-        .add_source(
-            "B",
-            r"
-pragma solidity ^0.8.10;
-contract B { }
-",
-        )
-        .unwrap();
-    let _c = project
-        .add_source(
-            "C",
-            r"
-pragma solidity ^0.8.10;
-contract C { }
-",
-        )
-        .unwrap();
-    let _d = project
-        .add_source(
-            "D",
-            r"
-pragma solidity ^0.8.10;
-contract D { }
-",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    let cache = CompilerCache::<MultiCompilerSettings>::read(project.cache_path()).unwrap();
-
-    let entries = vec![
-        PathBuf::from("src/A.sol"),
-        PathBuf::from("src/B.sol"),
-        PathBuf::from("src/C.sol"),
-        PathBuf::from("src/D.sol"),
-    ];
-    assert_eq!(entries, cache.files.keys().cloned().collect::<Vec<_>>());
-
-    let cache = CompilerCache::<MultiCompilerSettings>::read_joined(project.paths()).unwrap();
-
-    assert_eq!(
-        entries.into_iter().map(|p| project.root().join(p)).collect::<Vec<_>>(),
-        cache.files.keys().cloned().collect::<Vec<_>>()
-    );
-}
-
-#[test]
-fn test_failure_after_removing_file() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-    project
-        .add_source(
-            "A",
-            r#"
-pragma solidity ^0.8.10;
-import "./B.sol";
-contract A { }
-"#,
-        )
-        .unwrap();
-
-    project
-        .add_source(
-            "B",
-            r#"
-pragma solidity ^0.8.10;
-import "./C.sol";
-contract B { }
-"#,
-        )
-        .unwrap();
-
-    let c = project
-        .add_source(
-            "C",
-            r"
-pragma solidity ^0.8.10;
-contract C { }
-",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    fs::remove_file(c).unwrap();
-    let compiled = project.compile().unwrap();
-    assert!(compiled.has_compiler_errors());
-}
-
-#[test]
-fn can_handle_conflicting_files() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    project
-        .add_source(
-            "Greeter",
-            r"
-    pragma solidity ^0.8.10;
-
-    contract Greeter {}
-    ",
-        )
-        .unwrap();
-
-    project
-        .add_source(
-            "tokens/Greeter",
-            r"
-    pragma solidity ^0.8.10;
-
-    contract Greeter {}
-    ",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    let artifacts = compiled.artifacts().count();
-    assert_eq!(artifacts, 2);
-
-    // nothing to compile
-    let compiled = project.compile().unwrap();
-    assert!(compiled.is_unchanged());
-    let artifacts = compiled.artifacts().count();
-    assert_eq!(artifacts, 2);
-
-    let cache = CompilerCache::<MultiCompilerSettings>::read(project.cache_path()).unwrap();
-
-    let mut source_files = cache.files.keys().cloned().collect::<Vec<_>>();
-    source_files.sort_unstable();
-
-    assert_eq!(
-        source_files,
-        vec![PathBuf::from("src/Greeter.sol"), PathBuf::from("src/tokens/Greeter.sol"),]
-    );
-
-    let mut artifacts = project.artifacts_snapshot().unwrap().artifacts;
-    artifacts.strip_prefix_all(&project.paths().artifacts);
-
-    assert_eq!(artifacts.len(), 2);
-    let mut artifact_files = artifacts.artifact_files().map(|f| f.file.clone()).collect::<Vec<_>>();
-    artifact_files.sort_unstable();
-
-    assert_eq!(
-        artifact_files,
-        vec![
-            PathBuf::from("Greeter.sol/Greeter.json"),
-            PathBuf::from("tokens/Greeter.sol/Greeter.json"),
-        ]
-    );
-}
-
-//
-#[test]
-fn can_handle_conflicting_files_recompile() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    project
-        .add_source(
-            "A",
-            r"
-    pragma solidity ^0.8.10;
-
-    contract A {
-        function foo() public{}
-    }
-    ",
-        )
-        .unwrap();
-
-    project
-        .add_source(
-            "inner/A",
-            r"
-    pragma solidity ^0.8.10;
-
-    contract A {
-        function bar() public{}
-    }
-    ",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    let artifacts = compiled.artifacts().count();
-    assert_eq!(artifacts, 2);
-
-    // nothing to compile
-    let compiled = project.compile().unwrap();
-    assert!(compiled.is_unchanged());
-    let artifacts = compiled.artifacts().count();
-    assert_eq!(artifacts, 2);
-
-    let cache = CompilerCache::<MultiCompilerSettings>::read(project.cache_path()).unwrap();
-
-    let mut source_files = cache.files.keys().cloned().collect::<Vec<_>>();
-    source_files.sort_unstable();
-
-    assert_eq!(source_files, vec![PathBuf::from("src/A.sol"), PathBuf::from("src/inner/A.sol"),]);
-
-    let mut artifacts =
-        project.artifacts_snapshot().unwrap().artifacts.into_stripped_file_prefixes(project.root());
-    artifacts.strip_prefix_all(&project.paths().artifacts);
-
-    assert_eq!(artifacts.len(), 2);
-    let mut artifact_files = artifacts.artifact_files().map(|f| f.file.clone()).collect::<Vec<_>>();
-    artifact_files.sort_unstable();
-
-    let expected_files = vec![PathBuf::from("A.sol/A.json"), PathBuf::from("inner/A.sol/A.json")];
-    assert_eq!(artifact_files, expected_files);
-
-    // overwrite conflicting nested file, effectively changing it
-    project
-        .add_source(
-            "inner/A",
-            r"
-    pragma solidity ^0.8.10;
-    contract A {
-        function bar() public{}
-        function baz() public{}
-    }
-    ",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    let mut recompiled_artifacts =
-        project.artifacts_snapshot().unwrap().artifacts.into_stripped_file_prefixes(project.root());
-    recompiled_artifacts.strip_prefix_all(&project.paths().artifacts);
-
-    assert_eq!(recompiled_artifacts.len(), 2);
-    let mut artifact_files =
-        recompiled_artifacts.artifact_files().map(|f| f.file.clone()).collect::<Vec<_>>();
-    artifact_files.sort_unstable();
-    assert_eq!(artifact_files, expected_files);
-
-    // ensure that `a.sol/A.json` is unchanged
-    let outer = artifacts.find("src/A.sol".as_ref(), "A").unwrap();
-    let outer_recompiled = recompiled_artifacts.find("src/A.sol".as_ref(), "A").unwrap();
-    assert_eq!(outer, outer_recompiled);
-
-    let inner_recompiled = recompiled_artifacts.find("src/inner/A.sol".as_ref(), "A").unwrap();
-    assert!(inner_recompiled.get_abi().unwrap().functions.contains_key("baz"));
-}
-
-//
-#[test]
-fn can_handle_conflicting_files_case_sensitive_recompile() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    project
-        .add_source(
-            "a",
-            r"
-    pragma solidity ^0.8.10;
-
-    contract A {
-        function foo() public{}
-    }
-    ",
-        )
-        .unwrap();
-
-    project
-        .add_source(
-            "inner/A",
-            r"
-    pragma solidity ^0.8.10;
-
-    contract A {
-        function bar() public{}
-    }
-    ",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    let artifacts = compiled.artifacts().count();
-    assert_eq!(artifacts, 2);
-
-    // nothing to compile
-    let compiled = project.compile().unwrap();
-    assert!(compiled.is_unchanged());
-    let artifacts = compiled.artifacts().count();
-    assert_eq!(artifacts, 2);
-
-    let cache = CompilerCache::<MultiCompilerSettings>::read(project.cache_path()).unwrap();
-
-    let mut source_files = cache.files.keys().cloned().collect::<Vec<_>>();
-    source_files.sort_unstable();
-
-    assert_eq!(source_files, vec![PathBuf::from("src/a.sol"), PathBuf::from("src/inner/A.sol"),]);
-
-    let mut artifacts =
-        project.artifacts_snapshot().unwrap().artifacts.into_stripped_file_prefixes(project.root());
-    artifacts.strip_prefix_all(&project.paths().artifacts);
-
-    assert_eq!(artifacts.len(), 2);
-    let mut artifact_files = artifacts.artifact_files().map(|f| f.file.clone()).collect::<Vec<_>>();
-    artifact_files.sort_unstable();
-
-    let expected_files = vec![PathBuf::from("a.sol/A.json"), PathBuf::from("inner/A.sol/A.json")];
-    assert_eq!(artifact_files, expected_files);
-
-    // overwrite conflicting nested file, effectively changing it
-    project
-        .add_source(
-            "inner/A",
-            r"
-    pragma solidity ^0.8.10;
-    contract A {
-        function bar() public{}
-        function baz() public{}
-    }
-    ",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    let mut recompiled_artifacts =
-        project.artifacts_snapshot().unwrap().artifacts.into_stripped_file_prefixes(project.root());
-    recompiled_artifacts.strip_prefix_all(&project.paths().artifacts);
-
-    assert_eq!(recompiled_artifacts.len(), 2);
-    let mut artifact_files =
-        recompiled_artifacts.artifact_files().map(|f| f.file.clone()).collect::<Vec<_>>();
-    artifact_files.sort_unstable();
-    assert_eq!(artifact_files, expected_files);
-
-    // ensure that `a.sol/A.json` is unchanged
-    let outer = artifacts.find("src/a.sol".as_ref(), "A").unwrap();
-    let outer_recompiled = recompiled_artifacts.find("src/a.sol".as_ref(), "A").unwrap();
-    assert_eq!(outer, outer_recompiled);
-
-    let inner_recompiled = recompiled_artifacts.find("src/inner/A.sol".as_ref(), "A").unwrap();
-    assert!(inner_recompiled.get_abi().unwrap().functions.contains_key("baz"));
-}
-
-#[test]
-fn can_checkout_repo() {
-    let project = TempProject::checkout("transmissions11/solmate").unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    let _artifacts = project.artifacts_snapshot().unwrap();
-}
-
-#[test]
-fn can_detect_config_changes() {
-    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    let remapping = project.paths().libraries[0].join("remapping");
-    project
-        .paths_mut()
-        .remappings
-        .push(Remapping::from_str(&format!("remapping/={}/", remapping.display())).unwrap());
-
-    project
-        .add_source(
-            "Foo",
-            r#"
-    pragma solidity ^0.8.10;
-    import "remapping/Bar.sol";
-
-    contract Foo {}
-    "#,
-        )
-        .unwrap();
-    project
-        .add_lib(
-            "remapping/Bar",
-            r"
-    pragma solidity ^0.8.10;
-
-    contract Bar {}
-    ",
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-
-    let cache_before =
-        CompilerCache::<MultiCompilerSettings>::read(&project.paths().cache).unwrap();
-    assert_eq!(cache_before.files.len(), 2);
-
-    // nothing to compile
-    let compiled = project.compile().unwrap();
-    assert!(compiled.is_unchanged());
-
-    project.project_mut().settings.solc.settings.optimizer.enabled = Some(true);
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(!compiled.is_unchanged());
-
-    let cache_after = CompilerCache::<MultiCompilerSettings>::read(&project.paths().cache).unwrap();
-    assert_ne!(cache_before, cache_after);
-}
-
-#[test]
-fn can_add_basic_contract_and_library() {
-    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    let remapping = project.paths().libraries[0].join("remapping");
-    project
-        .paths_mut()
-        .remappings
-        .push(Remapping::from_str(&format!("remapping/={}/", remapping.display())).unwrap());
-
-    let src = project.add_basic_source("Foo.sol", "^0.8.0").unwrap();
-
-    let lib = project.add_basic_source("Bar", "^0.8.0").unwrap();
-
-    let graph = Graph::<MultiCompilerParsedSource>::resolve(project.paths()).unwrap();
-    assert_eq!(graph.files().len(), 2);
-    assert!(graph.files().contains_key(&src));
-    assert!(graph.files().contains_key(&lib));
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(compiled.find_first("Foo").is_some());
-    assert!(compiled.find_first("Bar").is_some());
-}
-
-//
-#[test]
-fn can_handle_nested_absolute_imports() {
-    let mut project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    let remapping = project.paths().libraries[0].join("myDepdendency");
-    project
-        .paths_mut()
-        .remappings
-        .push(Remapping::from_str(&format!("myDepdendency/={}/", remapping.display())).unwrap());
-
-    project
-        .add_lib(
-            "myDepdendency/src/interfaces/IConfig.sol",
-            r"
-    pragma solidity ^0.8.10;
-
-    interface IConfig {}
-    ",
-        )
-        .unwrap();
-
-    project
-        .add_lib(
-            "myDepdendency/src/Config.sol",
-            r#"
-    pragma solidity ^0.8.10;
-    import "src/interfaces/IConfig.sol";
-
-    contract Config {}
-    "#,
-        )
-        .unwrap();
-
-    project
-        .add_source(
-            "Greeter",
-            r#"
-    pragma solidity ^0.8.10;
-    import "myDepdendency/src/Config.sol";
-
-    contract Greeter {}
-    "#,
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(compiled.find_first("Greeter").is_some());
-    assert!(compiled.find_first("Config").is_some());
-    assert!(compiled.find_first("IConfig").is_some());
-}
-
-#[test]
-fn can_handle_nested_test_absolute_imports() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    project
-        .add_source(
-            "Contract.sol",
-            r"
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity =0.8.13;
-
-library Library {
-    function f(uint256 a, uint256 b) public pure returns (uint256) {
-        return a + b;
-    }
-}
-
-contract Contract {
-    uint256 c;
-
-    constructor() {
-        c = Library.f(1, 2);
-    }
-}
-            ",
-        )
-        .unwrap();
-
-    project
-        .add_test(
-            "Contract.t.sol",
-            r#"
-// SPDX-License-Identifier: UNLICENSED
-pragma solidity =0.8.13;
-
-import "src/Contract.sol";
-
-contract ContractTest {
-    function setUp() public {
-    }
-
-    function test() public {
-        new Contract();
-    }
-}
-            "#,
-        )
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    assert!(compiled.find_first("Contract").is_some());
-}
-
-// This is a repro and a regression test for https://github.com/foundry-rs/compilers/pull/45
-#[test]
-fn dirty_files_discovery() {
-    let project = TempProject::<MultiCompiler>::dapptools().unwrap();
-
-    project
-        .add_source(
-            "D.sol",
-            r"
-pragma solidity 0.8.23;
-contract D {
-    function foo() internal pure returns (uint256) {
-        return 1;
-    }
-}
-            ",
-        )
-        .unwrap();
-
-    project
-        .add_source("A.sol", "pragma solidity ^0.8.10; import './C.sol'; contract A is D {}")
-        .unwrap();
-    project
-        .add_source("B.sol", "pragma solidity ^0.8.10; import './A.sol'; contract B is D {}")
-        .unwrap();
-    project
-        .add_source("C.sol", "pragma solidity ^0.8.10; import './D.sol'; contract C is D {}")
-        .unwrap();
-
-    project.compile().unwrap();
-
-    // Change D.sol so it becomes dirty
-    project
-        .add_source(
-            "D.sol",
-            r"
-pragma solidity 0.8.23;
-contract D {
-    function foo() internal pure returns (uint256) {
-        return 2;
-    }
-}
-            ",
-        )
-        .unwrap();
-
-    let output = project.compile().unwrap();
-
-    // Check that all contracts were recompiled
-    assert_eq!(output.compiled_artifacts().len(), 4);
-}
-
-#[test]
-fn test_deterministic_metadata() {
-    let tmp_dir = tempfile::tempdir().unwrap();
-    let root = tmp_dir.path();
-    let orig_root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
-    copy_dir_all(&orig_root, tmp_dir.path()).unwrap();
-
-    let paths = ProjectPathsConfig::builder().root(root).build().unwrap();
-    let project = Project::builder()
-        .locked_version(SolcLanguage::Solidity, Version::new(0, 8, 18))
-        .paths(paths)
-        .build(MultiCompiler::default())
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-    let artifact = compiled.find_first("DappTest").unwrap();
-
-    let bytecode = artifact.bytecode.as_ref().unwrap().bytes().unwrap().clone();
-    let expected_bytecode = Bytes::from_str(
-        &std::fs::read_to_string(
-            Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-test-bytecode.txt"),
-        )
-        .unwrap(),
-    )
-    .unwrap();
-    assert_eq!(bytecode, expected_bytecode);
-}
-
-#[test]
-fn can_compile_vyper_with_cache() {
-    let tmp_dir = tempfile::tempdir().unwrap();
-    let root = tmp_dir.path();
-    let cache = root.join("cache").join(SOLIDITY_FILES_CACHE_FILENAME);
-
-    let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
-    let orig_root = manifest_dir.join("../../test-data/vyper-sample");
-    copy_dir_all(&orig_root, tmp_dir.path()).unwrap();
-
-    let paths = ProjectPathsConfig::builder()
-        .cache(cache)
-        .sources(root.join("src"))
-        .artifacts(root.join("out"))
-        .root(root)
-        .build::<VyperLanguage>()
-        .unwrap();
-
-    let settings = VyperSettings {
-        output_selection: OutputSelection::default_output_selection(),
-        ..Default::default()
-    };
-
-    // first compile
-    let project = ProjectBuilder::<Vyper>::new(Default::default())
-        .settings(settings)
-        .paths(paths)
-        .build(VYPER.clone())
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    assert!(compiled.find_first("Counter").is_some());
-    compiled.assert_success();
-
-    // cache is used when nothing to compile
-    let compiled = project.compile().unwrap();
-    assert!(compiled.find_first("Counter").is_some());
-    assert!(compiled.is_unchanged());
-
-    // deleted artifacts cause recompile even with cache
-    std::fs::remove_dir_all(project.artifacts_path()).unwrap();
-    let compiled = project.compile().unwrap();
-    assert!(compiled.find_first("Counter").is_some());
-    assert!(!compiled.is_unchanged());
-}
-
-#[test]
-fn yul_remappings_ignored() {
-    let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/yul-sample");
-    // Add dummy remapping.
-    let paths = ProjectPathsConfig::builder().sources(root.clone()).remapping(Remapping {
-        context: None,
-        name: "@openzeppelin".to_string(),
-        path: root.to_string_lossy().to_string(),
-    });
-    let project = TempProject::<SolcCompiler>::new(paths).unwrap();
-
-    let compiled = project.compile().unwrap();
-    compiled.assert_success();
-}
-
-#[test]
-fn test_vyper_imports() {
-    let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/vyper-imports");
-
-    let paths = ProjectPathsConfig::builder()
-        .sources(root.join("src"))
-        .root(root)
-        .build::<VyperLanguage>()
-        .unwrap();
-
-    let settings = VyperSettings {
-        output_selection: OutputSelection::default_output_selection(),
-        ..Default::default()
-    };
-
-    let project = ProjectBuilder::<Vyper>::new(Default::default())
-        .settings(settings)
-        .paths(paths)
-        .no_artifacts()
-        .build(VYPER.clone())
-        .unwrap();
-
-    project.compile().unwrap().assert_success();
-}
-
-#[test]
-fn test_can_compile_multi() {
-    let root =
-        canonicalize(Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/multi-sample"))
-            .unwrap();
-
-    let paths = ProjectPathsConfig::builder()
-        .sources(root.join("src"))
-        .root(&root)
-        .build::<MultiCompilerLanguage>()
-        .unwrap();
-
-    let settings = MultiCompilerSettings {
-        vyper: VyperSettings {
-            output_selection: OutputSelection::default_output_selection(),
-            ..Default::default()
-        },
-        solc: Default::default(),
-    };
-
-    let compiler =
-        MultiCompiler { solc: Some(SolcCompiler::default()), vyper: Some(VYPER.clone()) };
-
-    let project = ProjectBuilder::<MultiCompiler>::new(Default::default())
-        .settings(settings)
-        .paths(paths)
-        .no_artifacts()
-        .build(compiler)
-        .unwrap();
-
-    let compiled = project.compile().unwrap();
-    assert!(compiled.find(&root.join("src/Counter.sol"), "Counter").is_some());
-    assert!(compiled.find(&root.join("src/Counter.vy"), "Counter").is_some());
-    compiled.assert_success();
-}
-
-// This is a reproduction of https://github.com/foundry-rs/compilers/issues/47
-#[cfg(feature = "svm-solc")]
-#[test]
-fn remapping_trailing_slash_issue47() {
-    use std::sync::Arc;
-
-    use foundry_compilers_artifacts::{EvmVersion, Source, Sources};
-
-    let mut sources = Sources::new();
-    sources.insert(
-        PathBuf::from("./C.sol"),
-        Source {
-            content: Arc::new(r#"import "@project/D.sol"; contract C {}"#.to_string()),
-            kind: Default::default(),
-        },
-    );
-    sources.insert(
-        PathBuf::from("./D.sol"),
-        Source { content: Arc::new(r#"contract D {}"#.to_string()), kind: Default::default() },
-    );
-
-    let mut settings = Settings { evm_version: Some(EvmVersion::Byzantium), ..Default::default() };
-    settings.remappings.push(Remapping {
-        context: None,
-        name: "@project".into(),
-        path: ".".into(),
-    });
-    let input = SolcInput { language: SolcLanguage::Solidity, sources, settings };
-    let compiler = Solc::find_or_install(&Version::new(0, 6, 8)).unwrap();
-    let output = compiler.compile_exact(&input).unwrap();
-    assert!(!output.has_error());
-}
diff --git a/test-data/zksync/yul-sample/SimpleStore.yul b/test-data/zksync/yul-sample/SimpleStore.yul
new file mode 100644
index 00000000..cef5d9bc
--- /dev/null
+++ b/test-data/zksync/yul-sample/SimpleStore.yul
@@ -0,0 +1,11 @@
+object "SimpleStore" {
+    code {
+        datacopy(0, dataoffset("SimpleStore_deployed"), datasize("SimpleStore_deployed"))
+        return(0, datasize("SimpleStore_deployed"))
+    }
+    object "SimpleStore_deployed" {
+        code {
+            calldatacopy(0, 0, 36) // write calldata to memory
+        }
+    }
+}
diff --git a/src/compilers/tests/zksync.rs b/tests/zksync_test.rs
similarity index 81%
rename from src/compilers/tests/zksync.rs
rename to tests/zksync_test.rs
index 95d3ef2f..97b15f72 100644
--- a/src/compilers/tests/zksync.rs
+++ b/tests/zksync_test.rs
@@ -6,36 +6,39 @@ use std::{
 };
 
 use foundry_compilers::{
-    buildinfo::BuildInfo,
-    cache::CompilerCache,
-    project_util::*,
-    resolver::parse::SolData,
-    zksolc::{
-        input::ZkSolcInput,
-        settings::{ZkSolcError, ZkSolcWarning},
-        ZkSolc, ZkSolcCompiler, ZkSolcSettings,
-    },
-    zksync::{self, artifact_output::zk::ZkArtifactOutput},
-    Graph, ProjectBuilder, ProjectPathsConfig,
+    buildinfo::BuildInfo, cache::CompilerCache, project_util::*, resolver::parse::SolData,
+    CompilerOutput, Graph, ProjectBuilder, ProjectPathsConfig,
 };
 use foundry_compilers_artifacts::Remapping;
+use foundry_compilers_zksync::{
+    artifacts::zksolc::{contract::Contract, error::Error},
+    compilers::{
+        compilers::zksolc::{
+            input::ZkSolcInput,
+            settings::{ZkSolcError, ZkSolcWarning},
+            ZkSolc, ZkSolcCompiler, ZkSolcSettings,
+        },
+        zksync::artifact_output::zk::ZkArtifactOutput,
+    },
+};
+
 #[test]
 fn zksync_can_compile_dapp_sample() {
-    let _ = tracing_subscriber::fmt()
-        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
-        .try_init()
-        .ok();
-    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
+    // let _ = tracing_subscriber::fmt()
+    //     .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+    //     .try_init()
+    //     .ok();
+    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
     let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib"));
 
     let project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::new(paths).unwrap();
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.find_first("Dapp").is_some());
     compiled.assert_success();
 
     // nothing to compile
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.find_first("Dapp").is_some());
     assert!(compiled.is_unchanged());
 
@@ -43,7 +46,8 @@ fn zksync_can_compile_dapp_sample() {
 
     // delete artifacts
     std::fs::remove_dir_all(&project.paths().artifacts).unwrap();
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    //let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.find_first("Dapp").is_some());
    assert!(!compiled.is_unchanged());
 
@@ -52,10 +56,10 @@
 }
 
 fn test_zksync_can_compile_contract_with_suppressed_errors(compiler: ZkSolcCompiler) {
-    let _ = tracing_subscriber::fmt()
-        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
-        .try_init()
-        .ok();
+    // let _ = tracing_subscriber::fmt()
+    //     .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+    //     .try_init()
+    //     .ok();
     let mut project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::dapptools().unwrap();
 
     project.project_mut().compiler = compiler;
@@ -75,13 +79,13 @@ fn test_zksync_can_compile_contract_with_suppressed_errors(compiler: ZkSolcCompi
         )
         .unwrap();
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.has_compiler_errors());
 
     project.project_mut().settings.settings.suppressed_errors =
         HashSet::from([ZkSolcError::SendTransfer]);
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
     assert!(compiled.find_first("Erroneous").is_some());
 }
@@ -101,10 +105,10 @@ fn zksync_pre_1_5_7_can_compile_contract_with_suppressed_errors() {
 }
 
 fn test_zksync_can_compile_contract_with_suppressed_warnings(compiler: ZkSolcCompiler) {
-    let _ = tracing_subscriber::fmt()
-        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
-        .try_init()
-        .ok();
+    // let _ = tracing_subscriber::fmt()
+    //     .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+    //     .try_init()
+    //     .ok();
     let mut project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::dapptools().unwrap();
 
     project.project_mut().compiler = compiler;
@@ -123,32 +127,32 @@ fn test_zksync_can_compile_contract_with_suppressed_warnings(compiler: ZkSolcCom
         )
         .unwrap();
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
     assert!(
         compiled
-            .compiler_output
+            .output()
             .errors
             .iter()
            .any(|err| err.is_warning() && err.message.contains("tx.origin")),
         "{:#?}",
-        compiled.compiler_output.errors
+        compiled.output().errors
     );
 
     project.project_mut().settings.settings.suppressed_warnings =
         HashSet::from([ZkSolcWarning::TxOrigin]);
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
     assert!(compiled.find_first("Warning").is_some());
     assert!(
         !compiled
-            .compiler_output
+            .output()
             .errors
             .iter()
            .any(|err| err.is_warning() && err.message.contains("tx.origin")),
         "{:#?}",
-        compiled.compiler_output.errors
+        compiled.output().errors
     );
 }
 
@@ -168,10 +172,10 @@ fn zksync_pre_1_5_7_can_compile_contract_with_suppressed_warnings() {
 
 #[test]
 fn zksync_can_compile_dapp_detect_changes_in_libs() {
-    let _ = tracing_subscriber::fmt()
-        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
-        .try_init()
-        .ok();
+    // let _ = tracing_subscriber::fmt()
+    //     .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+    //     .try_init()
+    //     .ok();
     let mut project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::dapptools().unwrap();
 
     let remapping = project.paths().libraries[0].join("remapping");
@@ -207,13 +211,13 @@ fn zksync_can_compile_dapp_detect_changes_in_libs() {
     assert_eq!(graph.files().len(), 2);
     assert_eq!(graph.files().clone(), HashMap::from([(src, 0), (lib, 1),]));
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.find_first("Foo").is_some());
     assert!(compiled.find_first("Bar").is_some());
     compiled.assert_success();
 
     // nothing to compile
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.find_first("Foo").is_some());
     assert!(compiled.is_unchanged());
 
@@ -236,7 +240,7 @@ fn zksync_can_compile_dapp_detect_changes_in_libs() {
     let graph = Graph::<SolData>::resolve(project.paths()).unwrap();
     assert_eq!(graph.files().len(), 2);
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.find_first("Foo").is_some());
     assert!(compiled.find_first("Bar").is_some());
     // ensure change is detected
@@ -280,13 +284,13 @@ fn zksync_can_compile_dapp_detect_changes_in_sources() {
     assert_eq!(graph.files().clone(), HashMap::from([(base, 0), (src, 1),]));
     assert_eq!(graph.imported_nodes(1).to_vec(), vec![0]);
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
     assert!(compiled.find_first("DssSpellTest").is_some());
     assert!(compiled.find_first("DssSpellTestBase").is_some());
 
     // nothing to compile
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.is_unchanged());
     assert!(compiled.find_first("DssSpellTest").is_some());
     assert!(compiled.find_first("DssSpellTestBase").is_some());
 
@@ -315,7 +319,7 @@ fn zksync_can_compile_dapp_detect_changes_in_sources() {
     let graph = Graph::<SolData>::resolve(project.paths()).unwrap();
     assert_eq!(graph.files().len(), 2);
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.find_first("DssSpellTest").is_some());
     assert!(compiled.find_first("DssSpellTestBase").is_some());
     // ensure change is detected
@@ -357,7 +361,7 @@ contract B { }
         )
         .unwrap();
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
 
     let info_dir = project.project().build_info_path();
@@ -366,11 +370,9 @@ contract B { }
     let mut build_info_count = 0;
     for entry in fs::read_dir(info_dir).unwrap() {
         let info =
-            BuildInfo::::read(
-                &entry.unwrap().path(),
-            )
-            .unwrap();
-        assert!(info.output.zksync_solc_version.is_some());
+            BuildInfo::<ZkSolcInput, CompilerOutput<Error, Contract>>::read(&entry.unwrap().path())
+                .unwrap();
+        assert!(info.output.metadata.contains_key("zksyncSolcVersion"));
         build_info_count += 1;
     }
     assert_eq!(build_info_count, 1);
@@ -403,7 +405,7 @@ contract B { }
         )
         .unwrap();
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
 
     let info_dir = project.project().build_info_path();
@@ -412,10 +414,8 @@ contract B { }
     let mut build_info_count = 0;
    for entry in fs::read_dir(info_dir).unwrap() {
         let _info =
-            BuildInfo::::read(
-                &entry.unwrap().path(),
-            )
-            .unwrap();
+            BuildInfo::<ZkSolcInput, CompilerOutput<Error, Contract>>::read(&entry.unwrap().path())
+                .unwrap();
         build_info_count += 1;
     }
     assert_eq!(build_info_count, 1);
@@ -492,9 +492,9 @@ contract Util {}
 
     let project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::create_new(contracts_dir, inner).unwrap();
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     assert!(compiled.has_compiler_errors());
-    assert!(compiled.compiler_output.errors.iter().any(|error| error
+    assert!(compiled.output().errors.iter().any(|error| error
         .formatted_message
         .as_ref()
         .map_or(false, |msg| msg.contains("File outside of allowed directories"))));
@@ -559,25 +559,25 @@ contract Util {}
 
     let project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::create_new(contracts_dir, inner).unwrap();
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
 }
 
 #[test]
 fn zksync_can_compile_yul_sample() {
-    let _ = tracing_subscriber::fmt()
-        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
-        .try_init()
-        .ok();
-    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../test-data/zksync/yul-sample");
+    // let _ = tracing_subscriber::fmt()
+    //     .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
+    //     .try_init()
+    //     .ok();
+    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/zksync/yul-sample");
     let paths = ProjectPathsConfig::builder().sources(root);
 
     let project = TempProject::<ZkSolcCompiler, ZkArtifactOutput>::new(paths).unwrap();
 
-    let compiled = zksync::project_compile(project.project()).unwrap();
+    let compiled = project.compile().unwrap();
     compiled.assert_success();
 
     let simple_store_artifact = compiled
-        .compiled_artifacts
+        .compiled_artifacts()
         .values()
         .find_map(|contracts| {
             contracts