From c3e7b0790a39c4bddb955f78e10fea99f0fbcba5 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sun, 26 May 2024 22:49:00 +0300 Subject: [PATCH 01/24] wip --- Cargo.toml | 2 +- src/artifacts/mod.rs | 46 ++++-- src/compile/mod.rs | 21 ++- src/compile/project.rs | 235 ++++++++++++++++--------------- src/compilers/mod.rs | 77 ++++++---- src/compilers/solc/mod.rs | 186 +++++++++++++++---------- src/config.rs | 20 +-- src/lib.rs | 31 ++--- src/project_util/mod.rs | 2 +- src/resolver/mod.rs | 286 ++++++++++++++++++-------------------- src/resolver/parse.rs | 4 +- 11 files changed, 498 insertions(+), 412 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index c07efba4..ae3e5bef 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -95,7 +95,7 @@ path = "tests/mocked.rs" required-features = ["full", "project-util"] [features] -default = ["rustls"] +default = ["rustls", "svm-solc"] full = ["async", "svm-solc"] diff --git a/src/artifacts/mod.rs b/src/artifacts/mod.rs index 12381613..ed8bb08e 100644 --- a/src/artifacts/mod.rs +++ b/src/artifacts/mod.rs @@ -3,15 +3,15 @@ #![allow(ambiguous_glob_reexports)] use crate::{ - compile::*, error::SolcIoError, output::ErrorFilter, remappings::Remapping, utils, - ProjectPathsConfig, SolcError, + compile::*, compilers::solc::SolcLanguages, error::SolcIoError, output::ErrorFilter, + remappings::Remapping, utils, ProjectPathsConfig, SolcError, }; use alloy_primitives::hex; use md5::Digest; use semver::Version; use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; use std::{ - collections::{BTreeMap, HashSet}, + collections::{BTreeMap, HashMap, HashSet}, fmt, fs, path::{Path, PathBuf}, str::FromStr, @@ -49,10 +49,10 @@ pub type Contracts = FileToContractsMap; pub type Sources = BTreeMap; /// A set of different Solc installations with their version and the sources to be compiled -pub(crate) type VersionedSources = Vec<(C, Version, Sources)>; +pub(crate) type VersionedSources = HashMap>; /// A set of different Solc installations with their version and the sources to be compiled -pub(crate) type VersionedFilteredSources = Vec<(C, Version, FilteredSources)>; +pub(crate) type VersionedFilteredSources = HashMap>; pub const SOLIDITY: &str = "Solidity"; pub const YUL: &str = "Yul"; @@ -60,7 +60,7 @@ pub const YUL: &str = "Yul"; /// Input type `solc` expects. 
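The `VersionedSources` alias changes here from a flat list of (compiler, version, sources) tuples to a map keyed by language first and compiler version second. A minimal stand-alone sketch of that shape, with `&str` and `String` standing in for the crate's `Language`, `Version` and `Source` types:

    use std::collections::{BTreeMap, HashMap};
    use std::path::PathBuf;

    // Stand-ins for the crate's Source and semver::Version types.
    type Sources = BTreeMap<PathBuf, String>;
    type VersionedSources = HashMap<&'static str, HashMap<String, Sources>>;

    fn example() -> VersionedSources {
        let mut grouped: VersionedSources = HashMap::new();
        grouped
            .entry("Solidity")
            .or_default()
            .entry("0.8.18".to_string())
            .or_default()
            .insert(PathBuf::from("src/Counter.sol"), String::from("contract Counter {}"));
        grouped
    }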
#[derive(Clone, Debug, Serialize, Deserialize)] pub struct SolcInput { - pub language: String, + pub language: SolcLanguages, pub sources: Sources, pub settings: Settings, } @@ -69,7 +69,7 @@ pub struct SolcInput { impl Default for SolcInput { fn default() -> Self { SolcInput { - language: SOLIDITY.to_string(), + language: SolcLanguages::Solidity, sources: Sources::default(), settings: Settings::default(), } @@ -77,6 +77,16 @@ impl Default for SolcInput { } impl SolcInput { + pub fn new(language: SolcLanguages, sources: Sources, mut settings: Settings) -> Self { + if language == SolcLanguages::Yul { + if !settings.remappings.is_empty() { + warn!("omitting remappings supplied for the yul sources"); + settings.remappings = vec![]; + } + } + Self { language, sources, settings } + } + /// This will remove/adjust values in the [`SolcInput`] that are not compatible with this /// version pub fn sanitize(&mut self, version: &Version) { @@ -125,7 +135,19 @@ impl SolcInput { /// The flag indicating whether the current [SolcInput] is /// constructed for the yul sources pub fn is_yul(&self) -> bool { - self.language == YUL + self.language == SolcLanguages::Yul + } + + pub fn with_remappings(mut self, remappings: Vec) -> Self { + if self.language == SolcLanguages::Yul { + if !remappings.is_empty() { + warn!("omitting remappings supplied for the yul sources"); + } + } else { + self.settings.remappings = remappings; + } + + self } } @@ -137,7 +159,7 @@ impl SolcInput { /// the verified contracts #[derive(Clone, Debug, Serialize, Deserialize)] pub struct StandardJsonCompilerInput { - pub language: String, + pub language: SolcLanguages, #[serde(with = "serde_helpers::tuple_vec_map")] pub sources: Vec<(PathBuf, Source)>, pub settings: Settings, @@ -147,7 +169,7 @@ pub struct StandardJsonCompilerInput { impl StandardJsonCompilerInput { pub fn new(sources: Vec<(PathBuf, Source)>, settings: Settings) -> Self { - Self { language: SOLIDITY.to_string(), sources, settings } + Self { language: SolcLanguages::Solidity, sources, settings } } /// Normalizes the EVM version used in the settings to be up to the latest one @@ -297,6 +319,10 @@ impl Settings { model_checker.show_unsupported = None; } } + + if let Some(ref mut evm_version) = self.evm_version { + self.evm_version = evm_version.normalize_version_solc(version); + } } /// Inserts a set of `ContractOutputSelection` diff --git a/src/compile/mod.rs b/src/compile/mod.rs index f2fcb6c9..7964b99f 100644 --- a/src/compile/mod.rs +++ b/src/compile/mod.rs @@ -1,6 +1,6 @@ use crate::{ artifacts::Source, - compilers::CompilerInput, + compilers::solc::SolcLanguages, error::{Result, SolcError}, resolver::parse::SolData, utils, CompilerOutput, SolcInput, @@ -386,12 +386,21 @@ impl Solc { pub fn compile_source(&self, path: impl AsRef) -> Result { let path = path.as_ref(); let mut res: CompilerOutput = Default::default(); - for input in - SolcInput::build(Source::read_sol_yul_from(path)?, Default::default(), &self.version) - { - let output = self.compile(&input)?; - res.merge(output) + + let solidity_sources = Source::read_all_from(path, &["sol"])?; + let yul_sources = Source::read_all_from(path, &["yul"])?; + + if !solidity_sources.is_empty() { + let input = + SolcInput::new(SolcLanguages::Solidity, solidity_sources, Default::default()); + res.merge(self.compile(&input)?) } + + if !yul_sources.is_empty() { + let input = SolcInput::new(SolcLanguages::Yul, yul_sources, Default::default()); + res.merge(self.compile(&input)?) 
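The new `SolcInput::new` constructor (and `with_remappings`) drops any remappings supplied for Yul inputs and logs a warning instead of passing them through. A stand-alone sketch of that guard, with `Vec<String>` standing in for `Vec<Remapping>` and `eprintln!` in place of the crate's `warn!` macro:

    fn sanitize_remappings(is_yul: bool, remappings: Vec<String>) -> Vec<String> {
        if is_yul && !remappings.is_empty() {
            // Mirrors the warning emitted by the patch before clearing the remappings.
            eprintln!("omitting remappings supplied for the yul sources");
            return Vec::new();
        }
        remappings
    }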
+ } + Ok(res) } diff --git a/src/compile/project.rs b/src/compile/project.rs index 14e29825..62e0ef61 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -105,17 +105,16 @@ use crate::{ artifacts::{VersionedFilteredSources, VersionedSources}, buildinfo::RawBuildInfo, cache::ArtifactsCache, - compilers::{Compiler, CompilerInput, CompilerVersionManager}, + compilers::{Compiler, CompilerInput, Language}, error::Result, filter::SparseOutputFilter, output::AggregatedCompilerOutput, report, resolver::GraphEdges, - ArtifactOutput, CompilerConfig, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, - Sources, + ArtifactOutput, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Sources, }; use rayon::prelude::*; -use std::{path::PathBuf, sync::Arc, time::Instant}; +use std::{path::PathBuf, time::Instant}; #[derive(Debug)] pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { @@ -123,7 +122,7 @@ pub struct ProjectCompiler<'a, T: ArtifactOutput, C: Compiler> { edges: GraphEdges, project: &'a Project, /// how to compile all the sources - sources: CompilerSources, + sources: CompilerSources, /// How to select solc [`crate::artifacts::CompilerOutput`] for files sparse_output: SparseOutputFilter, } @@ -142,39 +141,29 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. pub fn with_sources(project: &'a Project, sources: Sources) -> Result { - match &project.compiler_config { - CompilerConfig::Specific(compiler) => { - Self::with_sources_and_compiler(project, sources, compiler.clone()) - } - CompilerConfig::AutoDetect(vm) => { - Self::with_sources_and_version_manager(project, sources, vm.clone()) - } - } + Self::with_sources_autodetect(project, sources) } /// Compiles the sources automatically detecting versions via [CompilerVersionManager] - pub fn with_sources_and_version_manager>( - project: &'a Project, - sources: Sources, - version_manager: VM, - ) -> Result { + pub fn with_sources_autodetect(project: &'a Project, sources: Sources) -> Result { let graph = Graph::resolve_sources(&project.paths, sources)?; - let (versions, edges) = graph.into_sources_by_version(project.offline, &version_manager)?; + let (sources, edges) = graph.into_sources_by_version(project.offline, &project.compiler)?; - let sources_by_version = versions.get(&version_manager)?; + let jobs_cnt = sources.values().map(|v| v.len()).sum::(); - let sources = if project.solc_jobs > 1 && sources_by_version.len() > 1 { + let sources = if project.solc_jobs > 1 && jobs_cnt > 1 { // if there are multiple different versions, and we can use multiple jobs we can compile // them in parallel - CompilerSources::Parallel(sources_by_version, project.solc_jobs) + CompilerSources::Parallel(sources, project.solc_jobs) } else { - CompilerSources::Sequential(sources_by_version) + CompilerSources::Sequential(sources) }; Ok(Self { edges, project, sources, sparse_output: Default::default() }) } /// Compiles the sources with a pinned [Compiler] instance + #[cfg(ignore)] pub fn with_sources_and_compiler( project: &'a Project, sources: Sources, @@ -254,7 +243,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { #[derive(Debug)] struct PreprocessedState<'a, T: ArtifactOutput, C: Compiler> { /// Contains all the sources to compile. 
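With sources grouped by language and then by version, the number of compiler invocations is the total count of (version, sources) buckets across all languages; this is the `jobs_cnt` that decides between sequential and parallel compilation. A generic sketch of that count:

    use std::collections::HashMap;

    // Works for any language/version/sources types because only the bucket counts matter.
    fn job_count<L, V, S>(sources: &HashMap<L, HashMap<V, S>>) -> usize {
        sources.values().map(|per_version| per_version.len()).sum()
    }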
- sources: FilteredCompilerSources, + sources: FilteredCompilerSources, /// Cache that holds `CacheEntry` objects if caching is enabled and the project is recompiled cache: ArtifactsCache<'a, T, C>, @@ -269,6 +258,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> PreprocessedState<'a, T, C> { let PreprocessedState { sources, cache, sparse_output } = self; let project = cache.project(); let mut output = sources.compile( + &project.compiler, &project.settings, &project.paths, sparse_output, @@ -390,15 +380,15 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { /// Determines how the `solc <-> sources` pairs are executed #[derive(Debug, Clone)] -enum CompilerSources { +enum CompilerSources { /// Compile all these sequentially - Sequential(VersionedSources), + Sequential(VersionedSources), /// Compile all these in parallel using a certain amount of jobs #[allow(dead_code)] - Parallel(VersionedSources, usize), + Parallel(VersionedSources, usize), } -impl CompilerSources { +impl CompilerSources { /// Converts all `\\` separators to `/` /// /// This effectively ensures that `solc` can find imported files like `/src/Cheats.sol` in the @@ -427,27 +417,36 @@ impl CompilerSources { } /// Filters out all sources that don't need to be compiled, see [`ArtifactsCache::filter`] - fn filtered( + fn filtered>( self, cache: &mut ArtifactsCache<'_, T, C>, - ) -> FilteredCompilerSources { + ) -> FilteredCompilerSources { fn filtered_sources( - sources: VersionedSources, + sources: VersionedSources, cache: &mut ArtifactsCache<'_, T, C>, - ) -> VersionedFilteredSources { + ) -> VersionedFilteredSources { cache.remove_dirty_sources(); sources .into_iter() - .map(|(compiler, version, sources)| { - trace!("Filtering {} sources for {}", sources.len(), version); - let sources_to_compile = cache.filter(sources, &version); - trace!( - "Detected {} sources to compile {:?}", - sources_to_compile.dirty().count(), - sources_to_compile.dirty_files().collect::>() - ); - (compiler, version, sources_to_compile) + .map(|(language, versioned_sources)| { + ( + language, + versioned_sources + .into_iter() + .map(|(version, sources)| { + trace!("Filtering {} sources for {}", sources.len(), version); + let sources_to_compile = cache.filter(sources, &version); + trace!( + "Detected {} sources to compile {:?}", + sources_to_compile.dirty().count(), + sources_to_compile.dirty_files().collect::>() + ); + + (version, sources_to_compile) + }) + .collect(), + ) }) .collect() } @@ -465,30 +464,44 @@ impl CompilerSources { /// Determines how the `solc <-> sources` pairs are executed #[derive(Debug, Clone)] -enum FilteredCompilerSources { +enum FilteredCompilerSources { /// Compile all these sequentially - Sequential(VersionedFilteredSources), + Sequential(VersionedFilteredSources), /// Compile all these in parallel using a certain amount of jobs - Parallel(VersionedFilteredSources, usize), + Parallel(VersionedFilteredSources, usize), } -impl FilteredCompilerSources { +impl FilteredCompilerSources { /// Compiles all the files with `Solc` - fn compile( + fn compile>( self, + compiler: &C, settings: &::Settings, - paths: &ProjectPathsConfig, + paths: &ProjectPathsConfig, sparse_output: SparseOutputFilter, graph: &GraphEdges, create_build_info: bool, ) -> Result> { match self { - FilteredCompilerSources::Sequential(input) => { - compile_sequential(input, settings, paths, sparse_output, graph, create_build_info) - } - FilteredCompilerSources::Parallel(input, j) => { - compile_parallel(input, j, settings, paths, sparse_output, 
graph, create_build_info) - } + FilteredCompilerSources::Sequential(input) => compile_sequential( + compiler, + input, + settings, + paths, + sparse_output, + graph, + create_build_info, + ), + FilteredCompilerSources::Parallel(input, j) => compile_parallel( + compiler, + input, + j, + settings, + paths, + sparse_output, + graph, + create_build_info, + ), } } @@ -504,9 +517,10 @@ impl FilteredCompilerSources { /// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s fn compile_sequential( - input: VersionedFilteredSources, + compiler: &C, + input: VersionedFilteredSources, settings: &C::Settings, - paths: &ProjectPathsConfig, + paths: &ProjectPathsConfig, sparse_output: SparseOutputFilter, graph: &GraphEdges, create_build_info: bool, @@ -518,39 +532,44 @@ fn compile_sequential( let mut include_paths = paths.include_paths.clone(); include_paths.extend(graph.include_paths().clone()); - for (compiler, version, filtered_sources) in input { - if filtered_sources.is_empty() { - // nothing to compile - trace!("skip {} for empty sources set", version); - continue; - } - trace!("compiling {} sources with \"{}\"", filtered_sources.len(), version,); + for (language, versioned_sources) in input { + for (version, filtered_sources) in versioned_sources { + if filtered_sources.is_empty() { + // nothing to compile + trace!("skip {} for empty sources set", version); + continue; + } + trace!("compiling {} sources with \"{}\"", filtered_sources.len(), version,); + + let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); - let compiler = compiler - .with_base_path(paths.root.clone()) - .with_allowed_paths(paths.allowed_paths.clone()) - .with_include_paths(include_paths.clone()); + // depending on the composition of the filtered sources, the output selection can be + // optimized + let mut opt_settings = settings.clone(); + let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); - let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); + let mut input = C::Input::build(sources, opt_settings, language.clone(), &version) + .with_base_path(paths.root.clone()) + .with_allowed_paths(paths.allowed_paths.clone()) + .with_include_paths(include_paths.clone()) + .with_remappings(paths.remappings.clone()); - // depending on the composition of the filtered sources, the output selection can be - // optimized - let mut opt_settings = settings.clone(); - let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); + input.strip_prefix(paths.root.as_path()); - for input in C::Input::build(sources, opt_settings, &version) { let actually_dirty = input .sources() .keys() .filter(|f| dirty_files.contains(f)) .cloned() .collect::>(); + if actually_dirty.is_empty() { // nothing to compile for this particular language, all dirty files are in the other // language set trace!("skip {} run due to empty source set", version); continue; } + trace!( "calling {} with {} sources {:?}", version, @@ -558,17 +577,14 @@ fn compile_sequential( input.sources().keys() ); - let mut input = input.with_remappings(paths.remappings.clone()); - input.strip_prefix(paths.root.as_path()); - let start = Instant::now(); report::compiler_spawn( &input.compiler_name(), - compiler.version(), + input.version(), actually_dirty.as_slice(), ); let mut output = compiler.compile(&input)?; - report::compiler_success(&input.compiler_name(), compiler.version(), &start.elapsed()); + report::compiler_success(&input.compiler_name(), 
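Each (language, version) job is skipped when none of its sources are dirty; the check is a plain intersection of the input's source paths with the dirty-file list. A stand-alone sketch with `String` standing in for the crate's `Source` type:

    use std::collections::BTreeMap;
    use std::path::PathBuf;

    fn actually_dirty(sources: &BTreeMap<PathBuf, String>, dirty_files: &[PathBuf]) -> Vec<PathBuf> {
        sources.keys().filter(|f| dirty_files.contains(f)).cloned().collect()
    }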
&version, &start.elapsed()); // trace!("compiled input, output has error: {}", output.has_error()); trace!("received compiler output: {:?}", output.contracts.keys()); @@ -588,53 +604,53 @@ fn compile_sequential( /// compiles the input set using `num_jobs` threads fn compile_parallel( - versioned_sources: VersionedFilteredSources, + compiler: &C, + input: VersionedFilteredSources, num_jobs: usize, settings: &C::Settings, - paths: &ProjectPathsConfig, + paths: &ProjectPathsConfig, sparse_output: SparseOutputFilter, graph: &GraphEdges, create_build_info: bool, ) -> Result> { debug_assert!(num_jobs > 1); - trace!( - "compile {} sources in parallel using up to {} solc jobs", - versioned_sources.len(), - num_jobs - ); + trace!("compile {} sources in parallel using up to {} solc jobs", input.len(), num_jobs); // Include additional paths collected during graph resolution. let mut include_paths = paths.include_paths.clone(); include_paths.extend(graph.include_paths().clone()); - let mut jobs = Vec::with_capacity(versioned_sources.len()); - for (compiler, version, filtered_sources) in versioned_sources { - if filtered_sources.is_empty() { - // nothing to compile - trace!("skip {} for empty sources set", version); - continue; - } + let mut jobs = Vec::with_capacity(input.len()); + for (language, versioned_sources) in input { + for (version, filtered_sources) in versioned_sources { + if filtered_sources.is_empty() { + // nothing to compile + trace!("skip {} for empty sources set", version); + continue; + } + + let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); - let compiler = compiler - .with_base_path(paths.root.clone()) - .with_allowed_paths(paths.allowed_paths.clone()) - .with_include_paths(include_paths.clone()); + // depending on the composition of the filtered sources, the output selection can be + // optimized + let mut opt_settings = settings.clone(); + let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); - let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); - let compiler = Arc::new(compiler); + let mut input = C::Input::build(sources, opt_settings, language.clone(), &version) + .with_base_path(paths.root.clone()) + .with_allowed_paths(paths.allowed_paths.clone()) + .with_include_paths(include_paths.clone()) + .with_remappings(paths.remappings.clone()); - // depending on the composition of the filtered sources, the output selection can be - // optimized - let mut opt_settings = settings.clone(); - let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); + input.strip_prefix(paths.root.as_path()); - for input in C::Input::build(sources, settings.clone(), &version) { let actually_dirty = input .sources() .keys() .filter(|f| dirty_files.contains(f)) .cloned() .collect::>(); + if actually_dirty.is_empty() { // nothing to compile for this particular language, all dirty files are in the other // language set @@ -648,10 +664,7 @@ fn compile_parallel( input.sources().keys() ); - let mut input = input.with_remappings(paths.remappings.clone()); - input.strip_prefix(paths.root.as_path()); - - jobs.push((compiler.clone(), version.clone(), input, actually_dirty)); + jobs.push((input, actually_dirty)); } } @@ -665,29 +678,29 @@ fn compile_parallel( let outputs = pool.install(move || { jobs.into_par_iter() - .map(move |(compiler, version, input, actually_dirty)| { + .map(move |(input, actually_dirty)| { // set the reporter on this thread let _guard = report::set_scoped(&scoped_report); trace!( 
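The parallel path hands the prepared jobs to a bounded rayon pool (rayon is already a dependency of this module). A self-contained sketch of that pattern, where the `job + 1` body is only a placeholder for the real compile call:

    use rayon::prelude::*;

    fn run_jobs(jobs: Vec<u64>, num_jobs: usize) -> Vec<u64> {
        let pool = rayon::ThreadPoolBuilder::new()
            .num_threads(num_jobs)
            .build()
            .expect("failed to build rayon thread pool");
        // install() runs the closure inside the pool, so into_par_iter uses its threads.
        pool.install(|| jobs.into_par_iter().map(|job| job + 1).collect())
    }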
"calling solc `{}` with {} sources: {:?}", - version, + input.version(), input.sources().len(), input.sources().keys() ); let start = Instant::now(); report::compiler_spawn( &input.compiler_name(), - compiler.version(), + input.version(), actually_dirty.as_slice(), ); compiler.compile(&input).map(move |output| { report::compiler_success( &input.compiler_name(), - compiler.version(), + input.version(), &start.elapsed(), ); - (version, input, output) + (input.version().clone(), input, output) }) }) .collect::, _>>() diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 4e3d2c0a..6d542642 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -12,12 +12,16 @@ use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ collections::{BTreeMap, BTreeSet, HashSet}, fmt::{Debug, Display}, + hash::Hash, path::{Path, PathBuf}, }; pub mod solc; mod version_manager; + +#[cfg(ignore)] pub mod vyper; + pub use version_manager::{CompilerVersion, CompilerVersionManager, VersionManagerError}; /// Compilation settings including evm_version, output_selection, etc. @@ -43,21 +47,49 @@ pub trait CompilerSettings: /// Input of a compiler, including sources and settings used for their compilation. pub trait CompilerInput: Serialize + Send + Sync + Sized { type Settings: CompilerSettings; + type Language: Language; /// Constructs one or multiple inputs from given sources set. Might return multiple inputs in /// cases when sources need to be divided into sets per language (Yul + Solidity for example). - fn build(sources: Sources, settings: Self::Settings, version: &Version) -> Vec; + fn build( + sources: Sources, + settings: Self::Settings, + language: Self::Language, + version: &Version, + ) -> Self; /// Returns reference to sources included into this input. fn sources(&self) -> &Sources; + fn language(&self) -> Self::Language; + + fn version(&self) -> &Version; + + /// Returns compiler name used by reporters to display output during compilation. + fn compiler_name(&self) -> String; + /// Method which might be invoked to add remappings to the input. fn with_remappings(self, _remappings: Vec) -> Self { self } - /// Returns compiler name used by reporters to display output during compilation. - fn compiler_name(&self) -> String; + /// Builder method to set the base path for the compiler. Primarily used by solc implementation + /// to se --base-path. + fn with_base_path(self, _base_path: PathBuf) -> Self { + self + } + + /// Builder method to set the allowed paths for the compiler. Primarily used by solc + /// implementation to set --allow-paths. + fn with_allowed_paths(self, _allowed_paths: BTreeSet) -> Self { + self + } + + /// Builder method to set the include paths for the compiler. Primarily used by solc + /// implementation to set --include-paths. + fn with_include_paths(self, _include_paths: BTreeSet) -> Self { + self + } /// Strips given prefix from all paths. fn strip_prefix(&mut self, base: &Path); @@ -67,9 +99,12 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized { /// given source. Used by path resolver to resolve imports or determine compiler versions needed to /// compiler given sources. pub trait ParsedSource: Debug + Sized + Send { + type Language: Language; + fn parse(content: &str, file: &Path) -> Self; fn version_req(&self) -> Option<&VersionReq>; fn resolve_imports(&self, paths: &ProjectPathsConfig) -> Result>; + fn language(&self) -> Self::Language; } /// Error returned by compiler. Might also represent a warning or informational message. 
@@ -136,45 +171,31 @@ impl Default for CompilerOutput { } } +pub trait Language: Hash + Eq + Clone + Debug { + /// Extensions of source files recognized by the language set. + const FILE_EXTENSIONS: &'static [&'static str]; +} + /// The main compiler abstraction trait. Currently mostly represents a wrapper around compiler /// binary aware of the version and able to compile given input into [CompilerOutput] including /// artifacts and errors. pub trait Compiler: Send + Sync + Clone { - /// Extensions of source files recognized by the compiler. - const FILE_EXTENSIONS: &'static [&'static str]; - /// Input type for the compiler. Contains settings and sources to be compiled. - type Input: CompilerInput; + type Input: CompilerInput; /// Error type returned by the compiler. type CompilationError: CompilationError; /// Source parser used for resolving imports and version requirements. - type ParsedSource: ParsedSource; + type ParsedSource: ParsedSource; /// Compiler settings. type Settings: CompilerSettings; + /// Enum of languages supported by the compiler. + type Language: Language; /// Main entrypoint for the compiler. Compiles given input into [CompilerOutput]. Takes /// ownership over the input and returns back version with potential modifications made to it. /// Returned input is always the one which was seen by the binary. fn compile(&self, input: &Self::Input) -> Result>; - /// Returns the version of the compiler. - fn version(&self) -> &Version; - - /// Builder method to set the base path for the compiler. Primarily used by solc implementation - /// to se --base-path. - fn with_base_path(self, _base_path: PathBuf) -> Self { - self - } - - /// Builder method to set the allowed paths for the compiler. Primarily used by solc - /// implementation to set --allow-paths. - fn with_allowed_paths(self, _allowed_paths: BTreeSet) -> Self { - self - } - - /// Builder method to set the include paths for the compiler. Primarily used by solc - /// implementation to set --include-paths. - fn with_include_paths(self, _include_paths: BTreeSet) -> Self { - self - } + /// Returns all versions available locally and remotely. 
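File extensions now live on the new `Language` trait rather than on `Compiler`, so a multi-language compiler can report a different extension set per language. A stand-alone mirror (the real trait additionally requires `Hash + Eq + Clone + Debug`):

    use std::path::Path;

    trait Language {
        const FILE_EXTENSIONS: &'static [&'static str];
    }

    struct Solidity;

    impl Language for Solidity {
        const FILE_EXTENSIONS: &'static [&'static str] = &["sol", "yul"];
    }

    // Helper showing how a resolver can filter files for any language.
    fn is_source_file<L: Language>(path: &Path) -> bool {
        path.extension()
            .and_then(|ext| ext.to_str())
            .map_or(false, |ext| L::FILE_EXTENSIONS.iter().any(|e| *e == ext))
    }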
+ fn available_versions(&self, language: &Self::Language) -> Vec; } diff --git a/src/compilers/solc/mod.rs b/src/compilers/solc/mod.rs index 332bf535..ffda26fa 100644 --- a/src/compilers/solc/mod.rs +++ b/src/compilers/solc/mod.rs @@ -6,34 +6,75 @@ pub use version_manager::SolcVersionManager; use itertools::Itertools; use super::{ - CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, ParsedSource, + version_manager::CompilerVersion, CompilationError, Compiler, CompilerInput, CompilerOutput, + CompilerSettings, Language, ParsedSource, }; use crate::{ artifacts::{ output_selection::OutputSelection, Error, Settings as SolcSettings, SolcInput, Sources, - SOLIDITY, YUL, }, - error::Result, + error::{Result, SolcError}, remappings::Remapping, resolver::parse::SolData, + utils::RuntimeOrHandle, Solc, SOLC_EXTENSIONS, }; use semver::Version; +use serde::{Deserialize, Serialize}; use std::{ - collections::{BTreeMap, BTreeSet}, + collections::BTreeSet, path::{Path, PathBuf}, }; -impl Compiler for Solc { +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[non_exhaustive] +pub enum SolcLanguages { + Solidity, + Yul, +} + +impl Language for SolcLanguages { const FILE_EXTENSIONS: &'static [&'static str] = SOLC_EXTENSIONS; +} - type Input = SolcInput; +impl Compiler for Solc { + type Input = SolcVerionedInput; type CompilationError = crate::artifacts::Error; type ParsedSource = SolData; type Settings = SolcSettings; + type Language = SolcLanguages; fn compile(&self, input: &Self::Input) -> Result> { - let solc_output = self.compile(&input)?; + let solc = + if let Some(solc) = Solc::find_svm_installed_version(input.version().to_string())? { + solc + } else { + #[cfg(test)] + crate::take_solc_installer_lock!(_lock); + + let version = if !input.version.pre.is_empty() || !input.version.build.is_empty() { + Version::new(input.version.major, input.version.minor, input.version.patch) + } else { + input.version.clone() + }; + + trace!("blocking installing solc version \"{}\"", version); + crate::report::solc_installation_start(&version); + // The async version `svm::install` is used instead of `svm::blocking_intsall` + // because the underlying `reqwest::blocking::Client` does not behave well + // inside of a Tokio runtime. See: https://github.com/seanmonstar/reqwest/issues/1017 + match RuntimeOrHandle::new().block_on(svm::install(&version)) { + Ok(path) => { + crate::report::solc_installation_success(&version); + Ok(Solc::new_with_version(path, version)) + } + Err(err) => { + crate::report::solc_installation_error(&version, &err.to_string()); + Err(SolcError::msg(format!("failed to install {}", version))) + } + }? 
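Before asking svm to install, the requested version is stripped of pre-release and build metadata, presumably because installable releases are identified by plain `x.y.z`. A sketch of that normalization using the `semver` crate (already a dependency):

    use semver::Version;

    // Mirrors the normalization in the patch: drop pre-release/build metadata before install.
    fn installable_version(requested: &Version) -> Version {
        if !requested.pre.is_empty() || !requested.build.is_empty() {
            Version::new(requested.major, requested.minor, requested.patch)
        } else {
            requested.clone()
        }
    }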
+ }; + let solc_output = solc.compile(&input.input)?; let output = CompilerOutput { errors: solc_output.errors, @@ -44,85 +85,65 @@ impl Compiler for Solc { Ok(output) } - fn version(&self) -> &Version { - &self.version - } - - fn with_allowed_paths(mut self, allowed_paths: BTreeSet) -> Self { - self.allow_paths = allowed_paths; - self - } - - fn with_base_path(mut self, base_path: PathBuf) -> Self { - self.base_path = Some(base_path); - self + fn available_versions(&self, _language: &Self::Language) -> Vec { + Solc::installed_versions().into_iter().map(CompilerVersion::Installed).collect() } +} - fn with_include_paths(mut self, include_paths: BTreeSet) -> Self { - self.include_paths = include_paths; - self - } +#[derive(Debug, Clone, Serialize)] +pub struct SolcVerionedInput { + #[serde(skip)] + pub version: Version, + #[serde(flatten)] + pub input: SolcInput, + #[serde(skip)] + pub allowed_paths: BTreeSet, + #[serde(skip)] + pub base_path: PathBuf, + #[serde(skip)] + pub include_paths: BTreeSet, } -impl CompilerInput for SolcInput { +impl CompilerInput for SolcVerionedInput { type Settings = SolcSettings; + type Language = SolcLanguages; /// Creates a new [CompilerInput]s with default settings and the given sources /// /// A [CompilerInput] expects a language setting, supported by solc are solidity or yul. /// In case the `sources` is a mix of solidity and yul files, 2 CompilerInputs are returned - fn build(sources: Sources, mut settings: Self::Settings, version: &Version) -> Vec { - settings.sanitize(version); - if let Some(ref mut evm_version) = settings.evm_version { - settings.evm_version = evm_version.normalize_version_solc(version); + fn build( + sources: Sources, + settings: Self::Settings, + language: Self::Language, + version: &Version, + ) -> Self { + let input = SolcInput::new(language, sources, settings).sanitized(version); + + Self { + version: version.clone(), + input, + allowed_paths: BTreeSet::new(), + base_path: PathBuf::new(), + include_paths: BTreeSet::new(), } - - let mut solidity_sources = BTreeMap::new(); - let mut yul_sources = BTreeMap::new(); - for (path, source) in sources { - if path.extension() == Some(std::ffi::OsStr::new("yul")) { - yul_sources.insert(path, source); - } else { - solidity_sources.insert(path, source); - } - } - let mut res = Vec::new(); - if !solidity_sources.is_empty() { - res.push(Self { - language: SOLIDITY.to_string(), - sources: solidity_sources, - settings: settings.clone(), - }); - } - if !yul_sources.is_empty() { - if !settings.remappings.is_empty() { - warn!("omitting remappings supplied for the yul sources"); - settings.remappings = vec![]; - } - - if let Some(debug) = settings.debug.as_mut() { - if debug.revert_strings.is_some() { - warn!("omitting revertStrings supplied for the yul sources"); - debug.revert_strings = None; - } - } - res.push(Self { language: YUL.to_string(), sources: yul_sources, settings }); - } - res } fn sources(&self) -> &Sources { - &self.sources + &self.input.sources + } + + fn language(&self) -> Self::Language { + self.input.language.clone() + } + + fn version(&self) -> &Version { + &self.version } fn with_remappings(mut self, remappings: Vec) -> Self { - if self.language == YUL { - if !remappings.is_empty() { - warn!("omitting remappings supplied for the yul sources"); - } - } else { - self.settings.remappings = remappings; - } + self.input = self.input.with_remappings(remappings); + self } @@ -131,7 +152,22 @@ impl CompilerInput for SolcInput { } fn strip_prefix(&mut self, base: &Path) { - 
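`SolcVerionedInput` bundles the standard-JSON `SolcInput` with the resolved version and path settings, but the extra fields are `#[serde(skip)]`-ed and the inner input is `#[serde(flatten)]`-ed, so the payload serialized for solc stays a plain standard-JSON input. A minimal stand-alone sketch (assumes `serde`/`serde_json`; field types are simplified):

    use serde::Serialize;

    #[derive(Serialize)]
    struct Inner {
        language: String,
    }

    #[derive(Serialize)]
    struct VersionedInput {
        #[serde(skip)]
        version: String,
        #[serde(flatten)]
        input: Inner,
    }

    fn main() {
        let input = VersionedInput {
            version: "0.8.18".to_string(),
            input: Inner { language: "Solidity".to_string() },
        };
        // Only the flattened inner input ends up in the JSON handed to solc.
        assert_eq!(serde_json::to_string(&input).unwrap(), r#"{"language":"Solidity"}"#);
    }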
self.strip_prefix(base) + self.input.strip_prefix(base); + } + + fn with_allowed_paths(mut self, allowed_paths: BTreeSet) -> Self { + self.allowed_paths = allowed_paths; + self + } + + fn with_base_path(mut self, base_path: PathBuf) -> Self { + self.base_path = base_path; + self + } + + fn with_include_paths(mut self, include_paths: BTreeSet) -> Self { + self.include_paths = include_paths; + self } } @@ -168,6 +204,8 @@ impl CompilerSettings for SolcSettings { } impl ParsedSource for SolData { + type Language = SolcLanguages; + fn parse(content: &str, file: &std::path::Path) -> Self { SolData::parse(content, file) } @@ -179,6 +217,14 @@ impl ParsedSource for SolData { fn resolve_imports(&self, _paths: &crate::ProjectPathsConfig) -> Result> { return Ok(self.imports.iter().map(|i| i.data().path().to_path_buf()).collect_vec()); } + + fn language(&self) -> Self::Language { + if self.is_yul { + SolcLanguages::Yul + } else { + SolcLanguages::Solidity + } + } } impl CompilationError for Error { diff --git a/src/config.rs b/src/config.rs index 67c20417..0cb9c2e6 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,12 +1,12 @@ use crate::{ artifacts::{output_selection::ContractOutputSelection, Settings}, cache::SOLIDITY_FILES_CACHE_FILENAME, - compilers::Compiler, + compilers::{solc::SolcLanguages, Language}, error::{Result, SolcError, SolcIoError}, flatten::{collect_ordered_deps, combine_version_pragmas}, remappings::Remapping, resolver::{Graph, SolImportAlias}, - utils, Solc, Source, Sources, + utils, Source, Sources, }; use serde::{Deserialize, Serialize}; use std::{ @@ -19,7 +19,7 @@ use std::{ /// Where to find all files or where to write them #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ProjectPathsConfig { +pub struct ProjectPathsConfig { /// Project root pub root: PathBuf, /// Path to the cache, if any @@ -506,23 +506,23 @@ impl ProjectPathsConfig { } } -impl ProjectPathsConfig { +impl ProjectPathsConfig { /// Returns all sources found under the project's configured `sources` path pub fn read_sources(&self) -> Result { trace!("reading all sources from \"{}\"", self.sources.display()); - Ok(Source::read_all_from(&self.sources, C::FILE_EXTENSIONS)?) + Ok(Source::read_all_from(&self.sources, L::FILE_EXTENSIONS)?) } /// Returns all sources found under the project's configured `test` path pub fn read_tests(&self) -> Result { trace!("reading all tests from \"{}\"", self.tests.display()); - Ok(Source::read_all_from(&self.tests, C::FILE_EXTENSIONS)?) + Ok(Source::read_all_from(&self.tests, L::FILE_EXTENSIONS)?) } /// Returns all sources found under the project's configured `script` path pub fn read_scripts(&self) -> Result { trace!("reading all scripts from \"{}\"", self.scripts.display()); - Ok(Source::read_all_from(&self.scripts, C::FILE_EXTENSIONS)?) + Ok(Source::read_all_from(&self.scripts, L::FILE_EXTENSIONS)?) } /// Returns true if the there is at least one solidity file in this config. 
@@ -535,9 +535,9 @@ impl ProjectPathsConfig { /// Returns an iterator that yields all solidity file paths for `Self::sources`, `Self::tests` /// and `Self::scripts` pub fn input_files_iter(&self) -> impl Iterator + '_ { - utils::source_files_iter(&self.sources, C::FILE_EXTENSIONS) - .chain(utils::source_files_iter(&self.tests, C::FILE_EXTENSIONS)) - .chain(utils::source_files_iter(&self.scripts, C::FILE_EXTENSIONS)) + utils::source_files_iter(&self.sources, L::FILE_EXTENSIONS) + .chain(utils::source_files_iter(&self.tests, L::FILE_EXTENSIONS)) + .chain(utils::source_files_iter(&self.scripts, L::FILE_EXTENSIONS)) } /// Returns the combined set solidity file paths for `Self::sources`, `Self::tests` and diff --git a/src/lib.rs b/src/lib.rs index 8d1573ca..7671fa69 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,7 +25,7 @@ pub mod cache; pub mod flatten; pub mod hh; -use compilers::{Compiler, CompilerSettings, CompilerVersionManager}; +use compilers::{Compiler, CompilerSettings}; pub use filter::SparseOutputFileFilter; pub use hh::{HardhatArtifact, HardhatArtifacts}; @@ -71,32 +71,18 @@ use std::{ collections::{BTreeMap, HashMap, HashSet}, fs, path::{Path, PathBuf}, - sync::Arc, }; /// Utilities for creating, mocking and testing of (temporary) projects #[cfg(feature = "project-util")] pub mod project_util; -#[derive(Debug, Clone)] -pub enum CompilerConfig { - Specific(C), - AutoDetect(Arc>), -} - -#[cfg(feature = "svm-solc")] -impl Default for CompilerConfig { - fn default() -> Self { - CompilerConfig::AutoDetect(Arc::new(compilers::solc::SolcVersionManager)) - } -} - /// Represents a project workspace and handles `solc` compiling of all contracts in that workspace. #[derive(Clone, Debug)] pub struct Project { - pub compiler_config: CompilerConfig, + pub compiler: C, /// The layout of the project - pub paths: ProjectPathsConfig, + pub paths: ProjectPathsConfig, /// The compiler settings pub settings: C::Settings, /// Whether caching is enabled @@ -404,6 +390,7 @@ impl Project { /// project.compile_with_version(&solc, sources)?; /// # Ok::<(), Box>(()) /// ``` + #[cfg(ignore)] pub fn compile_with_version( &self, compiler: &C, @@ -508,7 +495,7 @@ impl Project { T: Clone, C: Clone, { - let graph = Graph::resolve(&self.paths)?; + let graph = Graph::::resolve(&self.paths)?; let mut contracts: HashMap> = HashMap::new(); for file in graph.files().keys() { @@ -558,7 +545,7 @@ impl Project { pub struct ProjectBuilder { /// The layout of the - paths: Option>, + paths: Option>, /// How solc invocation should be configured. settings: Option, /// Whether caching is enabled, default is true. 
@@ -602,7 +589,7 @@ impl ProjectBuilder { } #[must_use] - pub fn paths(mut self, paths: ProjectPathsConfig) -> Self { + pub fn paths(mut self, paths: ProjectPathsConfig) -> Self { self.paths = Some(paths); self } @@ -745,7 +732,7 @@ impl ProjectBuilder { } } - pub fn build(self, compiler_config: CompilerConfig) -> Result> { + pub fn build(self, compiler: C) -> Result> { let Self { paths, cached, @@ -769,7 +756,7 @@ impl ProjectBuilder { } Ok(Project { - compiler_config, + compiler, paths, cached, build_info, diff --git a/src/project_util/mod.rs b/src/project_util/mod.rs index 01048acb..e4e24488 100644 --- a/src/project_util/mod.rs +++ b/src/project_util/mod.rs @@ -86,7 +86,7 @@ impl TempProject { let solc = crate::compilers::solc::SolcVersionManager .get_or_install(&Version::parse(solc.as_ref()).unwrap()) .unwrap(); - self.inner.compiler_config = CompilerConfig::Specific(solc); + self.inner.compiler = CompilerConfig::Specific(solc); self } diff --git a/src/resolver/mod.rs b/src/resolver/mod.rs index 7e368209..2046a64f 100644 --- a/src/resolver/mod.rs +++ b/src/resolver/mod.rs @@ -47,7 +47,8 @@ //! which is defined on a per source file basis. use crate::{ - compilers::{Compiler, CompilerVersion, CompilerVersionManager, ParsedSource}, + artifacts::VersionedSources, + compilers::{Compiler, CompilerVersion, Language, ParsedSource}, error::Result, utils, ProjectPathsConfig, SolcError, Source, Sources, }; @@ -304,8 +305,8 @@ impl Graph { } /// Resolves a number of sources within the given config - pub fn resolve_sources>( - paths: &ProjectPathsConfig, + pub fn resolve_sources( + paths: &ProjectPathsConfig, sources: Sources, ) -> Result> { /// checks if the given target path was already resolved, if so it adds its id to the list @@ -447,27 +448,22 @@ impl Graph { } /// Resolves the dependencies of a project's source contracts - pub fn resolve>( - paths: &ProjectPathsConfig, - ) -> Result> { + pub fn resolve(paths: &ProjectPathsConfig) -> Result> { Self::resolve_sources(paths, paths.read_input_files()?) } } -impl Graph { +impl> Graph { /// Consumes the nodes of the graph and returns all input files together with their appropriate /// version and the edges of the graph /// /// First we determine the compatible version for each input file (from sources and test folder, /// see `Self::resolve`) and then we add all resolved library imports. 
- pub fn into_sources_by_version( + pub fn into_sources_by_version>( self, offline: bool, - version_manager: &VM, - ) -> Result<(VersionedSources, GraphEdges)> - where - VM::Compiler: Compiler, - { + compiler: &C, + ) -> Result<(VersionedSources, GraphEdges)> { /// insert the imports of the given node into the sources map /// There can be following graph: /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)` @@ -498,36 +494,43 @@ impl Graph { } } - let versioned_nodes = self.get_input_node_versions(offline, version_manager)?; + let versioned_nodes_by_lang = self.get_input_node_versions(offline, compiler)?; let (nodes, edges) = self.split(); - let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len()); - let mut all_nodes = nodes.into_iter().enumerate().collect::>(); + let mut resulted_sources = HashMap::new(); + // determine the `Sources` set for each solc version - for (version, input_node_indices) in versioned_nodes { - let mut sources = Sources::new(); - - // all input nodes will be processed - let mut processed_sources = input_node_indices.iter().copied().collect(); - - // we only process input nodes (from sources, tests for example) - for idx in input_node_indices { - // insert the input node in the sources set and remove it from the available set - let (path, source) = all_nodes.get(&idx).cloned().expect("node is preset. qed"); - sources.insert(path, source); - insert_imports( - idx, - &mut all_nodes, - &mut sources, - &edges.edges, - &mut processed_sources, - ); + for (language, versioned_nodes) in versioned_nodes_by_lang { + let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len()); + + for (version, input_node_indices) in versioned_nodes { + let mut sources = Sources::new(); + + // all input nodes will be processed + let mut processed_sources = input_node_indices.iter().copied().collect(); + + // we only process input nodes (from sources, tests for example) + for idx in input_node_indices { + // insert the input node in the sources set and remove it from the available set + let (path, source) = all_nodes.get(&idx).cloned().expect("node is preset. qed"); + sources.insert(path, source); + insert_imports( + idx, + &mut all_nodes, + &mut sources, + &edges.edges, + &mut processed_sources, + ); + } + versioned_sources.insert(version.into(), sources); } - versioned_sources.insert(version, sources); + + resulted_sources.insert(language, versioned_sources); } - Ok((VersionedSources { inner: versioned_sources, offline }, edges)) + + Ok((resulted_sources, edges)) } /// Writes the list of imported files into the given formatter: @@ -574,6 +577,16 @@ impl Graph { } } + fn nodes_by_language(&self) -> HashMap> { + let mut nodes = HashMap::new(); + + for (id, node) in self.nodes.iter().enumerate() { + nodes.entry(node.data.language()).or_insert_with(Vec::new).push(id); + } + + nodes + } + /// Returns a map of versions together with the input nodes that are compatible with that /// version. /// @@ -584,90 +597,105 @@ impl Graph { /// /// This also attempts to prefer local installations over remote available. /// If `offline` is set to `true` then only already installed. 
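`nodes_by_language` buckets the graph's node indices by the language reported by each parsed source, so that version resolution can then run once per language. A stand-alone sketch of that grouping, with `&str` standing in for the `Language` type:

    use std::collections::HashMap;

    fn nodes_by_language(node_languages: &[&'static str]) -> HashMap<&'static str, Vec<usize>> {
        let mut nodes: HashMap<&'static str, Vec<usize>> = HashMap::new();
        for (id, language) in node_languages.iter().copied().enumerate() {
            nodes.entry(language).or_insert_with(Vec::new).push(id);
        }
        nodes
    }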
- fn get_input_node_versions< - VM: CompilerVersionManager, - C: Compiler, - >( + fn get_input_node_versions>( &self, offline: bool, - version_manager: &VM, - ) -> Result>> { + compiler: &C, + ) -> Result>>> { trace!("resolving input node versions"); - // this is likely called by an application and will be eventually printed so we don't exit - // on first error, instead gather all the errors and return a bundled error message instead - let mut errors = Vec::new(); - // we also don't want duplicate error diagnostic - let mut erroneous_nodes = HashSet::with_capacity(self.edges.num_input_files); - - // the sorted list of all versions - let all_versions = if offline { - version_manager.installed_versions() - } else { - version_manager.all_versions() - }; - // stores all versions and their nodes that can be compiled - let mut versioned_nodes = HashMap::new(); - - // stores all files and the versions they're compatible with - let mut all_candidates = Vec::with_capacity(self.edges.num_input_files); - // walking through the node's dep tree and filtering the versions along the way - for idx in 0..self.edges.num_input_files { - let mut candidates = all_versions.iter().collect::>(); - // remove all incompatible versions from the candidates list by checking the node and - // all its imports - self.retain_compatible_versions(idx, &mut candidates); - - if candidates.is_empty() && !erroneous_nodes.contains(&idx) { - // check if the version is even valid - let node = self.node(idx); - if let Err(version_err) = node.check_available_version(&all_versions, offline) { - let f = utils::source_name(&node.path, &self.root).display(); - errors.push(format!("Encountered invalid solc version in {f}: {version_err}")); - } else { - let mut msg = String::new(); - self.format_imports_list(idx, &mut msg).unwrap(); - errors.push(format!("Found incompatible Solidity versions:\n{msg}")); - } - - erroneous_nodes.insert(idx); + let mut resulted_nodes = HashMap::new(); + + for (language, nodes) in self.nodes_by_language() { + // this is likely called by an application and will be eventually printed so we don't + // exit on first error, instead gather all the errors and return a bundled + // error message instead + let mut errors = Vec::new(); + // we also don't want duplicate error diagnostic + let mut erroneous_nodes = HashSet::with_capacity(self.edges.num_input_files); + + // the sorted list of all versions + let all_versions = if offline { + compiler + .available_versions(&language) + .into_iter() + .filter(|v| v.is_installed()) + .collect() } else { - // found viable candidates, pick the most recent version that's already installed - let candidate = - if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) { - candidates[pos] + compiler.available_versions(&language) + }; + + // stores all versions and their nodes that can be compiled + let mut versioned_nodes = HashMap::new(); + + // stores all files and the versions they're compatible with + let mut all_candidates = Vec::with_capacity(self.edges.num_input_files); + // walking through the node's dep tree and filtering the versions along the way + for idx in nodes { + let mut candidates = all_versions.iter().collect::>(); + // remove all incompatible versions from the candidates list by checking the node + // and all its imports + self.retain_compatible_versions(idx, &mut candidates); + + if candidates.is_empty() && !erroneous_nodes.contains(&idx) { + // check if the version is even valid + let node = self.node(idx); + if let Err(version_err) = 
node.check_available_version(&all_versions, offline) { + let f = utils::source_name(&node.path, &self.root).display(); + errors.push(format!( + "Encountered invalid solc version in {f}: {version_err}" + )); } else { - candidates.last().expect("not empty; qed.") + let mut msg = String::new(); + self.format_imports_list(idx, &mut msg).unwrap(); + errors.push(format!("Found incompatible Solidity versions:\n{msg}")); } - .clone(); - // also store all possible candidates to optimize the set - all_candidates.push((idx, candidates.into_iter().collect::>())); + erroneous_nodes.insert(idx); + } else { + // found viable candidates, pick the most recent version that's already + // installed + let candidate = + if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) { + candidates[pos] + } else { + candidates.last().expect("not empty; qed.") + } + .clone(); + + // also store all possible candidates to optimize the set + all_candidates.push((idx, candidates.into_iter().collect::>())); + + versioned_nodes + .entry(candidate) + .or_insert_with(|| Vec::with_capacity(1)) + .push(idx); + } + } - versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(idx); + // detected multiple versions but there might still exist a single version that + // satisfies all sources + if versioned_nodes.len() > 1 { + versioned_nodes = Self::resolve_multiple_versions(all_candidates); } - } - // detected multiple versions but there might still exist a single version that satisfies - // all sources - if versioned_nodes.len() > 1 { - versioned_nodes = Self::resolve_multiple_versions(all_candidates); - } + if versioned_nodes.len() == 1 { + trace!( + "found exact solc version for all sources \"{}\"", + versioned_nodes.keys().next().unwrap() + ); + } - if versioned_nodes.len() == 1 { - trace!( - "found exact solc version for all sources \"{}\"", - versioned_nodes.keys().next().unwrap() - ); + if errors.is_empty() { + trace!("resolved {} versions {:?}", versioned_nodes.len(), versioned_nodes.keys()); + resulted_nodes.insert(language, versioned_nodes); + } else { + error!("failed to resolve versions"); + return Err(SolcError::msg(errors.join("\n"))); + } } - if errors.is_empty() { - trace!("resolved {} versions {:?}", versioned_nodes.len(), versioned_nodes.keys()); - Ok(versioned_nodes) - } else { - error!("failed to resolve versions"); - Err(SolcError::msg(errors.join("\n"))) - } + Ok(resulted_nodes) } /// Tries to find the "best" set of versions to nodes, See [Solc version @@ -781,52 +809,6 @@ impl<'a, D> Iterator for NodesIter<'a, D> { } } -/// Container type for solc versions and their compatible sources -#[derive(Debug)] -pub struct VersionedSources { - inner: HashMap, - offline: bool, -} - -impl VersionedSources { - /// Resolves or installs the corresponding `Solc` installation. - pub fn get( - self, - version_manager: &VM, - ) -> Result> { - let mut sources_by_version = Vec::new(); - for (version, sources) in self.inner { - let compiler = if !version.is_installed() { - if self.offline { - return Err(SolcError::msg(format!( - "missing solc \"{version}\" installation in offline mode" - ))); - } else { - // install missing solc - version_manager.install(version.as_ref())? - } - } else { - // find installed svm - version_manager.get_installed(version.as_ref())? 
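When several versions satisfy a file and all of its imports, the resolver prefers the most recent candidate that is already installed and only falls back to the newest remote one otherwise. A stand-alone sketch of that choice, with a simplified `Candidate` enum standing in for `CompilerVersion`:

    // `String` replaces semver::Version for brevity.
    #[derive(Clone, Debug)]
    enum Candidate {
        Installed(String),
        Remote(String),
    }

    impl Candidate {
        fn is_installed(&self) -> bool {
            matches!(self, Candidate::Installed(_))
        }
    }

    // `candidates` is assumed to be sorted from oldest to newest, as in the resolver.
    fn pick(candidates: &[Candidate]) -> Option<Candidate> {
        if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
            return Some(candidates[pos].clone());
        }
        candidates.last().cloned()
    }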
- }; - - /*if self.offline { - trace!("skip verifying solc checksum for {} in offline mode", compiler.solc.display()); - } else { - trace!("verifying solc checksum for {}", compiler.solc.display()); - if let Err(err) = compiler.verify_checksum() { - trace!(?err, "corrupted solc version, redownloading \"{}\"", version); - Solc::blocking_install(version.as_ref())?; - trace!("reinstalled solc: \"{}\"", version); - } - }*/ - - sources_by_version.push((compiler, version.into(), sources)); - } - Ok(sources_by_version) - } -} - #[derive(Debug)] pub struct Node { /// path of the solidity file diff --git a/src/resolver/parse.rs b/src/resolver/parse.rs index c333b107..d8f0bfcf 100644 --- a/src/resolver/parse.rs +++ b/src/resolver/parse.rs @@ -18,6 +18,7 @@ pub struct SolData { pub imports: Vec>, pub version_req: Option, pub libraries: Vec, + pub is_yul: bool, } impl SolData { @@ -37,6 +38,7 @@ impl SolData { /// This will attempt to parse the solidity AST and extract the imports and version pragma. If /// parsing fails, we'll fall back to extract that info via regex pub fn parse(content: &str, file: &Path) -> Self { + let is_yul = file.extension().map_or(false, |ext| ext == "yul"); let mut version = None; let mut experimental = None; let mut imports = Vec::>::new(); @@ -113,7 +115,7 @@ impl SolData { }); let version_req = version.as_ref().and_then(|v| Self::parse_version_req(v.data()).ok()); - Self { version_req, version, experimental, imports, license, libraries } + Self { version_req, version, experimental, imports, license, libraries, is_yul } } /// Returns the corresponding SemVer version requirement for the solidity version. From 0c2c97ae9957a06ff9ac18ef6f77f0422a2c1c7d Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 27 May 2024 15:09:12 +0400 Subject: [PATCH 02/24] [wip] allow multiple languages for compilers --- benches/compile_many.rs | 2 +- src/artifacts/mod.rs | 29 ++++++++- src/buildinfo.rs | 3 +- src/cache.rs | 2 +- src/compile/mod.rs | 55 +++++++++-------- src/compile/project.rs | 25 ++++---- src/compilers/mod.rs | 43 +++++++++++-- src/compilers/solc/mod.rs | 84 ++++++++++++-------------- src/compilers/solc/version_manager.rs | 84 -------------------------- src/compilers/version_manager.rs | 87 --------------------------- src/config.rs | 6 +- src/error.rs | 4 -- src/lib.rs | 8 +-- src/project_util/mod.rs | 23 +++---- src/resolver/mod.rs | 20 +++--- tests/project.rs | 38 ++++++------ 16 files changed, 197 insertions(+), 316 deletions(-) delete mode 100644 src/compilers/solc/version_manager.rs delete mode 100644 src/compilers/version_manager.rs diff --git a/benches/compile_many.rs b/benches/compile_many.rs index a2793ba1..1cd1818d 100644 --- a/benches/compile_many.rs +++ b/benches/compile_many.rs @@ -12,7 +12,7 @@ use std::path::Path; fn compile_many_benchmark(c: &mut Criterion) { let inputs = load_compiler_inputs(); - let solc = SolcVersionManager::default().get_or_install(&Version::new(0, 8, 0)).unwrap(); + let solc = Solc::find_or_install((&Version::new(0, 8, 0)).unwrap(); let mut group = c.benchmark_group("compile many"); group.sample_size(10); diff --git a/src/artifacts/mod.rs b/src/artifacts/mod.rs index ed8bb08e..96279b6d 100644 --- a/src/artifacts/mod.rs +++ b/src/artifacts/mod.rs @@ -87,6 +87,31 @@ impl SolcInput { Self { language, sources, settings } } + pub fn resolve_and_build(sources: Sources, settings: Settings) -> Vec { + let mut solidity_sources = BTreeMap::new(); + let mut yul_sources = BTreeMap::new(); + + for (file, source) in sources { + if 
file.extension().map_or(false, |e| e == "yul") { + yul_sources.insert(file, source); + } else if file.extension().map_or(false, |e| e == "sol") { + solidity_sources.insert(file, source); + } + } + + let mut res = Vec::new(); + + if !solidity_sources.is_empty() { + res.push(SolcInput::new(SolcLanguages::Solidity, solidity_sources, settings.clone())) + } + + if !yul_sources.is_empty() { + res.push(SolcInput::new(SolcLanguages::Yul, yul_sources, settings)) + } + + res + } + /// This will remove/adjust values in the [`SolcInput`] that are not compatible with this /// version pub fn sanitize(&mut self, version: &Version) { @@ -2147,7 +2172,7 @@ mod tests { let settings = Settings { metadata: Some(BytecodeHash::Ipfs.into()), ..Default::default() }; let input = - SolcInput { language: "Solidity".to_string(), sources: Default::default(), settings }; + SolcInput { language: SolcLanguages::Solidity, sources: Default::default(), settings }; let i = input.clone().sanitized(&version); assert_eq!(i.settings.metadata.unwrap().bytecode_hash, Some(BytecodeHash::Ipfs)); @@ -2167,7 +2192,7 @@ mod tests { }; let input = - SolcInput { language: "Solidity".to_string(), sources: Default::default(), settings }; + SolcInput { language: SolcLanguages::Solidity, sources: Default::default(), settings }; let i = input.clone().sanitized(&version); assert_eq!(i.settings.metadata.unwrap().cbor_metadata, Some(true)); diff --git a/src/buildinfo.rs b/src/buildinfo.rs index f2de505b..0bba269e 100644 --- a/src/buildinfo.rs +++ b/src/buildinfo.rs @@ -107,10 +107,9 @@ mod tests { #[test] fn build_info_serde() { - let inputs = SolcInput::build( + let inputs = SolcInput::resolve_and_build( BTreeMap::from([(PathBuf::from("input.sol"), Source::new(""))]), Default::default(), - &Version::new(0, 8, 4), ); let output = CompilerOutput::::default(); let v: Version = "0.8.4+commit.c7e474f2".parse().unwrap(); diff --git a/src/cache.rs b/src/cache.rs index 5346f8cb..ac41698f 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -129,7 +129,7 @@ impl CompilerCache { /// let cache = SolFilesCache::read_joined(&project.paths)?; /// # Ok::<_, Box>(()) /// ``` - pub fn read_joined(paths: &ProjectPathsConfig) -> Result { + pub fn read_joined(paths: &ProjectPathsConfig) -> Result { let mut cache = CompilerCache::read(&paths.cache)?; cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts); Ok(cache) diff --git a/src/compile/mod.rs b/src/compile/mod.rs index 7964b99f..872580b8 100644 --- a/src/compile/mod.rs +++ b/src/compile/mod.rs @@ -1,6 +1,5 @@ use crate::{ artifacts::Source, - compilers::solc::SolcLanguages, error::{Result, SolcError}, resolver::parse::SolData, utils, CompilerOutput, SolcInput, @@ -317,6 +316,9 @@ impl Solc { pub fn blocking_install(version: &Version) -> std::result::Result { use crate::utils::RuntimeOrHandle; + #[cfg(test)] + crate::take_solc_installer_lock!(_lock); + trace!("blocking installing solc version \"{}\"", version); crate::report::solc_installation_start(version); // The async version `svm::install` is used instead of `svm::blocking_intsall` @@ -386,19 +388,12 @@ impl Solc { pub fn compile_source(&self, path: impl AsRef) -> Result { let path = path.as_ref(); let mut res: CompilerOutput = Default::default(); - - let solidity_sources = Source::read_all_from(path, &["sol"])?; - let yul_sources = Source::read_all_from(path, &["yul"])?; - - if !solidity_sources.is_empty() { - let input = - SolcInput::new(SolcLanguages::Solidity, solidity_sources, Default::default()); - res.merge(self.compile(&input)?) 
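`SolcInput::resolve_and_build` first splits a mixed source set by file extension so that Solidity and Yul land in separate standard-JSON inputs. A stand-alone sketch of that split, with `String` standing in for the crate's `Source` type:

    use std::collections::BTreeMap;
    use std::path::PathBuf;

    fn split_sources(
        sources: BTreeMap<PathBuf, String>,
    ) -> (BTreeMap<PathBuf, String>, BTreeMap<PathBuf, String>) {
        let mut solidity = BTreeMap::new();
        let mut yul = BTreeMap::new();
        for (file, source) in sources {
            if file.extension().map_or(false, |e| e == "yul") {
                yul.insert(file, source);
            } else if file.extension().map_or(false, |e| e == "sol") {
                solidity.insert(file, source);
            }
        }
        (solidity, yul)
    }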
- } - - if !yul_sources.is_empty() { - let input = SolcInput::new(SolcLanguages::Yul, yul_sources, Default::default()); - res.merge(self.compile(&input)?) + for input in + SolcInput::resolve_and_build(Source::read_sol_yul_from(path)?, Default::default()) + { + let input = input.sanitized(&self.version); + let output = self.compile(&input)?; + res.merge(output) } Ok(res) @@ -527,16 +522,25 @@ impl Solc { cmd } + + pub fn find_or_install(version: &Version) -> Result { + let solc = if let Some(solc) = Self::find_svm_installed_version(version.to_string())? { + solc + } else { + Self::blocking_install(version)? + }; + + Ok(solc) + } } #[cfg(feature = "async")] impl Solc { /// Convenience function for compiling all sources under the given path pub async fn async_compile_source(&self, path: impl AsRef) -> Result { - self.async_compile(&SolcInput::build( + self.async_compile(&SolcInput::resolve_and_build( Source::async_read_all_from(path, SOLC_EXTENSIONS).await?, Default::default(), - &self.version, )) .await } @@ -638,11 +642,7 @@ impl AsRef for Solc { #[cfg(feature = "svm-solc")] mod tests { use super::*; - use crate::{ - compilers::{solc::SolcVersionManager, CompilerVersionManager, VersionManagerError}, - resolver::parse::SolData, - Artifact, - }; + use crate::{resolver::parse::SolData, Artifact}; #[test] fn test_version_parse() { @@ -652,7 +652,11 @@ mod tests { } fn solc() -> Solc { - SolcVersionManager::default().get_or_install(&Version::new(0, 8, 18)).unwrap() + if let Some(solc) = Solc::find_svm_installed_version("0.8.18").unwrap() { + solc + } else { + Solc::blocking_install(&Version::new(0, 8, 18)).unwrap() + } } #[test] @@ -767,7 +771,7 @@ mod tests { Solc::blocking_install(&version).unwrap(); } drop(_lock); - let res = SolcVersionManager::default().get_installed(&version).unwrap(); + let res = Solc::find_svm_installed_version(ver).unwrap().unwrap(); let expected = svm::data_dir().join(ver).join(format!("solc-{ver}")); assert_eq!(res.solc, expected); } @@ -784,8 +788,7 @@ mod tests { #[test] fn does_not_find_not_installed_version() { let ver = "1.1.1"; - let version = Version::from_str(ver).unwrap(); - let res = SolcVersionManager::default().get_installed(&version); - assert!(matches!(res, Err(VersionManagerError::VersionNotInstalled(_)))); + let res = Solc::find_svm_installed_version(ver).unwrap(); + assert!(matches!(res, None)); } } diff --git a/src/compile/project.rs b/src/compile/project.rs index 62e0ef61..7e6195fe 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -507,7 +507,7 @@ impl FilteredCompilerSources { #[cfg(test)] #[cfg(all(feature = "project-util", feature = "svm-solc"))] - fn sources(&self) -> &VersionedFilteredSources { + fn sources(&self) -> &VersionedFilteredSources { match self { FilteredCompilerSources::Sequential(v) => v, FilteredCompilerSources::Parallel(v, _) => v, @@ -550,18 +550,18 @@ fn compile_sequential( let mut input = C::Input::build(sources, opt_settings, language.clone(), &version) .with_base_path(paths.root.clone()) - .with_allowed_paths(paths.allowed_paths.clone()) + .with_allow_paths(paths.allowed_paths.clone()) .with_include_paths(include_paths.clone()) .with_remappings(paths.remappings.clone()); - input.strip_prefix(paths.root.as_path()); - let actually_dirty = input .sources() .keys() .filter(|f| dirty_files.contains(f)) .cloned() .collect::>(); + + input.strip_prefix(paths.root.as_path()); if actually_dirty.is_empty() { // nothing to compile for this particular language, all dirty files are in the other @@ -638,12 +638,10 @@ fn 
compile_parallel( let mut input = C::Input::build(sources, opt_settings, language.clone(), &version) .with_base_path(paths.root.clone()) - .with_allowed_paths(paths.allowed_paths.clone()) + .with_allow_paths(paths.allowed_paths.clone()) .with_include_paths(include_paths.clone()) .with_remappings(paths.remappings.clone()); - input.strip_prefix(paths.root.as_path()); - let actually_dirty = input .sources() .keys() @@ -651,6 +649,8 @@ fn compile_parallel( .cloned() .collect::>(); + input.strip_prefix(paths.root.as_path()); + if actually_dirty.is_empty() { // nothing to compile for this particular language, all dirty files are in the other // language set @@ -725,8 +725,8 @@ fn compile_parallel( mod tests { use super::*; use crate::{ - artifacts::output_selection::ContractOutputSelection, project_util::TempProject, - ConfigurableArtifacts, MinimalCombinedArtifacts, Solc, + artifacts::output_selection::ContractOutputSelection, compilers::solc::SolcRegistry, + project_util::TempProject, ConfigurableArtifacts, MinimalCombinedArtifacts, Solc, }; fn init_tracing() { @@ -759,7 +759,7 @@ mod tests { fn can_detect_cached_files() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); @@ -835,10 +835,11 @@ mod tests { assert!(cache.cache.all_artifacts_exist()); assert_eq!(cache.dirty_sources.len(), 1); + let len = sources.values().map(|v| v.len()).sum::(); // single solc - assert_eq!(sources.len(), 1); + assert_eq!(len, 1); - let filtered = &sources[0].2; + let filtered = &sources.values().next().unwrap().values().next().unwrap(); // 3 contracts total assert_eq!(filtered.0.len(), 3); diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 6d542642..55a2c2ab 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -7,6 +7,7 @@ use crate::{ remappings::Remapping, ProjectPathsConfig, }; +use core::fmt; use semver::{Version, VersionReq}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ @@ -17,12 +18,46 @@ use std::{ }; pub mod solc; -mod version_manager; #[cfg(ignore)] pub mod vyper; -pub use version_manager::{CompilerVersion, CompilerVersionManager, VersionManagerError}; +/// A compiler version is either installed (available locally) or can be downloaded, from the remote +/// endpoint +#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] +#[serde(untagged)] +pub enum CompilerVersion { + Installed(Version), + Remote(Version), +} + +impl CompilerVersion { + pub fn is_installed(&self) -> bool { + matches!(self, CompilerVersion::Installed(_)) + } +} + +impl AsRef for CompilerVersion { + fn as_ref(&self) -> &Version { + match self { + CompilerVersion::Installed(v) | CompilerVersion::Remote(v) => v, + } + } +} + +impl From for Version { + fn from(s: CompilerVersion) -> Version { + match s { + CompilerVersion::Installed(v) | CompilerVersion::Remote(v) => v, + } + } +} + +impl fmt::Display for CompilerVersion { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.as_ref()) + } +} /// Compilation settings including evm_version, output_selection, etc. pub trait CompilerSettings: @@ -45,7 +80,7 @@ pub trait CompilerSettings: } /// Input of a compiler, including sources and settings used for their compilation. 
-pub trait CompilerInput: Serialize + Send + Sync + Sized { +pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { type Settings: CompilerSettings; type Language: Language; @@ -81,7 +116,7 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized { /// Builder method to set the allowed paths for the compiler. Primarily used by solc /// implementation to set --allow-paths. - fn with_allowed_paths(self, _allowed_paths: BTreeSet) -> Self { + fn with_allow_paths(self, _allowed_paths: BTreeSet) -> Self { self } diff --git a/src/compilers/solc/mod.rs b/src/compilers/solc/mod.rs index ffda26fa..b9935829 100644 --- a/src/compilers/solc/mod.rs +++ b/src/compilers/solc/mod.rs @@ -1,13 +1,8 @@ -#[cfg(feature = "svm-solc")] -mod version_manager; -#[cfg(feature = "svm-solc")] -pub use version_manager::SolcVersionManager; - use itertools::Itertools; use super::{ - version_manager::CompilerVersion, CompilationError, Compiler, CompilerInput, CompilerOutput, - CompilerSettings, Language, ParsedSource, + CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, + Language, ParsedSource, }; use crate::{ artifacts::{ @@ -22,10 +17,14 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::BTreeSet, + collections::{BTreeSet, HashMap, HashSet}, path::{Path, PathBuf}, }; +#[derive(Debug, Clone, Default)] +#[non_exhaustive] +pub struct SolcRegistry; + #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[non_exhaustive] pub enum SolcLanguages { @@ -37,7 +36,7 @@ impl Language for SolcLanguages { const FILE_EXTENSIONS: &'static [&'static str] = SOLC_EXTENSIONS; } -impl Compiler for Solc { +impl Compiler for SolcRegistry { type Input = SolcVerionedInput; type CompilationError = crate::artifacts::Error; type ParsedSource = SolData; @@ -45,35 +44,11 @@ impl Compiler for Solc { type Language = SolcLanguages; fn compile(&self, input: &Self::Input) -> Result> { - let solc = - if let Some(solc) = Solc::find_svm_installed_version(input.version().to_string())? { - solc - } else { - #[cfg(test)] - crate::take_solc_installer_lock!(_lock); - - let version = if !input.version.pre.is_empty() || !input.version.build.is_empty() { - Version::new(input.version.major, input.version.minor, input.version.patch) - } else { - input.version.clone() - }; - - trace!("blocking installing solc version \"{}\"", version); - crate::report::solc_installation_start(&version); - // The async version `svm::install` is used instead of `svm::blocking_intsall` - // because the underlying `reqwest::blocking::Client` does not behave well - // inside of a Tokio runtime. See: https://github.com/seanmonstar/reqwest/issues/1017 - match RuntimeOrHandle::new().block_on(svm::install(&version)) { - Ok(path) => { - crate::report::solc_installation_success(&version); - Ok(Solc::new_with_version(path, version)) - } - Err(err) => { - crate::report::solc_installation_error(&version, &err.to_string()); - Err(SolcError::msg(format!("failed to install {}", version))) - } - }? 
- }; + let mut solc = Solc::find_or_install(&input.version)?; + solc.base_path = input.base_path.clone(); + solc.allow_paths = input.allow_paths.clone(); + solc.include_paths = input.include_paths.clone(); + let solc_output = solc.compile(&input.input)?; let output = CompilerOutput { @@ -86,7 +61,22 @@ impl Compiler for Solc { } fn available_versions(&self, _language: &Self::Language) -> Vec { - Solc::installed_versions().into_iter().map(CompilerVersion::Installed).collect() + let mut all_versions = Solc::installed_versions().into_iter().map(CompilerVersion::Installed).collect::>(); + let mut uniques = all_versions + .iter() + .map(|v| { + let v = v.as_ref(); + (v.major, v.minor, v.patch) + }) + .collect::>(); + all_versions.extend( + Solc::released_versions() + .into_iter() + .filter(|v| uniques.insert((v.major, v.minor, v.patch))) + .map(CompilerVersion::Remote), + ); + all_versions.sort_unstable(); + all_versions } } @@ -97,9 +87,9 @@ pub struct SolcVerionedInput { #[serde(flatten)] pub input: SolcInput, #[serde(skip)] - pub allowed_paths: BTreeSet, + pub allow_paths: BTreeSet, #[serde(skip)] - pub base_path: PathBuf, + pub base_path: Option, #[serde(skip)] pub include_paths: BTreeSet, } @@ -123,9 +113,9 @@ impl CompilerInput for SolcVerionedInput { Self { version: version.clone(), input, - allowed_paths: BTreeSet::new(), - base_path: PathBuf::new(), - include_paths: BTreeSet::new(), + base_path: None, + include_paths: Default::default(), + allow_paths: Default::default(), } } @@ -155,13 +145,13 @@ impl CompilerInput for SolcVerionedInput { self.input.strip_prefix(base); } - fn with_allowed_paths(mut self, allowed_paths: BTreeSet) -> Self { - self.allowed_paths = allowed_paths; + fn with_allow_paths(mut self, allowed_paths: BTreeSet) -> Self { + self.allow_paths = allowed_paths; self } fn with_base_path(mut self, base_path: PathBuf) -> Self { - self.base_path = base_path; + self.base_path = Some(base_path); self } diff --git a/src/compilers/solc/version_manager.rs b/src/compilers/solc/version_manager.rs deleted file mode 100644 index 6188bfa5..00000000 --- a/src/compilers/solc/version_manager.rs +++ /dev/null @@ -1,84 +0,0 @@ -use crate::{ - compilers::version_manager::{CompilerVersion, CompilerVersionManager, VersionManagerError}, - Solc, -}; -use semver::Version; -use std::collections::HashSet; - -/// [CompilerVersionManager] implementation for [Solc]. Uses [svm] to install and manage versions. 
-#[derive(Debug, Default)] -#[non_exhaustive] -pub struct SolcVersionManager; - -impl CompilerVersionManager for SolcVersionManager { - type Compiler = Solc; - - fn all_versions(&self) -> Vec { - let mut all_versions = self.installed_versions(); - let mut uniques = all_versions - .iter() - .map(|v| { - let v = v.as_ref(); - (v.major, v.minor, v.patch) - }) - .collect::>(); - all_versions.extend( - Solc::released_versions() - .into_iter() - .filter(|v| uniques.insert((v.major, v.minor, v.patch))) - .map(CompilerVersion::Remote), - ); - all_versions.sort_unstable(); - all_versions - } - - fn installed_versions(&self) -> Vec { - Solc::installed_versions().into_iter().map(CompilerVersion::Installed).collect() - } - - fn install(&self, version: &Version) -> Result { - use crate::utils::RuntimeOrHandle; - - #[cfg(test)] - crate::take_solc_installer_lock!(_lock); - - let version = if !version.pre.is_empty() || !version.build.is_empty() { - Version::new(version.major, version.minor, version.patch) - } else { - version.clone() - }; - - trace!("blocking installing solc version \"{}\"", version); - crate::report::solc_installation_start(&version); - // The async version `svm::install` is used instead of `svm::blocking_intsall` - // because the underlying `reqwest::blocking::Client` does not behave well - // inside of a Tokio runtime. See: https://github.com/seanmonstar/reqwest/issues/1017 - match RuntimeOrHandle::new().block_on(svm::install(&version)) { - Ok(path) => { - crate::report::solc_installation_success(&version); - Ok(Solc::new_with_version(path, version)) - } - Err(err) => { - crate::report::solc_installation_error(&version, &err.to_string()); - Err(VersionManagerError::IntallationFailed(Box::new(err))) - } - } - } - - fn get_installed(&self, version: &Version) -> Result { - #[cfg(test)] - crate::take_solc_installer_lock!(_lock); - - let s_version = version.to_string(); - - let solc = Solc::svm_home() - .ok_or_else(|| VersionManagerError::msg("svm home dir not found"))? 
- .join(s_version.as_str()) - .join(format!("solc-{s_version}")); - - if !solc.is_file() { - return Err(VersionManagerError::VersionNotInstalled(version.clone())); - } - Ok(Solc::new_with_version(solc, version.clone())) - } -} diff --git a/src/compilers/version_manager.rs b/src/compilers/version_manager.rs deleted file mode 100644 index e80eafad..00000000 --- a/src/compilers/version_manager.rs +++ /dev/null @@ -1,87 +0,0 @@ -use core::fmt; -use std::fmt::Debug; - -use super::Compiler; -use auto_impl::auto_impl; -use semver::Version; -use serde::{Deserialize, Serialize}; - -/// A compiler version is either installed (available locally) or can be downloaded, from the remote -/// endpoint -#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] -#[serde(untagged)] -pub enum CompilerVersion { - Installed(Version), - Remote(Version), -} - -impl CompilerVersion { - pub fn is_installed(&self) -> bool { - matches!(self, CompilerVersion::Installed(_)) - } -} - -impl AsRef for CompilerVersion { - fn as_ref(&self) -> &Version { - match self { - CompilerVersion::Installed(v) | CompilerVersion::Remote(v) => v, - } - } -} - -impl From for Version { - fn from(s: CompilerVersion) -> Version { - match s { - CompilerVersion::Installed(v) | CompilerVersion::Remote(v) => v, - } - } -} - -impl fmt::Display for CompilerVersion { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.as_ref()) - } -} - -#[derive(Debug, thiserror::Error)] -pub enum VersionManagerError { - #[error("compiler version {0} not installed")] - VersionNotInstalled(Version), - - #[error("{0}")] - Message(String), - - #[error("installation failed: {0}")] - IntallationFailed(Box), -} - -impl VersionManagerError { - pub fn msg(msg: impl std::fmt::Display) -> Self { - VersionManagerError::Message(msg.to_string()) - } -} - -/// Abstraction over a compiler version manager. Currently main implementation is -/// [`SolcVersionManager`]. Acts as a factory of [Compiler]s. -/// -/// [`SolcVersionManager`]: crate::compilers::solc::SolcVersionManager -#[auto_impl(&, Box, Arc)] -pub trait CompilerVersionManager: Debug + Send + Sync { - type Compiler: Compiler; - - /// Returns all versions available locally and remotely. - fn all_versions(&self) -> Vec; - /// Returns all versions available locally. - fn installed_versions(&self) -> Vec; - - /// Installs a compiler version and returns the compiler instance. - fn install(&self, version: &Version) -> Result; - /// Returns the compiler instance for the given version if it is installed. If not, returns an - /// error. - fn get_installed(&self, version: &Version) -> Result; - - /// Returns the compiler instance for the given version if it is installed. If not, installs it. 
- fn get_or_install(&self, version: &Version) -> Result { - self.get_installed(version).or_else(|_| self.install(version)) - } -} diff --git a/src/config.rs b/src/config.rs index 0cb9c2e6..433b2e1a 100644 --- a/src/config.rs +++ b/src/config.rs @@ -19,7 +19,7 @@ use std::{ /// Where to find all files or where to write them #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ProjectPathsConfig { +pub struct ProjectPathsConfig { /// Project root pub root: PathBuf, /// Path to the cache, if any @@ -43,7 +43,7 @@ pub struct ProjectPathsConfig { /// The paths which will be allowed for library inclusion pub allowed_paths: BTreeSet, - pub _c: PhantomData, + pub _l: PhantomData, } impl ProjectPathsConfig { @@ -816,7 +816,7 @@ impl ProjectPathsConfigBuilder { root, include_paths: self.include_paths, allowed_paths, - _c: PhantomData, + _l: PhantomData, } } diff --git a/src/error.rs b/src/error.rs index b9d71319..db3b3955 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,4 +1,3 @@ -use crate::compilers::VersionManagerError; use semver::Version; use std::{ io, @@ -72,9 +71,6 @@ pub enum SolcError { #[cfg(feature = "project-util")] #[error(transparent)] FsExtra(#[from] fs_extra::error::Error), - - #[error(transparent)] - VersionManager(#[from] VersionManagerError), } impl SolcError { diff --git a/src/lib.rs b/src/lib.rs index 7671fa69..40bca430 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,7 +25,7 @@ pub mod cache; pub mod flatten; pub mod hh; -use compilers::{Compiler, CompilerSettings}; +use compilers::{solc::SolcRegistry, Compiler, CompilerSettings}; pub use filter::SparseOutputFileFilter; pub use hh::{HardhatArtifact, HardhatArtifacts}; @@ -79,7 +79,7 @@ pub mod project_util; /// Represents a project workspace and handles `solc` compiling of all contracts in that workspace. #[derive(Clone, Debug)] -pub struct Project { +pub struct Project { pub compiler: C, /// The layout of the project pub paths: ProjectPathsConfig, @@ -152,7 +152,7 @@ impl Project { } } -impl Project { +impl Project { /// Returns standard-json-input to compile the target contract pub fn standard_json_input( &self, @@ -543,7 +543,7 @@ impl Project { } } -pub struct ProjectBuilder { +pub struct ProjectBuilder { /// The layout of the paths: Option>, /// How solc invocation should be configured. 
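Usage note for the hunks above: `Project` and `ProjectBuilder` become generic over a `Compiler` implementation, with `SolcRegistry` as the default, and the compiler is now passed directly to `build` instead of going through a `CompilerConfig`. The following is a minimal sketch of how a downstream caller is expected to wire this up, based on the builder calls already used in the updated tests (`ProjectPathsConfig::builder()`, `Project::builder().paths(..).build(SolcRegistry::default())`, `compile()`, `assert_success()`); the `"./my-project"` root path and the `build_and_compile` helper name are illustrative placeholders, not part of the patch.

    use foundry_compilers::{compilers::solc::SolcRegistry, Project, ProjectPathsConfig};

    fn build_and_compile() -> Result<(), Box<dyn std::error::Error>> {
        // Resolve the project layout from a root directory (placeholder path).
        let paths = ProjectPathsConfig::builder().root("./my-project").build()?;

        // The compiler registry is passed to `build`; with SolcRegistry the
        // required solc versions are detected per source set and installed on demand.
        let project = Project::builder().paths(paths).build(SolcRegistry::default())?;

        // Compile all sources and fail loudly on compiler errors.
        let output = project.compile()?;
        output.assert_success();
        Ok(())
    }
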
diff --git a/src/project_util/mod.rs b/src/project_util/mod.rs index e4e24488..c46f4545 100644 --- a/src/project_util/mod.rs +++ b/src/project_util/mod.rs @@ -2,7 +2,7 @@ use crate::{ artifacts::{Error, Settings}, - compilers::Compiler, + compilers::{solc::SolcRegistry, Compiler}, config::ProjectPathsConfigBuilder, error::{Result, SolcError}, filter::SparseOutputFileFilter, @@ -29,18 +29,18 @@ pub mod mock; /// A [`Project`] wrapper that lives in a new temporary directory /// /// Once `TempProject` is dropped, the temp dir is automatically removed, see [`TempDir::drop()`] -pub struct TempProject { +pub struct TempProject { /// temporary workspace root _root: TempDir, /// actual project workspace with the `root` tempdir as its root inner: Project, } -impl TempProject { +impl TempProject { /// Makes sure all resources are created pub fn create_new( root: TempDir, - inner: Project, + inner: Project, ) -> std::result::Result { let mut project = Self { _root: root, inner }; project.paths().create_all()?; @@ -79,6 +79,7 @@ impl TempProject { /// Explicitly sets the solc version for the project #[cfg(feature = "svm-solc")] + #[cfg(ignore)] pub fn set_solc(&mut self, solc: impl AsRef) -> &mut Self { use crate::{compilers::CompilerVersionManager, CompilerConfig}; use semver::Version; @@ -90,7 +91,7 @@ impl TempProject { self } - pub fn project(&self) -> &Project { + pub fn project(&self) -> &Project { &self.inner } @@ -98,7 +99,7 @@ impl TempProject { self.project().flatten(target) } - pub fn project_mut(&mut self) -> &mut Project { + pub fn project_mut(&mut self) -> &mut Project { &mut self.inner } @@ -350,7 +351,7 @@ contract {} {{}} } } -impl TempProject { +impl TempProject { /// Creates a new temp project inside a tempdir with a prefixed directory #[cfg(feature = "svm-solc")] pub fn prefixed(prefix: &str, paths: ProjectPathsConfigBuilder) -> Result { @@ -375,7 +376,7 @@ impl TempProject { } } -impl fmt::Debug for TempProject { +impl fmt::Debug for TempProject { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("TempProject").field("paths", self.paths()).finish() } @@ -400,7 +401,7 @@ fn contract_file_name(name: impl AsRef) -> String { } #[cfg(feature = "svm-solc")] -impl TempProject { +impl TempProject { /// Creates an empty new hardhat style workspace in a new temporary dir pub fn hardhat() -> Result { let tmp_dir = tempdir("tmp_hh")?; @@ -475,8 +476,8 @@ impl TempProject { } } -impl AsRef> for TempProject { - fn as_ref(&self) -> &Project { +impl AsRef> for TempProject { + fn as_ref(&self) -> &Project { self.project() } } diff --git a/src/resolver/mod.rs b/src/resolver/mod.rs index 2046a64f..9e7cd511 100644 --- a/src/resolver/mod.rs +++ b/src/resolver/mod.rs @@ -914,9 +914,9 @@ mod tests { #[test] fn can_resolve_hardhat_dependency_graph() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample"); - let paths = ProjectPathsConfig::::hardhat(root).unwrap(); + let paths = ProjectPathsConfig::hardhat(root).unwrap(); - let graph = Graph::resolve(&paths).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); assert_eq!(graph.edges.num_input_files, 1); assert_eq!(graph.files().len(), 2); @@ -933,9 +933,9 @@ mod tests { #[test] fn can_resolve_dapp_dependency_graph() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); - let paths = ProjectPathsConfig::::dapptools(root).unwrap(); + let paths = ProjectPathsConfig::dapptools(root).unwrap(); - let graph = Graph::resolve(&paths).unwrap(); + let graph = 
Graph::::resolve(&paths).unwrap(); assert_eq!(graph.edges.num_input_files, 2); assert_eq!(graph.files().len(), 3); @@ -960,11 +960,11 @@ mod tests { #[test] #[cfg(not(target_os = "windows"))] fn can_print_dapp_sample_graph() { - use crate::Solc; + use crate::compilers::solc::SolcLanguages; let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); - let paths = ProjectPathsConfig::::dapptools(root).unwrap(); - let graph = Graph::resolve(&paths).unwrap(); + let paths = ProjectPathsConfig::dapptools(root).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); let mut out = Vec::::new(); tree::print(&graph, &Default::default(), &mut out).unwrap(); @@ -985,11 +985,11 @@ src/Dapp.t.sol >=0.6.6 #[test] #[cfg(not(target_os = "windows"))] fn can_print_hardhat_sample_graph() { - use crate::Solc; + use crate::{compilers::solc::SolcLanguages, Solc}; let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample"); - let paths = ProjectPathsConfig::::hardhat(root).unwrap(); - let graph = Graph::resolve(&paths).unwrap(); + let paths = ProjectPathsConfig::hardhat(root).unwrap(); + let graph = Graph::::resolve(&paths).unwrap(); let mut out = Vec::::new(); tree::print(&graph, &Default::default(), &mut out).unwrap(); assert_eq!( diff --git a/tests/project.rs b/tests/project.rs index 0c55e6aa..25c06072 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -10,9 +10,7 @@ use foundry_compilers::{ buildinfo::BuildInfo, cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, compilers::{ - solc::SolcVersionManager, - vyper::{Vyper, VyperSettings}, - CompilerOutput, CompilerVersionManager, + solc::SolcRegistry, CompilerOutput }, error::SolcError, flatten::Flattener, @@ -21,7 +19,7 @@ use foundry_compilers::{ remappings::Remapping, resolver::parse::SolData, utils::{self, RuntimeOrHandle}, - Artifact, CompilerConfig, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, + Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, Solc, SolcInput, SolcSparseFileFilter, TestFileFilter, }; @@ -37,6 +35,7 @@ use std::{ }; use svm::{platform, Platform}; +#[cfg(ignore)] pub static VYPER: Lazy = Lazy::new(|| { RuntimeOrHandle::new().block_on(async { #[cfg(target_family = "unix")] @@ -89,7 +88,7 @@ fn can_compile_hardhat_sample() { let paths = ProjectPathsConfig::builder() .sources(root.join("contracts")) .lib(root.join("node_modules")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); assert!(compiled.find_first("Greeter").is_some()); @@ -114,7 +113,7 @@ fn can_compile_hardhat_sample() { fn can_compile_dapp_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); assert!(compiled.find_first("Dapp").is_some()); @@ -141,7 +140,7 @@ fn can_compile_dapp_sample() { fn can_compile_yul_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/yul-sample"); let paths = ProjectPathsConfig::builder().sources(root); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); assert!(compiled.find_first("Dapp").is_some()); @@ -543,7 +542,7 
@@ fn can_flatten_file_with_external_lib() { let paths = ProjectPathsConfig::builder() .sources(root.join("contracts")) .lib(root.join("node_modules")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let target = root.join("contracts").join("Greeter.sol"); @@ -558,7 +557,7 @@ fn can_flatten_file_with_external_lib() { fn can_flatten_file_in_dapp_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let target = root.join("src/Dapp.t.sol"); @@ -2615,7 +2614,7 @@ fn can_create_standard_json_input_with_external_file() { ] ); - let solc = SolcVersionManager::default().get_or_install(&Version::new(0, 8, 24)).unwrap(); + let solc = Solc::find_or_install(&Version::new(0, 8, 24)).unwrap(); // can compile using the created json let compiler_errors = solc @@ -2640,7 +2639,7 @@ fn can_compile_std_json_input() { assert!(input.sources.contains_key(Path::new("lib/ds-test/src/test.sol"))); // should be installed - if let Ok(solc) = SolcVersionManager::default().get_or_install(&Version::new(0, 8, 24)) { + if let Ok(solc) = Solc::find_or_install(&Version::new(0, 8, 24)) { let out = solc.compile(&input).unwrap(); assert!(out.errors.is_empty()); assert!(out.sources.contains_key(Path::new("lib/ds-test/src/test.sol"))); @@ -2704,7 +2703,7 @@ fn can_create_standard_json_input_with_symlink() { ] ); - let solc = SolcVersionManager::default().get_or_install(&Version::new(0, 8, 24)).unwrap(); + let solc = Solc::find_or_install(&Version::new(0, 8, 24)).unwrap(); // can compile using the created json let compiler_errors = solc @@ -2722,7 +2721,7 @@ fn can_compile_model_checker_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/model-checker-sample"); let paths = ProjectPathsConfig::builder().sources(root); - let mut project = TempProject::::new(paths).unwrap(); + let mut project = TempProject::::new(paths).unwrap(); project.project_mut().settings.model_checker = Some(ModelCheckerSettings { engine: Some(CHC), timeout: Some(10000), @@ -2833,7 +2832,7 @@ fn test_compiler_severity_filter_and_ignored_error_codes() { } fn remove_solc_if_exists(version: &Version) { - if SolcVersionManager::default().get_installed(version).is_ok() { + if Solc::find_svm_installed_version(version.to_string()).unwrap().is_some() { svm::remove_version(version).expect("failed to remove version") } } @@ -2867,7 +2866,7 @@ async fn can_install_solc_and_compile_std_json_input_async() { tmp.assert_no_errors(); let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap(); let input = tmp.project().standard_json_input(source).unwrap(); - let solc = SolcVersionManager::default().get_or_install(&Version::new(0, 8, 24)).unwrap(); + let solc = Solc::find_or_install(&Version::new(0, 8, 24)).unwrap(); assert!(input.settings.remappings.contains(&"ds-test/=lib/ds-test/src/".parse().unwrap())); let input: SolcInput = input.into(); @@ -2879,6 +2878,7 @@ async fn can_install_solc_and_compile_std_json_input_async() { } #[test] +#[cfg(ignore)] fn can_purge_obsolete_artifacts() { let mut project = TempProject::dapptools().unwrap(); project.set_solc("0.8.10"); @@ -2910,6 +2910,7 @@ fn can_purge_obsolete_artifacts() { } #[test] +#[cfg(ignore)] fn can_parse_notice() { let mut project = TempProject::dapptools().unwrap(); 
project.project_mut().artifacts.additional_values.userdoc = true; @@ -3786,11 +3787,10 @@ fn test_deterministic_metadata() { let orig_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); copy_dir_all(orig_root, &tmp_dir).unwrap(); - let vm = SolcVersionManager::default(); let paths = ProjectPathsConfig::builder().root(root).build().unwrap(); let project = Project::builder() .paths(paths) - .build(CompilerConfig::Specific(vm.get_or_install(&Version::new(0, 8, 18)).unwrap())) + .build(SolcRegistry::default()) .unwrap(); let compiled = project.compile().unwrap(); @@ -3809,6 +3809,7 @@ fn test_deterministic_metadata() { } #[test] +#[cfg(ignore)] fn can_compile_vyper_with_cache() { let tmp_dir = tempfile::tempdir().unwrap(); let root = tmp_dir.path(); @@ -3863,13 +3864,14 @@ fn yul_remappings_ignored() { name: "@openzeppelin".to_string(), path: root.to_string_lossy().to_string(), }); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); } #[test] +#[cfg(ignore)] fn test_vyper_imports() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/vyper-imports"); From 7194bf5bfb7266bd78b91a904e909cb63cff85e4 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 27 May 2024 20:47:04 +0400 Subject: [PATCH 03/24] fmt --- benches/compile_many.rs | 2 +- src/compile/project.rs | 2 +- src/compilers/solc/mod.rs | 5 ++++- tests/project.rs | 16 +++++----------- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/benches/compile_many.rs b/benches/compile_many.rs index 1cd1818d..ed361487 100644 --- a/benches/compile_many.rs +++ b/benches/compile_many.rs @@ -12,7 +12,7 @@ use std::path::Path; fn compile_many_benchmark(c: &mut Criterion) { let inputs = load_compiler_inputs(); - let solc = Solc::find_or_install((&Version::new(0, 8, 0)).unwrap(); + let solc = Solc::find_or_install(&Version::new(0, 8, 0)).unwrap(); let mut group = c.benchmark_group("compile many"); group.sample_size(10); diff --git a/src/compile/project.rs b/src/compile/project.rs index 7e6195fe..b97cb19c 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -560,7 +560,7 @@ fn compile_sequential( .filter(|f| dirty_files.contains(f)) .cloned() .collect::>(); - + input.strip_prefix(paths.root.as_path()); if actually_dirty.is_empty() { diff --git a/src/compilers/solc/mod.rs b/src/compilers/solc/mod.rs index b9935829..92683d63 100644 --- a/src/compilers/solc/mod.rs +++ b/src/compilers/solc/mod.rs @@ -61,7 +61,10 @@ impl Compiler for SolcRegistry { } fn available_versions(&self, _language: &Self::Language) -> Vec { - let mut all_versions = Solc::installed_versions().into_iter().map(CompilerVersion::Installed).collect::>(); + let mut all_versions = Solc::installed_versions() + .into_iter() + .map(CompilerVersion::Installed) + .collect::>(); let mut uniques = all_versions .iter() .map(|v| { diff --git a/tests/project.rs b/tests/project.rs index 25c06072..7adc77b5 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -9,9 +9,7 @@ use foundry_compilers::{ }, buildinfo::BuildInfo, cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, - compilers::{ - solc::SolcRegistry, CompilerOutput - }, + compilers::{solc::SolcRegistry, CompilerOutput}, error::SolcError, flatten::Flattener, info::ContractInfo, @@ -19,9 +17,9 @@ use foundry_compilers::{ remappings::Remapping, resolver::parse::SolData, utils::{self, RuntimeOrHandle}, - Artifact, ConfigurableArtifacts, 
ExtraOutputValues, Graph, Project, - ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, Solc, SolcInput, - SolcSparseFileFilter, TestFileFilter, + Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectBuilder, + ProjectCompileOutput, ProjectPathsConfig, Solc, SolcInput, SolcSparseFileFilter, + TestFileFilter, }; use once_cell::sync::Lazy; use pretty_assertions::assert_eq; @@ -2910,7 +2908,6 @@ fn can_purge_obsolete_artifacts() { } #[test] -#[cfg(ignore)] fn can_parse_notice() { let mut project = TempProject::dapptools().unwrap(); project.project_mut().artifacts.additional_values.userdoc = true; @@ -3788,10 +3785,7 @@ fn test_deterministic_metadata() { copy_dir_all(orig_root, &tmp_dir).unwrap(); let paths = ProjectPathsConfig::builder().root(root).build().unwrap(); - let project = Project::builder() - .paths(paths) - .build(SolcRegistry::default()) - .unwrap(); + let project = Project::builder().paths(paths).build(SolcRegistry::default()).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); From 607a39aa15aa6d171093cc9f846d83c91f0ab686 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 28 May 2024 19:36:07 +0300 Subject: [PATCH 04/24] clippy --- benches/compile_many.rs | 5 +- src/artifacts/mod.rs | 32 ++- src/buildinfo.rs | 6 +- src/compile/mod.rs | 2 +- src/compile/project.rs | 311 +++++++++---------------- src/compilers/mod.rs | 9 +- src/compilers/{solc/mod.rs => solc.rs} | 41 ++-- src/compilers/vyper/input.rs | 52 ++++- src/compilers/vyper/mod.rs | 45 ++-- src/compilers/vyper/parser.rs | 8 + src/config.rs | 4 +- src/lib.rs | 50 ++-- src/project_util/mod.rs | 12 +- src/resolver/mod.rs | 29 ++- tests/project.rs | 24 +- 15 files changed, 295 insertions(+), 335 deletions(-) rename src/compilers/{solc/mod.rs => solc.rs} (89%) diff --git a/benches/compile_many.rs b/benches/compile_many.rs index ed361487..ada73a62 100644 --- a/benches/compile_many.rs +++ b/benches/compile_many.rs @@ -3,10 +3,7 @@ extern crate criterion; use criterion::Criterion; -use foundry_compilers::{ - compilers::{solc::SolcVersionManager, CompilerVersionManager}, - SolcInput, -}; +use foundry_compilers::{Solc, SolcInput}; use semver::Version; use std::path::Path; diff --git a/src/artifacts/mod.rs b/src/artifacts/mod.rs index 96279b6d..71f1edd5 100644 --- a/src/artifacts/mod.rs +++ b/src/artifacts/mod.rs @@ -3,7 +3,7 @@ #![allow(ambiguous_glob_reexports)] use crate::{ - compile::*, compilers::solc::SolcLanguages, error::SolcIoError, output::ErrorFilter, + compile::*, compilers::solc::SolcLanguage, error::SolcIoError, output::ErrorFilter, remappings::Remapping, utils, ProjectPathsConfig, SolcError, }; use alloy_primitives::hex; @@ -60,7 +60,7 @@ pub const YUL: &str = "Yul"; /// Input type `solc` expects. 
#[derive(Clone, Debug, Serialize, Deserialize)] pub struct SolcInput { - pub language: SolcLanguages, + pub language: SolcLanguage, pub sources: Sources, pub settings: Settings, } @@ -69,7 +69,7 @@ pub struct SolcInput { impl Default for SolcInput { fn default() -> Self { SolcInput { - language: SolcLanguages::Solidity, + language: SolcLanguage::Solidity, sources: Sources::default(), settings: Settings::default(), } @@ -77,12 +77,10 @@ impl Default for SolcInput { } impl SolcInput { - pub fn new(language: SolcLanguages, sources: Sources, mut settings: Settings) -> Self { - if language == SolcLanguages::Yul { - if !settings.remappings.is_empty() { - warn!("omitting remappings supplied for the yul sources"); - settings.remappings = vec![]; - } + pub fn new(language: SolcLanguage, sources: Sources, mut settings: Settings) -> Self { + if language == SolcLanguage::Yul && !settings.remappings.is_empty() { + warn!("omitting remappings supplied for the yul sources"); + settings.remappings = vec![]; } Self { language, sources, settings } } @@ -102,11 +100,11 @@ impl SolcInput { let mut res = Vec::new(); if !solidity_sources.is_empty() { - res.push(SolcInput::new(SolcLanguages::Solidity, solidity_sources, settings.clone())) + res.push(SolcInput::new(SolcLanguage::Solidity, solidity_sources, settings.clone())) } if !yul_sources.is_empty() { - res.push(SolcInput::new(SolcLanguages::Yul, yul_sources, settings)) + res.push(SolcInput::new(SolcLanguage::Yul, yul_sources, settings)) } res @@ -160,11 +158,11 @@ impl SolcInput { /// The flag indicating whether the current [SolcInput] is /// constructed for the yul sources pub fn is_yul(&self) -> bool { - self.language == SolcLanguages::Yul + self.language == SolcLanguage::Yul } pub fn with_remappings(mut self, remappings: Vec) -> Self { - if self.language == SolcLanguages::Yul { + if self.language == SolcLanguage::Yul { if !remappings.is_empty() { warn!("omitting remappings supplied for the yul sources"); } @@ -184,7 +182,7 @@ impl SolcInput { /// the verified contracts #[derive(Clone, Debug, Serialize, Deserialize)] pub struct StandardJsonCompilerInput { - pub language: SolcLanguages, + pub language: SolcLanguage, #[serde(with = "serde_helpers::tuple_vec_map")] pub sources: Vec<(PathBuf, Source)>, pub settings: Settings, @@ -194,7 +192,7 @@ pub struct StandardJsonCompilerInput { impl StandardJsonCompilerInput { pub fn new(sources: Vec<(PathBuf, Source)>, settings: Settings) -> Self { - Self { language: SolcLanguages::Solidity, sources, settings } + Self { language: SolcLanguage::Solidity, sources, settings } } /// Normalizes the EVM version used in the settings to be up to the latest one @@ -2172,7 +2170,7 @@ mod tests { let settings = Settings { metadata: Some(BytecodeHash::Ipfs.into()), ..Default::default() }; let input = - SolcInput { language: SolcLanguages::Solidity, sources: Default::default(), settings }; + SolcInput { language: SolcLanguage::Solidity, sources: Default::default(), settings }; let i = input.clone().sanitized(&version); assert_eq!(i.settings.metadata.unwrap().bytecode_hash, Some(BytecodeHash::Ipfs)); @@ -2192,7 +2190,7 @@ mod tests { }; let input = - SolcInput { language: SolcLanguages::Solidity, sources: Default::default(), settings }; + SolcInput { language: SolcLanguage::Solidity, sources: Default::default(), settings }; let i = input.clone().sanitized(&version); assert_eq!(i.settings.metadata.unwrap().cbor_metadata, Some(true)); diff --git a/src/buildinfo.rs b/src/buildinfo.rs index 0bba269e..c06561b7 100644 --- a/src/buildinfo.rs 
+++ b/src/buildinfo.rs @@ -98,11 +98,7 @@ impl std::io::Write for BuildInfoWriter { #[cfg(test)] mod tests { use super::*; - use crate::{ - artifacts::Error, - compilers::{CompilerInput, CompilerOutput}, - SolcInput, Source, - }; + use crate::{artifacts::Error, compilers::CompilerOutput, SolcInput, Source}; use std::{collections::BTreeMap, path::PathBuf}; #[test] diff --git a/src/compile/mod.rs b/src/compile/mod.rs index 872580b8..4f5355b1 100644 --- a/src/compile/mod.rs +++ b/src/compile/mod.rs @@ -789,6 +789,6 @@ mod tests { fn does_not_find_not_installed_version() { let ver = "1.1.1"; let res = Solc::find_svm_installed_version(ver).unwrap(); - assert!(matches!(res, None)); + assert!(res.is_none()); } } diff --git a/src/compile/project.rs b/src/compile/project.rs index b97cb19c..781e7d5f 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -105,7 +105,7 @@ use crate::{ artifacts::{VersionedFilteredSources, VersionedSources}, buildinfo::RawBuildInfo, cache::ArtifactsCache, - compilers::{Compiler, CompilerInput, Language}, + compilers::{Compiler, CompilerInput, CompilerOutput, Language}, error::Result, filter::SparseOutputFilter, output::AggregatedCompilerOutput, @@ -141,13 +141,12 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { /// Multiple (`Solc` -> `Sources`) pairs can be compiled in parallel if the `Project` allows /// multiple `jobs`, see [`crate::Project::set_solc_jobs()`]. pub fn with_sources(project: &'a Project, sources: Sources) -> Result { - Self::with_sources_autodetect(project, sources) - } - - /// Compiles the sources automatically detecting versions via [CompilerVersionManager] - pub fn with_sources_autodetect(project: &'a Project, sources: Sources) -> Result { let graph = Graph::resolve_sources(&project.paths, sources)?; - let (sources, edges) = graph.into_sources_by_version(project.offline, &project.compiler)?; + let (sources, edges) = graph.into_sources_by_version( + project.offline, + &project.locked_versions, + &project.compiler, + )?; let jobs_cnt = sources.values().map(|v| v.len()).sum::(); @@ -162,22 +161,6 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { Ok(Self { edges, project, sources, sparse_output: Default::default() }) } - /// Compiles the sources with a pinned [Compiler] instance - #[cfg(ignore)] - pub fn with_sources_and_compiler( - project: &'a Project, - sources: Sources, - compiler: C, - ) -> Result { - let version = compiler.version().clone(); - let (sources, edges) = Graph::resolve_sources(&project.paths, sources)?.into_sources(); - - let sources_by_version = vec![(compiler, version.clone(), sources)]; - let sources = CompilerSources::Sequential(sources_by_version); - - Ok(Self { edges, project, sources, sparse_output: Default::default() }) - } - /// Applies the specified filter to be applied when selecting solc output for /// specific files to be compiled pub fn with_sparse_output( @@ -482,27 +465,85 @@ impl FilteredCompilerSources { graph: &GraphEdges, create_build_info: bool, ) -> Result> { - match self { - FilteredCompilerSources::Sequential(input) => compile_sequential( - compiler, - input, - settings, - paths, - sparse_output, - graph, - create_build_info, - ), - FilteredCompilerSources::Parallel(input, j) => compile_parallel( - compiler, - input, - j, - settings, - paths, - sparse_output, - graph, - create_build_info, - ), + let jobs_cnt = if let Self::Parallel(_, jobs_cnt) = self { Some(jobs_cnt) } else { None }; + + let sources = self.into_sources(); + // Include additional paths 
collected during graph resolution. + let mut include_paths = paths.include_paths.clone(); + include_paths.extend(graph.include_paths().clone()); + + let mut jobs = Vec::new(); + for (language, versioned_sources) in sources { + for (version, filtered_sources) in versioned_sources { + if filtered_sources.is_empty() { + // nothing to compile + trace!("skip {} for empty sources set", version); + continue; + } + + let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); + + // depending on the composition of the filtered sources, the output selection can be + // optimized + let mut opt_settings = settings.clone(); + let sources = + sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); + + let mut input = + C::Input::build(sources, opt_settings, language.clone(), version.clone()) + .with_base_path(paths.root.clone()) + .with_allow_paths(paths.allowed_paths.clone()) + .with_include_paths(include_paths.clone()) + .with_remappings(paths.remappings.clone()); + + let actually_dirty = input + .sources() + .keys() + .filter(|f| dirty_files.contains(f)) + .cloned() + .collect::>(); + + input.strip_prefix(paths.root.as_path()); + + if actually_dirty.is_empty() { + // nothing to compile for this particular language, all dirty files are in the + // other language set + trace!("skip {} run due to empty source set", version); + continue; + } + trace!( + "calling {} with {} sources {:?}", + version, + input.sources().len(), + input.sources().keys() + ); + + jobs.push((input, actually_dirty)); + } } + + let results = if let Some(num_jobs) = jobs_cnt { + compile_parallel(compiler, jobs, num_jobs) + } else { + compile_sequential(compiler, jobs) + }?; + + let mut aggregated = AggregatedCompilerOutput::default(); + + for (input, mut output) in results { + let version = input.version(); + // if configured also create the build info + if create_build_info { + let build_info = RawBuildInfo::new(&input, &output, version)?; + aggregated.build_infos.insert(version.clone(), build_info); + } + + output.join_all(paths.root.as_path()); + + aggregated.extend(version.clone(), output); + } + + Ok(aggregated) } #[cfg(test)] @@ -513,161 +554,42 @@ impl FilteredCompilerSources { FilteredCompilerSources::Parallel(v, _) => v, } } + + fn into_sources(self) -> VersionedFilteredSources { + match self { + FilteredCompilerSources::Sequential(v) => v, + FilteredCompilerSources::Parallel(v, _) => v, + } + } } -/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s -fn compile_sequential( +/// Compiles the input set sequentially and returns a [Vec] of outputs. +fn compile_sequential, I: CompilerInput>( compiler: &C, - input: VersionedFilteredSources, - settings: &C::Settings, - paths: &ProjectPathsConfig, - sparse_output: SparseOutputFilter, - graph: &GraphEdges, - create_build_info: bool, -) -> Result> { - let mut aggregated = AggregatedCompilerOutput::default(); - trace!("compiling {} jobs sequentially", input.len()); - - // Include additional paths collected during graph resolution. 
- let mut include_paths = paths.include_paths.clone(); - include_paths.extend(graph.include_paths().clone()); - - for (language, versioned_sources) in input { - for (version, filtered_sources) in versioned_sources { - if filtered_sources.is_empty() { - // nothing to compile - trace!("skip {} for empty sources set", version); - continue; - } - trace!("compiling {} sources with \"{}\"", filtered_sources.len(), version,); - - let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); - - // depending on the composition of the filtered sources, the output selection can be - // optimized - let mut opt_settings = settings.clone(); - let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); - - let mut input = C::Input::build(sources, opt_settings, language.clone(), &version) - .with_base_path(paths.root.clone()) - .with_allow_paths(paths.allowed_paths.clone()) - .with_include_paths(include_paths.clone()) - .with_remappings(paths.remappings.clone()); - - let actually_dirty = input - .sources() - .keys() - .filter(|f| dirty_files.contains(f)) - .cloned() - .collect::>(); - - input.strip_prefix(paths.root.as_path()); - - if actually_dirty.is_empty() { - // nothing to compile for this particular language, all dirty files are in the other - // language set - trace!("skip {} run due to empty source set", version); - continue; - } - - trace!( - "calling {} with {} sources {:?}", - version, - input.sources().len(), - input.sources().keys() - ); - + jobs: Vec<(I, Vec)>, +) -> Result)>> { + jobs.into_iter() + .map(|(input, actually_dirty)| { let start = Instant::now(); report::compiler_spawn( &input.compiler_name(), input.version(), actually_dirty.as_slice(), ); - let mut output = compiler.compile(&input)?; - report::compiler_success(&input.compiler_name(), &version, &start.elapsed()); - // trace!("compiled input, output has error: {}", output.has_error()); - trace!("received compiler output: {:?}", output.contracts.keys()); - - // if configured also create the build info - if create_build_info { - let build_info = RawBuildInfo::new(&input, &output, &version)?; - aggregated.build_infos.insert(version.clone(), build_info); - } - - output.join_all(paths.root.as_path()); + let output = compiler.compile(&input)?; + report::compiler_success(&input.compiler_name(), input.version(), &start.elapsed()); - aggregated.extend(version.clone(), output); - } - } - Ok(aggregated) + Ok((input, output)) + }) + .collect() } /// compiles the input set using `num_jobs` threads -fn compile_parallel( +fn compile_parallel, I: CompilerInput>( compiler: &C, - input: VersionedFilteredSources, + jobs: Vec<(I, Vec)>, num_jobs: usize, - settings: &C::Settings, - paths: &ProjectPathsConfig, - sparse_output: SparseOutputFilter, - graph: &GraphEdges, - create_build_info: bool, -) -> Result> { - debug_assert!(num_jobs > 1); - trace!("compile {} sources in parallel using up to {} solc jobs", input.len(), num_jobs); - - // Include additional paths collected during graph resolution. 
- let mut include_paths = paths.include_paths.clone(); - include_paths.extend(graph.include_paths().clone()); - - let mut jobs = Vec::with_capacity(input.len()); - for (language, versioned_sources) in input { - for (version, filtered_sources) in versioned_sources { - if filtered_sources.is_empty() { - // nothing to compile - trace!("skip {} for empty sources set", version); - continue; - } - - let dirty_files: Vec = filtered_sources.dirty_files().cloned().collect(); - - // depending on the composition of the filtered sources, the output selection can be - // optimized - let mut opt_settings = settings.clone(); - let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); - - let mut input = C::Input::build(sources, opt_settings, language.clone(), &version) - .with_base_path(paths.root.clone()) - .with_allow_paths(paths.allowed_paths.clone()) - .with_include_paths(include_paths.clone()) - .with_remappings(paths.remappings.clone()); - - let actually_dirty = input - .sources() - .keys() - .filter(|f| dirty_files.contains(f)) - .cloned() - .collect::>(); - - input.strip_prefix(paths.root.as_path()); - - if actually_dirty.is_empty() { - // nothing to compile for this particular language, all dirty files are in the other - // language set - trace!("skip {} run due to empty source set", version); - continue; - } - trace!( - "calling {} with {} sources {:?}", - version, - input.sources().len(), - input.sources().keys() - ); - - jobs.push((input, actually_dirty)); - } - } - +) -> Result)>> { // need to get the currently installed reporter before installing the pool, otherwise each new // thread in the pool will get initialized with the default value of the `thread_local!`'s // localkey. This way we keep access to the reporter in the rayon pool @@ -676,7 +598,7 @@ fn compile_parallel( // start a rayon threadpool that will execute all `Solc::compile()` processes let pool = rayon::ThreadPoolBuilder::new().num_threads(num_jobs).build().unwrap(); - let outputs = pool.install(move || { + pool.install(move || { jobs.into_par_iter() .map(move |(input, actually_dirty)| { // set the reporter on this thread @@ -700,24 +622,11 @@ fn compile_parallel( input.version(), &start.elapsed(), ); - (input.version().clone(), input, output) + (input, output) }) }) - .collect::, _>>() - })?; - - let mut aggregated = AggregatedCompilerOutput::default(); - for (version, input, mut output) in outputs { - // if configured also create the build info - if create_build_info { - let build_info = RawBuildInfo::new(&input, &output, &version)?; - aggregated.build_infos.insert(version.clone(), build_info); - } - output.join_all(paths.root.as_path()); - aggregated.extend(version, output); - } - - Ok(aggregated) + .collect() + }) } #[cfg(test)] @@ -726,7 +635,7 @@ mod tests { use super::*; use crate::{ artifacts::output_selection::ContractOutputSelection, compilers::solc::SolcRegistry, - project_util::TempProject, ConfigurableArtifacts, MinimalCombinedArtifacts, Solc, + project_util::TempProject, ConfigurableArtifacts, MinimalCombinedArtifacts, }; fn init_tracing() { diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 55a2c2ab..b3a63d0f 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -18,8 +18,6 @@ use std::{ }; pub mod solc; - -#[cfg(ignore)] pub mod vyper; /// A compiler version is either installed (available locally) or can be downloaded, from the remote @@ -90,7 +88,7 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { sources: Sources, settings: 
Self::Settings, language: Self::Language, - version: &Version, + version: Version, ) -> Self; /// Returns reference to sources included into this input. @@ -206,14 +204,15 @@ impl Default for CompilerOutput { } } -pub trait Language: Hash + Eq + Clone + Debug { +pub trait Language: Hash + Eq + Clone + Debug + Display + 'static { /// Extensions of source files recognized by the language set. const FILE_EXTENSIONS: &'static [&'static str]; } /// The main compiler abstraction trait. Currently mostly represents a wrapper around compiler /// binary aware of the version and able to compile given input into [CompilerOutput] including -/// artifacts and errors. +/// artifacts and errors.' +#[auto_impl::auto_impl(&, Box, Arc)] pub trait Compiler: Send + Sync + Clone { /// Input type for the compiler. Contains settings and sources to be compiled. type Input: CompilerInput; diff --git a/src/compilers/solc/mod.rs b/src/compilers/solc.rs similarity index 89% rename from src/compilers/solc/mod.rs rename to src/compilers/solc.rs index 92683d63..56655dbc 100644 --- a/src/compilers/solc/mod.rs +++ b/src/compilers/solc.rs @@ -8,16 +8,16 @@ use crate::{ artifacts::{ output_selection::OutputSelection, Error, Settings as SolcSettings, SolcInput, Sources, }, - error::{Result, SolcError}, + error::Result, remappings::Remapping, resolver::parse::SolData, - utils::RuntimeOrHandle, Solc, SOLC_EXTENSIONS, }; use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::{BTreeSet, HashMap, HashSet}, + collections::{BTreeSet, HashSet}, + fmt, path::{Path, PathBuf}, }; @@ -27,21 +27,30 @@ pub struct SolcRegistry; #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[non_exhaustive] -pub enum SolcLanguages { +pub enum SolcLanguage { Solidity, Yul, } -impl Language for SolcLanguages { +impl Language for SolcLanguage { const FILE_EXTENSIONS: &'static [&'static str] = SOLC_EXTENSIONS; } +impl fmt::Display for SolcLanguage { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Solidity => write!(f, "Solidity"), + Self::Yul => write!(f, "Yul"), + } + } +} + impl Compiler for SolcRegistry { - type Input = SolcVerionedInput; + type Input = SolcVersionedInput; type CompilationError = crate::artifacts::Error; type ParsedSource = SolData; type Settings = SolcSettings; - type Language = SolcLanguages; + type Language = SolcLanguage; fn compile(&self, input: &Self::Input) -> Result> { let mut solc = Solc::find_or_install(&input.version)?; @@ -84,7 +93,7 @@ impl Compiler for SolcRegistry { } #[derive(Debug, Clone, Serialize)] -pub struct SolcVerionedInput { +pub struct SolcVersionedInput { #[serde(skip)] pub version: Version, #[serde(flatten)] @@ -97,9 +106,9 @@ pub struct SolcVerionedInput { pub include_paths: BTreeSet, } -impl CompilerInput for SolcVerionedInput { +impl CompilerInput for SolcVersionedInput { type Settings = SolcSettings; - type Language = SolcLanguages; + type Language = SolcLanguage; /// Creates a new [CompilerInput]s with default settings and the given sources /// @@ -109,12 +118,12 @@ impl CompilerInput for SolcVerionedInput { sources: Sources, settings: Self::Settings, language: Self::Language, - version: &Version, + version: Version, ) -> Self { - let input = SolcInput::new(language, sources, settings).sanitized(version); + let input = SolcInput::new(language, sources, settings).sanitized(&version); Self { - version: version.clone(), + version, input, base_path: None, include_paths: Default::default(), @@ -197,7 +206,7 @@ impl 
CompilerSettings for SolcSettings { } impl ParsedSource for SolData { - type Language = SolcLanguages; + type Language = SolcLanguage; fn parse(content: &str, file: &std::path::Path) -> Self { SolData::parse(content, file) @@ -213,9 +222,9 @@ impl ParsedSource for SolData { fn language(&self) -> Self::Language { if self.is_yul { - SolcLanguages::Yul + SolcLanguage::Yul } else { - SolcLanguages::Solidity + SolcLanguage::Solidity } } } diff --git a/src/compilers/vyper/input.rs b/src/compilers/vyper/input.rs index 1bd40c62..0cac920d 100644 --- a/src/compilers/vyper/input.rs +++ b/src/compilers/vyper/input.rs @@ -1,6 +1,6 @@ use std::path::Path; -use super::settings::VyperSettings; +use super::{settings::VyperSettings, VyperLanguage}; use crate::{artifacts::Sources, compilers::CompilerInput}; use semver::Version; use serde::{Deserialize, Serialize}; @@ -12,15 +12,44 @@ pub struct VyperInput { pub settings: VyperSettings, } -impl CompilerInput for VyperInput { +#[derive(Debug, Serialize)] +pub struct VyperVersionedInput { + #[serde(flatten)] + pub input: VyperInput, + #[serde(skip)] + pub version: Version, +} + +impl VyperInput { + pub fn new(sources: Sources, settings: VyperSettings) -> Self { + VyperInput { language: "Vyper".to_string(), sources, settings } + } + + pub fn strip_prefix(&mut self, base: &Path) { + self.sources = std::mem::take(&mut self.sources) + .into_iter() + .map(|(path, s)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), s)) + .collect(); + + self.settings.strip_prefix(base) + } +} + +impl CompilerInput for VyperVersionedInput { type Settings = VyperSettings; + type Language = VyperLanguage; - fn build(sources: Sources, settings: Self::Settings, _version: &Version) -> Vec { - vec![VyperInput { language: "Vyper".to_string(), sources, settings }] + fn build( + sources: Sources, + settings: Self::Settings, + _language: Self::Language, + version: Version, + ) -> Self { + Self { input: VyperInput::new(sources, settings), version } } fn sources(&self) -> &Sources { - &self.sources + &self.input.sources } fn compiler_name(&self) -> String { @@ -28,11 +57,14 @@ impl CompilerInput for VyperInput { } fn strip_prefix(&mut self, base: &Path) { - self.sources = std::mem::take(&mut self.sources) - .into_iter() - .map(|(path, s)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), s)) - .collect(); + self.input.strip_prefix(base); + } - self.settings.strip_prefix(base) + fn language(&self) -> Self::Language { + VyperLanguage + } + + fn version(&self) -> &Version { + &self.version } } diff --git a/src/compilers/vyper/mod.rs b/src/compilers/vyper/mod.rs index 9a270239..9a3c6d99 100644 --- a/src/compilers/vyper/mod.rs +++ b/src/compilers/vyper/mod.rs @@ -1,9 +1,14 @@ -use self::{error::VyperCompilationError, input::VyperInput, parser::VyperParsedSource}; -use super::{Compiler, CompilerInput, CompilerOutput}; +use self::{ + error::VyperCompilationError, + input::{VyperInput, VyperVersionedInput}, + parser::VyperParsedSource, +}; +use super::{Compiler, CompilerOutput, Language}; use crate::{ artifacts::Source, error::{Result, SolcError}, }; +use core::fmt; use semver::Version; use serde::{de::DeserializeOwned, Serialize}; use std::{ @@ -23,6 +28,20 @@ pub type VyperCompilerOutput = CompilerOutput; /// File extensions that are recognized as Vyper source files. 
pub const VYPER_EXTENSIONS: &[&str] = &["vy"]; +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +#[non_exhaustive] +pub struct VyperLanguage; + +impl Language for VyperLanguage { + const FILE_EXTENSIONS: &'static [&'static str] = VYPER_EXTENSIONS; +} + +impl fmt::Display for VyperLanguage { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Vyper") + } +} + #[derive(Debug, Clone)] pub struct Vyper { pub path: PathBuf, @@ -40,16 +59,9 @@ impl Vyper { /// Convenience function for compiling all sources under the given path pub fn compile_source(&self, path: impl AsRef) -> Result { let path = path.as_ref(); - let mut res: VyperCompilerOutput = Default::default(); - for input in VyperInput::build( - Source::read_all_from(path, VYPER_EXTENSIONS)?, - Default::default(), - &self.version, - ) { - let output = self.compile(&input)?; - res.merge(output) - } - Ok(res) + let input = + VyperInput::new(Source::read_all_from(path, VYPER_EXTENSIONS)?, Default::default()); + self.compile(&input) } /// Same as [`Self::compile()`], but only returns those files which are included in the @@ -145,18 +157,17 @@ impl Vyper { } impl Compiler for Vyper { - const FILE_EXTENSIONS: &'static [&'static str] = VYPER_EXTENSIONS; - type Settings = VyperSettings; type CompilationError = VyperCompilationError; type ParsedSource = VyperParsedSource; - type Input = VyperInput; + type Input = VyperVersionedInput; + type Language = VyperLanguage; fn compile(&self, input: &Self::Input) -> Result { self.compile(input) } - fn version(&self) -> &Version { - &self.version + fn available_versions(&self, _language: &Self::Language) -> Vec { + vec![super::CompilerVersion::Installed(self.version.clone())] } } diff --git a/src/compilers/vyper/parser.rs b/src/compilers/vyper/parser.rs index f32151f7..c862d823 100644 --- a/src/compilers/vyper/parser.rs +++ b/src/compilers/vyper/parser.rs @@ -14,6 +14,8 @@ use winnow::{ PResult, Parser, }; +use super::VyperLanguage; + #[derive(Debug, PartialEq)] pub struct VyperImport { pub level: usize, @@ -29,6 +31,8 @@ pub struct VyperParsedSource { } impl ParsedSource for VyperParsedSource { + type Language = VyperLanguage; + fn parse(content: &str, file: &Path) -> Self { let version_req = capture_outer_and_inner(content, &RE_VYPER_VERSION, &["version"]) .first() @@ -117,6 +121,10 @@ impl ParsedSource for VyperParsedSource { } Ok(imports) } + + fn language(&self) -> Self::Language { + VyperLanguage + } } /// Parses given source trying to find all import directives. diff --git a/src/config.rs b/src/config.rs index 433b2e1a..2879153c 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,7 +1,7 @@ use crate::{ artifacts::{output_selection::ContractOutputSelection, Settings}, cache::SOLIDITY_FILES_CACHE_FILENAME, - compilers::{solc::SolcLanguages, Language}, + compilers::{solc::SolcLanguage, Language}, error::{Result, SolcError, SolcIoError}, flatten::{collect_ordered_deps, combine_version_pragmas}, remappings::Remapping, @@ -19,7 +19,7 @@ use std::{ /// Where to find all files or where to write them #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ProjectPathsConfig { +pub struct ProjectPathsConfig { /// Project root pub root: PathBuf, /// Path to the cache, if any diff --git a/src/lib.rs b/src/lib.rs index 40bca430..bc881465 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -81,6 +81,8 @@ pub mod project_util; #[derive(Clone, Debug)] pub struct Project { pub compiler: C, + /// Compiler versions locked for specific languages. 
+ pub locked_versions: HashMap, /// The layout of the project pub paths: ProjectPathsConfig, /// The compiler settings @@ -371,35 +373,6 @@ impl Project { project::ProjectCompiler::with_sources(self, sources)?.with_sparse_output(filter).compile() } - /// Compiles the given source files with the exact [Compiler] instance - /// - /// First all libraries for the sources are resolved by scanning all their imports. - /// If caching is enabled for the `Project`, then all unchanged files are filtered from the - /// sources and their existing artifacts are read instead. This will also update the cache - /// file and cleans up entries for files which may have been removed. Unchanged files that - /// for which an artifact exist, are not compiled again. - /// - /// # Examples - /// - /// ```no_run - /// use foundry_compilers::{Project, Solc}; - /// - /// let project = Project::builder().build()?; - /// let sources = project.paths.read_sources()?; - /// let solc = Solc::find_svm_installed_version("0.8.11")?.unwrap(); - /// project.compile_with_version(&solc, sources)?; - /// # Ok::<(), Box>(()) - /// ``` - #[cfg(ignore)] - pub fn compile_with_version( - &self, - compiler: &C, - sources: Sources, - ) -> Result> { - project::ProjectCompiler::with_sources_and_compiler(self, sources, compiler.clone())? - .compile() - } - /// Removes the project's artifacts and cache file /// /// If the cache file was the only file in the folder, this also removes the empty folder. @@ -546,6 +519,8 @@ impl Project { pub struct ProjectBuilder { /// The layout of the paths: Option>, + /// Compiler versions locked for specific languages. + locked_versions: HashMap, /// How solc invocation should be configured. settings: Option, /// Whether caching is enabled, default is true. @@ -585,6 +560,7 @@ impl ProjectBuilder { compiler_severity_filter: Severity::Error, solc_jobs: None, settings: None, + locked_versions: Default::default(), } } @@ -700,6 +676,18 @@ impl ProjectBuilder { self.solc_jobs(1) } + #[must_use] + pub fn locked_version(mut self, lang: C::Language, version: Version) -> Self { + self.locked_versions.insert(lang, version); + self + } + + #[must_use] + pub fn locked_versions(mut self, versions: HashMap) -> Self { + self.locked_versions = versions; + self + } + /// Set arbitrary `ArtifactOutputHandler` pub fn artifacts(self, artifacts: A) -> ProjectBuilder { let ProjectBuilder { @@ -714,6 +702,7 @@ impl ProjectBuilder { slash_paths, ignored_file_paths, settings, + locked_versions, .. 
} = self; ProjectBuilder { @@ -729,6 +718,7 @@ impl ProjectBuilder { solc_jobs, build_info, settings, + locked_versions, } } @@ -746,6 +736,7 @@ impl ProjectBuilder { build_info, slash_paths, settings, + locked_versions, } = self; let mut paths = paths.map(Ok).unwrap_or_else(ProjectPathsConfig::current_hardhat)?; @@ -771,6 +762,7 @@ impl ProjectBuilder { offline, slash_paths, settings: settings.unwrap_or_default(), + locked_versions, }) } } diff --git a/src/project_util/mod.rs b/src/project_util/mod.rs index c46f4545..78589c86 100644 --- a/src/project_util/mod.rs +++ b/src/project_util/mod.rs @@ -13,7 +13,7 @@ use crate::{ utils::{self, tempdir}, Artifact, ArtifactOutput, Artifacts, CompilerCache, ConfigurableArtifacts, ConfigurableContractArtifact, PathStyle, Project, ProjectCompileOutput, ProjectPathsConfig, - Solc, SolcIoError, + SolcIoError, }; use fs_extra::{dir, file}; use std::{ @@ -79,15 +79,13 @@ impl TempProject { /// Explicitly sets the solc version for the project #[cfg(feature = "svm-solc")] - #[cfg(ignore)] pub fn set_solc(&mut self, solc: impl AsRef) -> &mut Self { - use crate::{compilers::CompilerVersionManager, CompilerConfig}; + use crate::compilers::solc::SolcLanguage; use semver::Version; - let solc = crate::compilers::solc::SolcVersionManager - .get_or_install(&Version::parse(solc.as_ref()).unwrap()) - .unwrap(); - self.inner.compiler = CompilerConfig::Specific(solc); + let version = Version::parse(solc.as_ref()).unwrap(); + self.inner.locked_versions.insert(SolcLanguage::Solidity, version.clone()); + self.inner.locked_versions.insert(SolcLanguage::Yul, version.clone()); self } diff --git a/src/resolver/mod.rs b/src/resolver/mod.rs index 9e7cd511..2c6ad7ef 100644 --- a/src/resolver/mod.rs +++ b/src/resolver/mod.rs @@ -55,7 +55,7 @@ use crate::{ use core::fmt; use parse::SolData; use rayon::prelude::*; -use semver::VersionReq; +use semver::{Version, VersionReq}; use std::{ collections::{BTreeSet, HashMap, HashSet, VecDeque}, io, @@ -462,6 +462,7 @@ impl> Graph { pub fn into_sources_by_version>( self, offline: bool, + locked_versions: &HashMap, compiler: &C, ) -> Result<(VersionedSources, GraphEdges)> { /// insert the imports of the given node into the sources map @@ -494,7 +495,8 @@ impl> Graph { } } - let versioned_nodes_by_lang = self.get_input_node_versions(offline, compiler)?; + let versioned_nodes_by_lang = + self.get_input_node_versions(offline, locked_versions, compiler)?; let (nodes, edges) = self.split(); let mut all_nodes = nodes.into_iter().enumerate().collect::>(); @@ -524,7 +526,7 @@ impl> Graph { &mut processed_sources, ); } - versioned_sources.insert(version.into(), sources); + versioned_sources.insert(version, sources); } resulted_sources.insert(language, versioned_sources); @@ -600,13 +602,18 @@ impl> Graph { fn get_input_node_versions>( &self, offline: bool, + locked_versions: &HashMap, compiler: &C, - ) -> Result>>> { + ) -> Result>>> { trace!("resolving input node versions"); let mut resulted_nodes = HashMap::new(); for (language, nodes) in self.nodes_by_language() { + if let Some(version) = locked_versions.get(&language) { + resulted_nodes.insert(language, HashMap::from([(version.clone(), nodes)])); + continue; + } // this is likely called by an application and will be eventually printed so we don't // exit on first error, instead gather all the errors and return a bundled // error message instead @@ -688,7 +695,13 @@ impl> Graph { if errors.is_empty() { trace!("resolved {} versions {:?}", versioned_nodes.len(), versioned_nodes.keys()); - 
resulted_nodes.insert(language, versioned_nodes); + resulted_nodes.insert( + language, + versioned_nodes + .into_iter() + .map(|(v, nodes)| (Version::from(v), nodes)) + .collect(), + ); } else { error!("failed to resolve versions"); return Err(SolcError::msg(errors.join("\n"))); @@ -907,8 +920,6 @@ enum SourceVersionError { #[cfg(test)] mod tests { - use crate::Solc; - use super::*; #[test] @@ -960,8 +971,6 @@ mod tests { #[test] #[cfg(not(target_os = "windows"))] fn can_print_dapp_sample_graph() { - use crate::compilers::solc::SolcLanguages; - let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::dapptools(root).unwrap(); let graph = Graph::::resolve(&paths).unwrap(); @@ -985,8 +994,6 @@ src/Dapp.t.sol >=0.6.6 #[test] #[cfg(not(target_os = "windows"))] fn can_print_hardhat_sample_graph() { - use crate::{compilers::solc::SolcLanguages, Solc}; - let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample"); let paths = ProjectPathsConfig::hardhat(root).unwrap(); let graph = Graph::::resolve(&paths).unwrap(); diff --git a/tests/project.rs b/tests/project.rs index 7adc77b5..4993607e 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -9,7 +9,11 @@ use foundry_compilers::{ }, buildinfo::BuildInfo, cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, - compilers::{solc::SolcRegistry, CompilerOutput}, + compilers::{ + solc::{SolcLanguage, SolcRegistry}, + vyper::{Vyper, VyperLanguage, VyperSettings}, + CompilerOutput, + }, error::SolcError, flatten::Flattener, info::ContractInfo, @@ -33,7 +37,6 @@ use std::{ }; use svm::{platform, Platform}; -#[cfg(ignore)] pub static VYPER: Lazy = Lazy::new(|| { RuntimeOrHandle::new().block_on(async { #[cfg(target_family = "unix")] @@ -2876,7 +2879,6 @@ async fn can_install_solc_and_compile_std_json_input_async() { } #[test] -#[cfg(ignore)] fn can_purge_obsolete_artifacts() { let mut project = TempProject::dapptools().unwrap(); project.set_solc("0.8.10"); @@ -3785,7 +3787,11 @@ fn test_deterministic_metadata() { copy_dir_all(orig_root, &tmp_dir).unwrap(); let paths = ProjectPathsConfig::builder().root(root).build().unwrap(); - let project = Project::builder().paths(paths).build(SolcRegistry::default()).unwrap(); + let project = Project::builder() + .locked_version(SolcLanguage::Solidity, Version::new(0, 8, 18)) + .paths(paths) + .build(SolcRegistry::default()) + .unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); @@ -3803,7 +3809,6 @@ fn test_deterministic_metadata() { } #[test] -#[cfg(ignore)] fn can_compile_vyper_with_cache() { let tmp_dir = tempfile::tempdir().unwrap(); let root = tmp_dir.path(); @@ -3818,7 +3823,7 @@ fn can_compile_vyper_with_cache() { .sources(root.join("src")) .artifacts(root.join("out")) .root(root) - .build::() + .build::() .unwrap(); let settings = VyperSettings { @@ -3830,7 +3835,7 @@ fn can_compile_vyper_with_cache() { let project = ProjectBuilder::::new(Default::default()) .settings(settings) .paths(paths) - .build(CompilerConfig::Specific(VYPER.clone())) + .build(VYPER.clone()) .unwrap(); let compiled = project.compile().unwrap(); @@ -3865,14 +3870,13 @@ fn yul_remappings_ignored() { } #[test] -#[cfg(ignore)] fn test_vyper_imports() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/vyper-imports"); let paths = ProjectPathsConfig::builder() .sources(root.join("src")) .root(root) - .build::() + .build::() .unwrap(); let settings = VyperSettings { @@ -3884,7 +3888,7 @@ fn test_vyper_imports() { 
.settings(settings) .paths(paths) .no_artifacts() - .build(CompilerConfig::Specific(VYPER.clone())) + .build(VYPER.clone()) .unwrap(); project.compile().unwrap().assert_success(); From a5f6fc73fece3c31b3449edb931ab5069c5a1fef Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 28 May 2024 19:57:32 +0200 Subject: [PATCH 05/24] fixes --- src/compile/project.rs | 20 +++++++++++--------- src/resolver/mod.rs | 7 +++---- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/src/compile/project.rs b/src/compile/project.rs index 781e7d5f..fec93dcb 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -381,15 +381,17 @@ impl CompilerSources { { use path_slash::PathBufExt; - fn slash_versioned_sources(v: &mut VersionedSources) { - for (_, _, sources) in v { - *sources = std::mem::take(sources) - .into_iter() - .map(|(path, source)| { - (PathBuf::from(path.to_slash_lossy().as_ref()), source) - }) - .collect() - } + fn slash_versioned_sources(v: &mut VersionedSources) { + v.iter_mut().map(|(language, versioned_sources)| { + versioned_sources.iter_mut().map(|(version, sources)| { + *sources = std::mem::take(sources) + .into_iter() + .map(|(path, source)| { + (PathBuf::from(path.to_slash_lossy().as_ref()), source) + }) + .collect() + }) + }); } match self { diff --git a/src/resolver/mod.rs b/src/resolver/mod.rs index 2c6ad7ef..10074584 100644 --- a/src/resolver/mod.rs +++ b/src/resolver/mod.rs @@ -6,10 +6,9 @@ //! Some constraints we're working with when resolving contracts //! //! 1. Each file can contain several source units and can have any number of imports/dependencies -//! (using the term interchangeably). Each dependency can declare a version range that it is -//! compatible with, solidity version pragma. -//! 2. A dependency can be imported from any directory, -//! see `Remappings` +//! (using the term interchangeably). Each dependency can declare a version range that it is +//! compatible with, solidity version pragma. +//! 2. A dependency can be imported from any directory, see `Remappings` //! //! Finding all dependencies is fairly simple, we're simply doing a DFS, starting the source //! 
contracts From b86b4740d71af093f3cb9172c4d7e8ed00539e24 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 28 May 2024 20:10:00 +0200 Subject: [PATCH 06/24] fix --- src/compile/project.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compile/project.rs b/src/compile/project.rs index fec93dcb..50a90331 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -382,8 +382,8 @@ impl CompilerSources { use path_slash::PathBufExt; fn slash_versioned_sources(v: &mut VersionedSources) { - v.iter_mut().map(|(language, versioned_sources)| { - versioned_sources.iter_mut().map(|(version, sources)| { + v.values_mut().for_each(|versioned_sources| { + versioned_sources.values_mut().for_each(|sources| { *sources = std::mem::take(sources) .into_iter() .map(|(path, source)| { From 3caeba119b5f6fb7357a41bf2a08b44c941e30e8 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 28 May 2024 20:53:07 +0200 Subject: [PATCH 07/24] MultiCompiler impl --- src/compile/project.rs | 4 +- src/compilers/mod.rs | 19 ++- src/compilers/multi.rs | 276 ++++++++++++++++++++++++++++++++ src/compilers/solc.rs | 12 +- src/compilers/vyper/input.rs | 4 +- src/compilers/vyper/parser.rs | 8 +- src/compilers/vyper/settings.rs | 4 +- src/filter.rs | 75 ++++----- src/lib.rs | 13 +- src/project_util/mod.rs | 22 +-- src/resolver/mod.rs | 8 +- src/resolver/parse.rs | 4 +- tests/project.rs | 18 +-- 13 files changed, 372 insertions(+), 95 deletions(-) create mode 100644 src/compilers/multi.rs diff --git a/src/compile/project.rs b/src/compile/project.rs index 50a90331..1272106d 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -636,7 +636,7 @@ fn compile_parallel, I: CompilerInput>( mod tests { use super::*; use crate::{ - artifacts::output_selection::ContractOutputSelection, compilers::solc::SolcRegistry, + artifacts::output_selection::ContractOutputSelection, compilers::solc::SolcCompiler, project_util::TempProject, ConfigurableArtifacts, MinimalCombinedArtifacts, }; @@ -670,7 +670,7 @@ mod tests { fn can_detect_cached_files() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index b3a63d0f..ea159318 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -17,6 +17,7 @@ use std::{ path::{Path, PathBuf}, }; +pub mod multi; pub mod solc; pub mod vyper; @@ -61,8 +62,8 @@ impl fmt::Display for CompilerVersion { pub trait CompilerSettings: Default + Serialize + DeserializeOwned + Clone + Debug + Send + Sync + 'static { - /// Returns mutable reference to configured [OutputSelection]. - fn output_selection_mut(&mut self) -> &mut OutputSelection; + /// Executes given fn with mutable reference to configured [OutputSelection]. + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy); /// Returns true if artifacts compiled with given `other` config are compatible with this /// config and if compilation can be skipped. 
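The closure-based `update_output_selection` replaces the old `output_selection_mut` accessor so that one mutation can be applied uniformly to whatever settings sit behind the trait; a multi-compiler settings type can fan the same closure out to each sub-compiler's selection, which is why the bound is `FnOnce(&mut OutputSelection) + Copy` rather than a mutable getter. A minimal sketch of the call pattern, assuming the public crate paths used by the tests; `clear_output_selection` is a hypothetical helper, not part of this patch, and the `common_output_selection([])` call mirrors a use that appears later in the series:

    use foundry_compilers::{
        artifacts::output_selection::OutputSelection, compilers::CompilerSettings,
    };

    // Hypothetical helper: reset the configured selection to the common defaults.
    // The closure captures nothing, so it stays `Copy` as the trait bound requires
    // and can be re-applied to several underlying selections.
    fn clear_output_selection<S: CompilerSettings>(settings: &mut S) {
        settings.update_output_selection(|selection| {
            *selection = OutputSelection::common_output_selection([]);
        });
    }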
@@ -134,16 +135,14 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { pub trait ParsedSource: Debug + Sized + Send { type Language: Language; - fn parse(content: &str, file: &Path) -> Self; + fn parse(content: &str, file: &Path) -> Result; fn version_req(&self) -> Option<&VersionReq>; fn resolve_imports(&self, paths: &ProjectPathsConfig) -> Result>; fn language(&self) -> Self::Language; } /// Error returned by compiler. Might also represent a warning or informational message. -pub trait CompilationError: - Serialize + DeserializeOwned + Send + Sync + Display + Debug + Clone + 'static -{ +pub trait CompilationError: Serialize + Send + Sync + Display + Debug + Clone + 'static { fn is_warning(&self) -> bool; fn is_error(&self) -> bool; fn source_location(&self) -> Option; @@ -196,6 +195,14 @@ impl CompilerOutput { .map(|(path, source)| (root.join(path), source)) .collect(); } + + pub fn map_err F>(self, op: O) -> CompilerOutput { + CompilerOutput { + errors: self.errors.into_iter().map(op).collect(), + contracts: self.contracts, + sources: self.sources, + } + } } impl Default for CompilerOutput { diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs new file mode 100644 index 00000000..5d1695c0 --- /dev/null +++ b/src/compilers/multi.rs @@ -0,0 +1,276 @@ +use super::{ + solc::{SolcCompiler, SolcLanguage, SolcVersionedInput}, + vyper::{ + error::VyperCompilationError, input::VyperVersionedInput, parser::VyperParsedSource, Vyper, + VyperLanguage, VyperSettings, + }, + CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, + Language, ParsedSource, +}; +use crate::{ + artifacts::{output_selection::OutputSelection, Error, Settings as SolcSettings, Sources}, + error::{Result, SolcError}, + remappings::Remapping, + resolver::parse::SolData, +}; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::{ + collections::BTreeSet, + fmt, + path::{Path, PathBuf}, +}; + +#[derive(Debug, Clone)] +pub struct MultiCompiler { + solc_compiler: SolcCompiler, + vyper_compiler: Vyper, +} + +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub enum MultiCompilerLanguage { + Solc(SolcLanguage), + Vyper(VyperLanguage), +} + +#[derive(Debug, Clone)] +pub enum MultiCompilerParsedSource { + Solc(SolData), + Vyper(VyperParsedSource), +} + +#[derive(Debug, Clone, Serialize)] +#[serde(untagged)] +pub enum MultiCompilerError { + Solc(Error), + Vyper(VyperCompilationError), +} + +impl Language for MultiCompilerLanguage { + const FILE_EXTENSIONS: &'static [&'static str] = &["sol", "vy", "yul"]; +} + +impl fmt::Display for MultiCompilerLanguage { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Solc(lang) => lang.fmt(f), + Self::Vyper(lang) => lang.fmt(f), + } + } +} + +#[derive(Default, Clone, Debug, Serialize, Deserialize)] +pub struct MultiCompilerSettings { + solc: SolcSettings, + vyper: VyperSettings, +} + +impl CompilerSettings for MultiCompilerSettings { + fn can_use_cached(&self, other: &Self) -> bool { + self.solc.can_use_cached(&other.solc) && self.vyper.can_use_cached(&other.vyper) + } + + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { + f(&mut self.solc.output_selection); + f(&mut self.vyper.output_selection); + } +} + +#[derive(Debug, Clone, Serialize)] +#[serde(untagged)] +pub enum MultiCompilerInput { + Solc(SolcVersionedInput), + Vyper(VyperVersionedInput), +} + +impl CompilerInput for MultiCompilerInput { + type Language = MultiCompilerLanguage; + type Settings = 
MultiCompilerSettings; + + fn build( + sources: Sources, + settings: Self::Settings, + language: Self::Language, + version: Version, + ) -> Self { + match language { + MultiCompilerLanguage::Solc(language) => { + Self::Solc(SolcVersionedInput::build(sources, settings.solc, language, version)) + } + MultiCompilerLanguage::Vyper(language) => { + Self::Vyper(VyperVersionedInput::build(sources, settings.vyper, language, version)) + } + } + } + + fn compiler_name(&self) -> String { + match self { + Self::Solc(input) => input.compiler_name(), + Self::Vyper(input) => input.compiler_name(), + } + } + + fn language(&self) -> Self::Language { + match self { + Self::Solc(input) => MultiCompilerLanguage::Solc(input.language()), + Self::Vyper(input) => MultiCompilerLanguage::Vyper(input.language()), + } + } + + fn sources(&self) -> &Sources { + match self { + Self::Solc(input) => input.sources(), + Self::Vyper(input) => input.sources(), + } + } + + fn strip_prefix(&mut self, base: &Path) { + match self { + Self::Solc(input) => input.strip_prefix(base), + Self::Vyper(input) => input.strip_prefix(base), + } + } + + fn version(&self) -> &Version { + match self { + Self::Solc(input) => input.version(), + Self::Vyper(input) => input.version(), + } + } + + fn with_allow_paths(self, allowed_paths: BTreeSet) -> Self { + match self { + Self::Solc(input) => Self::Solc(input.with_allow_paths(allowed_paths)), + Self::Vyper(input) => Self::Vyper(input.with_allow_paths(allowed_paths)), + } + } + + fn with_base_path(self, base_path: PathBuf) -> Self { + match self { + Self::Solc(input) => Self::Solc(input.with_base_path(base_path)), + Self::Vyper(input) => Self::Vyper(input.with_base_path(base_path)), + } + } + + fn with_include_paths(self, include_paths: BTreeSet) -> Self { + match self { + Self::Solc(input) => Self::Solc(input.with_include_paths(include_paths)), + Self::Vyper(input) => Self::Vyper(input.with_include_paths(include_paths)), + } + } + + fn with_remappings(self, remappings: Vec) -> Self { + match self { + Self::Solc(input) => Self::Solc(input.with_remappings(remappings)), + Self::Vyper(input) => Self::Vyper(input.with_remappings(remappings)), + } + } +} + +impl Compiler for MultiCompiler { + type Input = MultiCompilerInput; + type CompilationError = MultiCompilerError; + type ParsedSource = MultiCompilerParsedSource; + type Settings = MultiCompilerSettings; + type Language = MultiCompilerLanguage; + + fn compile(&self, input: &Self::Input) -> Result> { + match input { + MultiCompilerInput::Solc(input) => { + self.solc_compiler.compile(input).map(|res| res.map_err(MultiCompilerError::Solc)) + } + MultiCompilerInput::Vyper(input) => { + self.vyper_compiler.compile(input).map(|res| res.map_err(MultiCompilerError::Vyper)) + } + } + } + + fn available_versions(&self, language: &Self::Language) -> Vec { + match language { + MultiCompilerLanguage::Solc(language) => { + self.solc_compiler.available_versions(language) + } + MultiCompilerLanguage::Vyper(language) => { + self.vyper_compiler.available_versions(language) + } + } + } +} + +impl ParsedSource for MultiCompilerParsedSource { + type Language = MultiCompilerLanguage; + + fn parse(content: &str, file: &std::path::Path) -> Result { + match file.extension().and_then(|e| e.to_str()) { + Some("sol" | "yul") => ::parse(content, file).map(Self::Solc), + Some("vy") => VyperParsedSource::parse(content, file).map(Self::Vyper), + _ => Err(SolcError::msg("unexpected file extension")), + } + } + + fn version_req(&self) -> Option<&semver::VersionReq> { + match self { 
+ Self::Solc(parsed) => parsed.version_req(), + Self::Vyper(parsed) => parsed.version_req(), + } + } + + fn resolve_imports(&self, paths: &crate::ProjectPathsConfig) -> Result> { + match self { + Self::Solc(parsed) => parsed.resolve_imports(paths), + Self::Vyper(parsed) => parsed.resolve_imports(paths), + } + } + + fn language(&self) -> Self::Language { + match self { + Self::Solc(parsed) => MultiCompilerLanguage::Solc(parsed.language()), + Self::Vyper(parsed) => MultiCompilerLanguage::Vyper(parsed.language()), + } + } +} + +impl CompilationError for MultiCompilerError { + fn is_warning(&self) -> bool { + match self { + Self::Solc(error) => error.is_warning(), + Self::Vyper(error) => error.is_warning(), + } + } + fn is_error(&self) -> bool { + match self { + Self::Solc(error) => error.is_error(), + Self::Vyper(error) => error.is_error(), + } + } + + fn source_location(&self) -> Option { + match self { + Self::Solc(error) => error.source_location(), + Self::Vyper(error) => error.source_location(), + } + } + + fn severity(&self) -> crate::artifacts::error::Severity { + match self { + Self::Solc(error) => error.severity(), + Self::Vyper(error) => error.severity(), + } + } + + fn error_code(&self) -> Option { + match self { + Self::Solc(error) => error.error_code(), + Self::Vyper(error) => error.error_code(), + } + } +} + +impl fmt::Display for MultiCompilerError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Solc(error) => error.fmt(f), + Self::Vyper(error) => error.fmt(f), + } + } +} diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index 56655dbc..7161bdf3 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -23,7 +23,7 @@ use std::{ #[derive(Debug, Clone, Default)] #[non_exhaustive] -pub struct SolcRegistry; +pub struct SolcCompiler; #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[non_exhaustive] @@ -45,7 +45,7 @@ impl fmt::Display for SolcLanguage { } } -impl Compiler for SolcRegistry { +impl Compiler for SolcCompiler { type Input = SolcVersionedInput; type CompilationError = crate::artifacts::Error; type ParsedSource = SolData; @@ -174,8 +174,8 @@ impl CompilerInput for SolcVersionedInput { } impl CompilerSettings for SolcSettings { - fn output_selection_mut(&mut self) -> &mut OutputSelection { - &mut self.output_selection + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection) + Copy) { + f(&mut self.output_selection) } fn can_use_cached(&self, other: &Self) -> bool { @@ -208,8 +208,8 @@ impl CompilerSettings for SolcSettings { impl ParsedSource for SolData { type Language = SolcLanguage; - fn parse(content: &str, file: &std::path::Path) -> Self { - SolData::parse(content, file) + fn parse(content: &str, file: &std::path::Path) -> Result { + Ok(SolData::parse(content, file)) } fn version_req(&self) -> Option<&semver::VersionReq> { diff --git a/src/compilers/vyper/input.rs b/src/compilers/vyper/input.rs index 0cac920d..be2db846 100644 --- a/src/compilers/vyper/input.rs +++ b/src/compilers/vyper/input.rs @@ -5,14 +5,14 @@ use crate::{artifacts::Sources, compilers::CompilerInput}; use semver::Version; use serde::{Deserialize, Serialize}; -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct VyperInput { pub language: String, pub sources: Sources, pub settings: VyperSettings, } -#[derive(Debug, Serialize)] +#[derive(Debug, Clone, Serialize)] pub struct VyperVersionedInput { #[serde(flatten)] pub input: VyperInput, diff --git 
a/src/compilers/vyper/parser.rs b/src/compilers/vyper/parser.rs index c862d823..3061d6ba 100644 --- a/src/compilers/vyper/parser.rs +++ b/src/compilers/vyper/parser.rs @@ -16,14 +16,14 @@ use winnow::{ use super::VyperLanguage; -#[derive(Debug, PartialEq)] +#[derive(Debug, Clone, PartialEq)] pub struct VyperImport { pub level: usize, pub path: Option, pub final_part: Option, } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct VyperParsedSource { path: PathBuf, version_req: Option, @@ -33,7 +33,7 @@ pub struct VyperParsedSource { impl ParsedSource for VyperParsedSource { type Language = VyperLanguage; - fn parse(content: &str, file: &Path) -> Self { + fn parse(content: &str, file: &Path) -> Result { let version_req = capture_outer_and_inner(content, &RE_VYPER_VERSION, &["version"]) .first() .and_then(|(cap, _)| VersionReq::parse(cap.as_str()).ok()); @@ -42,7 +42,7 @@ impl ParsedSource for VyperParsedSource { let path = file.to_path_buf(); - VyperParsedSource { path, version_req, imports } + Ok(VyperParsedSource { path, version_req, imports }) } fn version_req(&self) -> Option<&VersionReq> { diff --git a/src/compilers/vyper/settings.rs b/src/compilers/vyper/settings.rs index 15c2e199..2d039ac6 100644 --- a/src/compilers/vyper/settings.rs +++ b/src/compilers/vyper/settings.rs @@ -57,8 +57,8 @@ impl VyperSettings { } impl CompilerSettings for VyperSettings { - fn output_selection_mut(&mut self) -> &mut OutputSelection { - &mut self.output_selection + fn update_output_selection(&mut self, f: impl FnOnce(&mut OutputSelection)) { + f(&mut self.output_selection) } fn can_use_cached(&self, other: &Self) -> bool { diff --git a/src/filter.rs b/src/filter.rs index a4b451a7..551e0d05 100644 --- a/src/filter.rs +++ b/src/filter.rs @@ -151,13 +151,6 @@ impl SparseOutputFilter { graph: &GraphEdges, f: &dyn SparseOutputFileFilter, ) { - trace!("optimizing output selection with custom filter"); - let selection = settings - .output_selection_mut() - .as_mut() - .remove("*") - .unwrap_or_else(OutputSelection::default_file_output_selection); - let mut full_compilation = HashSet::new(); // populate sources which need complete compilation with data from filter @@ -169,18 +162,23 @@ impl SparseOutputFilter { } } - // set output selections - for file in sources.0.keys() { - let key = format!("{}", file.display()); - if full_compilation.contains(file) { - settings.output_selection_mut().as_mut().insert(key, selection.clone()); - } else { - settings - .output_selection_mut() - .as_mut() - .insert(key, OutputSelection::empty_file_output_select()); + settings.update_output_selection(|selection| { + trace!("optimizing output selection with custom filter"); + let default_selection = selection + .as_mut() + .remove("*") + .unwrap_or_else(OutputSelection::default_file_output_selection); + + // set output selections + for file in sources.0.keys() { + let key = format!("{}", file.display()); + if full_compilation.contains(file) { + selection.as_mut().insert(key, default_selection.clone()); + } else { + selection.as_mut().insert(key, OutputSelection::empty_file_output_select()); + } } - } + }) } /// prunes all clean sources and only selects an output for dirty sources @@ -192,31 +190,26 @@ impl SparseOutputFilter { sources.len() ); - let default = settings - .output_selection_mut() - .as_mut() - .remove("*") - .unwrap_or_else(OutputSelection::default_file_output_selection); - - let optimized = S::minimal_output_selection(); - - for (file, kind) in sources.0.iter() { - match kind { - SourceCompilationKind::Complete(_) => 
{ - settings - .output_selection_mut() - .as_mut() - .insert(format!("{}", file.display()), default.clone()); - } - SourceCompilationKind::Optimized(_) => { - trace!("using pruned output selection for {}", file.display()); - settings - .output_selection_mut() - .as_mut() - .insert(format!("{}", file.display()), optimized.clone()); + settings.update_output_selection(|selection| { + let selection = selection.as_mut(); + let default = selection + .remove("*") + .unwrap_or_else(OutputSelection::default_file_output_selection); + + let optimized = S::minimal_output_selection(); + + for (file, kind) in sources.0.iter() { + match kind { + SourceCompilationKind::Complete(_) => { + selection.insert(format!("{}", file.display()), default.clone()); + } + SourceCompilationKind::Optimized(_) => { + trace!("using pruned output selection for {}", file.display()); + selection.insert(format!("{}", file.display()), optimized.clone()); + } } } - } + }); } } diff --git a/src/lib.rs b/src/lib.rs index bc881465..b0403d3f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,7 +25,7 @@ pub mod cache; pub mod flatten; pub mod hh; -use compilers::{solc::SolcRegistry, Compiler, CompilerSettings}; +use compilers::{solc::SolcCompiler, Compiler, CompilerSettings}; pub use filter::SparseOutputFileFilter; pub use hh::{HardhatArtifact, HardhatArtifacts}; @@ -79,7 +79,7 @@ pub mod project_util; /// Represents a project workspace and handles `solc` compiling of all contracts in that workspace. #[derive(Clone, Debug)] -pub struct Project { +pub struct Project { pub compiler: C, /// Compiler versions locked for specific languages. pub locked_versions: HashMap, @@ -154,7 +154,7 @@ impl Project { } } -impl Project { +impl Project { /// Returns standard-json-input to compile the target contract pub fn standard_json_input( &self, @@ -440,8 +440,9 @@ impl Project { { let mut temp_project = (*self).clone(); temp_project.no_artifacts = true; - *temp_project.settings.output_selection_mut() = - OutputSelection::common_output_selection([]); + temp_project.settings.update_output_selection(|selection| { + *selection = OutputSelection::common_output_selection([]); + }); let output = temp_project.compile()?; @@ -516,7 +517,7 @@ impl Project { } } -pub struct ProjectBuilder { +pub struct ProjectBuilder { /// The layout of the paths: Option>, /// Compiler versions locked for specific languages. 
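Together with the `locked_version` builder added earlier in the series, the renamed `SolcCompiler` can now be driven without per-file version auto-detection. A minimal usage sketch, modelled on the updated `test_deterministic_metadata` test further down; the root path and the pinned version are placeholders, and the import paths are assumed from the test file:

    use foundry_compilers::{
        compilers::solc::{SolcCompiler, SolcLanguage},
        Project, ProjectPathsConfig,
    };
    use semver::Version;

    fn pinned_solc_project(root: &std::path::Path) -> Result<(), Box<dyn std::error::Error>> {
        let paths = ProjectPathsConfig::builder().root(root).build()?;
        // Lock every Solidity source to one compiler version instead of resolving
        // versions from pragmas; Yul could be pinned the same way.
        let project = Project::builder()
            .locked_version(SolcLanguage::Solidity, Version::new(0, 8, 18))
            .paths(paths)
            .build(SolcCompiler::default())?;
        let compiled = project.compile()?;
        compiled.assert_success();
        Ok(())
    }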
diff --git a/src/project_util/mod.rs b/src/project_util/mod.rs index 78589c86..75d782bd 100644 --- a/src/project_util/mod.rs +++ b/src/project_util/mod.rs @@ -2,7 +2,7 @@ use crate::{ artifacts::{Error, Settings}, - compilers::{solc::SolcRegistry, Compiler}, + compilers::{solc::SolcCompiler, Compiler}, config::ProjectPathsConfigBuilder, error::{Result, SolcError}, filter::SparseOutputFileFilter, @@ -29,18 +29,18 @@ pub mod mock; /// A [`Project`] wrapper that lives in a new temporary directory /// /// Once `TempProject` is dropped, the temp dir is automatically removed, see [`TempDir::drop()`] -pub struct TempProject { +pub struct TempProject { /// temporary workspace root _root: TempDir, /// actual project workspace with the `root` tempdir as its root inner: Project, } -impl TempProject { +impl TempProject { /// Makes sure all resources are created pub fn create_new( root: TempDir, - inner: Project, + inner: Project, ) -> std::result::Result { let mut project = Self { _root: root, inner }; project.paths().create_all()?; @@ -89,7 +89,7 @@ impl TempProject { self } - pub fn project(&self) -> &Project { + pub fn project(&self) -> &Project { &self.inner } @@ -97,7 +97,7 @@ impl TempProject { self.project().flatten(target) } - pub fn project_mut(&mut self) -> &mut Project { + pub fn project_mut(&mut self) -> &mut Project { &mut self.inner } @@ -349,7 +349,7 @@ contract {} {{}} } } -impl TempProject { +impl TempProject { /// Creates a new temp project inside a tempdir with a prefixed directory #[cfg(feature = "svm-solc")] pub fn prefixed(prefix: &str, paths: ProjectPathsConfigBuilder) -> Result { @@ -374,7 +374,7 @@ impl TempProject { } } -impl fmt::Debug for TempProject { +impl fmt::Debug for TempProject { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("TempProject").field("paths", self.paths()).finish() } @@ -399,7 +399,7 @@ fn contract_file_name(name: impl AsRef) -> String { } #[cfg(feature = "svm-solc")] -impl TempProject { +impl TempProject { /// Creates an empty new hardhat style workspace in a new temporary dir pub fn hardhat() -> Result { let tmp_dir = tempdir("tmp_hh")?; @@ -474,8 +474,8 @@ impl TempProject { } } -impl AsRef> for TempProject { - fn as_ref(&self) -> &Project { +impl AsRef> for TempProject { + fn as_ref(&self) -> &Project { self.project() } } diff --git a/src/resolver/mod.rs b/src/resolver/mod.rs index 10074584..f7cf8d2e 100644 --- a/src/resolver/mod.rs +++ b/src/resolver/mod.rs @@ -335,10 +335,10 @@ impl Graph { let mut unresolved: VecDeque<_> = sources .into_par_iter() .map(|(path, source)| { - let data = D::parse(source.as_ref(), &path); - (path.clone(), Node { path, source, data }) + let data = D::parse(source.as_ref(), &path)?; + Ok((path.clone(), Node { path, source, data })) }) - .collect(); + .collect::>()?; // identifiers of all resolved files let mut index: HashMap<_, _> = @@ -853,7 +853,7 @@ impl Node { } } })?; - let data = D::parse(source.as_ref(), file); + let data = D::parse(source.as_ref(), file)?; Ok(Self { path: file.to_path_buf(), source, data }) } diff --git a/src/resolver/parse.rs b/src/resolver/parse.rs index d8f0bfcf..29880375 100644 --- a/src/resolver/parse.rs +++ b/src/resolver/parse.rs @@ -10,7 +10,7 @@ use std::{ }; /// Represents various information about a solidity file parsed via [solang_parser] -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct SolData { pub license: Option>, pub version: Option>, @@ -170,7 +170,7 @@ impl SolImport { } /// Minimal representation of a contract inside a solidity file 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub struct SolLibrary { pub functions: Vec, } diff --git a/tests/project.rs b/tests/project.rs index 4993607e..e2b9c604 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -10,7 +10,7 @@ use foundry_compilers::{ buildinfo::BuildInfo, cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, compilers::{ - solc::{SolcLanguage, SolcRegistry}, + solc::{SolcCompiler, SolcLanguage}, vyper::{Vyper, VyperLanguage, VyperSettings}, CompilerOutput, }, @@ -89,7 +89,7 @@ fn can_compile_hardhat_sample() { let paths = ProjectPathsConfig::builder() .sources(root.join("contracts")) .lib(root.join("node_modules")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); assert!(compiled.find_first("Greeter").is_some()); @@ -114,7 +114,7 @@ fn can_compile_hardhat_sample() { fn can_compile_dapp_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); assert!(compiled.find_first("Dapp").is_some()); @@ -141,7 +141,7 @@ fn can_compile_dapp_sample() { fn can_compile_yul_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/yul-sample"); let paths = ProjectPathsConfig::builder().sources(root); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); assert!(compiled.find_first("Dapp").is_some()); @@ -543,7 +543,7 @@ fn can_flatten_file_with_external_lib() { let paths = ProjectPathsConfig::builder() .sources(root.join("contracts")) .lib(root.join("node_modules")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let target = root.join("contracts").join("Greeter.sol"); @@ -558,7 +558,7 @@ fn can_flatten_file_with_external_lib() { fn can_flatten_file_in_dapp_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let target = root.join("src/Dapp.t.sol"); @@ -2722,7 +2722,7 @@ fn can_compile_model_checker_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/model-checker-sample"); let paths = ProjectPathsConfig::builder().sources(root); - let mut project = TempProject::::new(paths).unwrap(); + let mut project = TempProject::::new(paths).unwrap(); project.project_mut().settings.model_checker = Some(ModelCheckerSettings { engine: Some(CHC), timeout: Some(10000), @@ -3790,7 +3790,7 @@ fn test_deterministic_metadata() { let project = Project::builder() .locked_version(SolcLanguage::Solidity, Version::new(0, 8, 18)) .paths(paths) - .build(SolcRegistry::default()) + .build(SolcCompiler::default()) .unwrap(); let compiled = project.compile().unwrap(); @@ -3863,7 +3863,7 @@ fn yul_remappings_ignored() { name: "@openzeppelin".to_string(), path: root.to_string_lossy().to_string(), }); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); From 5768b545bc685d436b84bae6fc760636fc085c83 
Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 28 May 2024 23:18:46 +0200 Subject: [PATCH 08/24] test for MultiCompiler --- src/compilers/multi.rs | 20 +++++------ test-data/multi-sample/src/Counter.sol | 11 ++++++ test-data/multi-sample/src/Counter.vy | 13 +++++++ .../multi-sample/src/interfaces/ICounter.vy | 14 ++++++++ tests/project.rs | 34 +++++++++++++++++++ 5 files changed, 80 insertions(+), 12 deletions(-) create mode 100644 test-data/multi-sample/src/Counter.sol create mode 100644 test-data/multi-sample/src/Counter.vy create mode 100644 test-data/multi-sample/src/interfaces/ICounter.vy diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index 5d1695c0..cd10a6cf 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -23,8 +23,8 @@ use std::{ #[derive(Debug, Clone)] pub struct MultiCompiler { - solc_compiler: SolcCompiler, - vyper_compiler: Vyper, + pub solc: SolcCompiler, + pub vyper: Vyper, } #[derive(Debug, Clone, Hash, Eq, PartialEq)] @@ -61,8 +61,8 @@ impl fmt::Display for MultiCompilerLanguage { #[derive(Default, Clone, Debug, Serialize, Deserialize)] pub struct MultiCompilerSettings { - solc: SolcSettings, - vyper: VyperSettings, + pub solc: SolcSettings, + pub vyper: VyperSettings, } impl CompilerSettings for MultiCompilerSettings { @@ -177,22 +177,18 @@ impl Compiler for MultiCompiler { fn compile(&self, input: &Self::Input) -> Result> { match input { MultiCompilerInput::Solc(input) => { - self.solc_compiler.compile(input).map(|res| res.map_err(MultiCompilerError::Solc)) + self.solc.compile(input).map(|res| res.map_err(MultiCompilerError::Solc)) } MultiCompilerInput::Vyper(input) => { - self.vyper_compiler.compile(input).map(|res| res.map_err(MultiCompilerError::Vyper)) + self.vyper.compile(input).map(|res| res.map_err(MultiCompilerError::Vyper)) } } } fn available_versions(&self, language: &Self::Language) -> Vec { match language { - MultiCompilerLanguage::Solc(language) => { - self.solc_compiler.available_versions(language) - } - MultiCompilerLanguage::Vyper(language) => { - self.vyper_compiler.available_versions(language) - } + MultiCompilerLanguage::Solc(language) => self.solc.available_versions(language), + MultiCompilerLanguage::Vyper(language) => self.vyper.available_versions(language), } } } diff --git a/test-data/multi-sample/src/Counter.sol b/test-data/multi-sample/src/Counter.sol new file mode 100644 index 00000000..4f0c3503 --- /dev/null +++ b/test-data/multi-sample/src/Counter.sol @@ -0,0 +1,11 @@ +contract Counter { + uint256 public number; + + function setNumber(uint256 newNumber) public { + number = newNumber; + } + + function increment() public { + number++; + } +} diff --git a/test-data/multi-sample/src/Counter.vy b/test-data/multi-sample/src/Counter.vy new file mode 100644 index 00000000..08fc0a2a --- /dev/null +++ b/test-data/multi-sample/src/Counter.vy @@ -0,0 +1,13 @@ +import interfaces.ICounter as ICounter +implements: ICounter + +number: public(uint256) + +@external +def set_number(new_number: uint256): + self.number = new_number + +@external +def increment() -> uint256: + self.number += 1 + return self.number diff --git a/test-data/multi-sample/src/interfaces/ICounter.vy b/test-data/multi-sample/src/interfaces/ICounter.vy new file mode 100644 index 00000000..6923a6a1 --- /dev/null +++ b/test-data/multi-sample/src/interfaces/ICounter.vy @@ -0,0 +1,14 @@ +# pragma version ^0.3.10 + +@external +@view +def number() -> uint256: + return empty(uint256) + +@external +def set_number(new_number: uint256): + pass + +@external 
+def increment() -> uint256: + return empty(uint256) diff --git a/tests/project.rs b/tests/project.rs index e2b9c604..9a8fb7d0 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -10,6 +10,7 @@ use foundry_compilers::{ buildinfo::BuildInfo, cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, compilers::{ + multi::{MultiCompiler, MultiCompilerLanguage, MultiCompilerSettings}, solc::{SolcCompiler, SolcLanguage}, vyper::{Vyper, VyperLanguage, VyperSettings}, CompilerOutput, @@ -3893,3 +3894,36 @@ fn test_vyper_imports() { project.compile().unwrap().assert_success(); } + +#[test] +fn test_can_compile_multi() { + let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/multi-sample"); + + let paths = ProjectPathsConfig::builder() + .sources(root.join("src")) + .root(&root) + .build::() + .unwrap(); + + let settings = MultiCompilerSettings { + vyper: VyperSettings { + output_selection: OutputSelection::default_output_selection(), + ..Default::default() + }, + solc: Default::default(), + }; + + let compiler = MultiCompiler { solc: SolcCompiler::default(), vyper: VYPER.clone() }; + + let project = ProjectBuilder::::new(Default::default()) + .settings(settings) + .paths(paths) + .no_artifacts() + .build(compiler) + .unwrap(); + + let compiled = project.compile().unwrap(); + assert!(compiled.find(root.join("src/Counter.sol").to_string_lossy(), "Counter").is_some()); + assert!(compiled.find(root.join("src/Counter.vy").to_string_lossy(), "Counter").is_some()); + compiled.assert_success(); +} From 4ee664e51c2e718514ffa962f75946c825543f50 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 29 May 2024 14:02:05 +0200 Subject: [PATCH 09/24] impl Default for MultiCompiler --- src/compilers/multi.rs | 27 ++++++++++++++++++++++++--- src/compilers/solc.rs | 14 ++++++++++++-- 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index cd10a6cf..ed6f81c4 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -24,7 +24,22 @@ use std::{ #[derive(Debug, Clone)] pub struct MultiCompiler { pub solc: SolcCompiler, - pub vyper: Vyper, + pub vyper: Option, +} + +impl Default for MultiCompiler { + fn default() -> Self { + let vyper = Vyper::new("vyper").ok(); + + Self { solc: SolcCompiler::default(), vyper } + } +} + +impl MultiCompiler { + pub fn new(vyper_path: Option) -> Result { + let vyper = vyper_path.map(|path| Vyper::new(path)).transpose()?; + Ok(Self { solc: SolcCompiler::default(), vyper }) + } } #[derive(Debug, Clone, Hash, Eq, PartialEq)] @@ -180,7 +195,11 @@ impl Compiler for MultiCompiler { self.solc.compile(input).map(|res| res.map_err(MultiCompilerError::Solc)) } MultiCompilerInput::Vyper(input) => { - self.vyper.compile(input).map(|res| res.map_err(MultiCompilerError::Vyper)) + if let Some(vyper) = &self.vyper { + vyper.compile(input).map(|res| res.map_err(MultiCompilerError::Vyper)) + } else { + Err(SolcError::msg("vyper compiler is not available")) + } } } } @@ -188,7 +207,9 @@ impl Compiler for MultiCompiler { fn available_versions(&self, language: &Self::Language) -> Vec { match language { MultiCompilerLanguage::Solc(language) => self.solc.available_versions(language), - MultiCompilerLanguage::Vyper(language) => self.vyper.available_versions(language), + MultiCompilerLanguage::Vyper(language) => { + self.vyper.as_ref().map(|v| v.available_versions(language)).unwrap_or_default() + } } } } diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index 7161bdf3..3a80a4c2 100644 --- a/src/compilers/solc.rs 
+++ b/src/compilers/solc.rs @@ -23,7 +23,11 @@ use std::{ #[derive(Debug, Clone, Default)] #[non_exhaustive] -pub struct SolcCompiler; +pub enum SolcCompiler { + #[default] + AutoDetect, + Specific(Solc), +} #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[non_exhaustive] @@ -53,7 +57,10 @@ impl Compiler for SolcCompiler { type Language = SolcLanguage; fn compile(&self, input: &Self::Input) -> Result> { - let mut solc = Solc::find_or_install(&input.version)?; + let mut solc = match self { + Self::Specific(solc) => solc.clone(), + Self::AutoDetect => Solc::find_or_install(&input.version)?, + }; solc.base_path = input.base_path.clone(); solc.allow_paths = input.allow_paths.clone(); solc.include_paths = input.include_paths.clone(); @@ -70,6 +77,9 @@ impl Compiler for SolcCompiler { } fn available_versions(&self, _language: &Self::Language) -> Vec { + if let Self::Specific(solc) = self { + return vec![CompilerVersion::Installed(solc.version.clone())]; + } let mut all_versions = Solc::installed_versions() .into_iter() .map(CompilerVersion::Installed) From 89a078ab99b799c464e02878946f7239b3dc2ebd Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 29 May 2024 16:09:07 +0200 Subject: [PATCH 10/24] update generics --- src/artifact_output/configurable.rs | 2 +- src/compile/output/mod.rs | 6 +- src/compile/project.rs | 8 +- src/compilers/multi.rs | 16 +- src/compilers/vyper/settings.rs | 2 +- src/config.rs | 66 +++--- src/filter.rs | 59 +++-- src/flatten.rs | 3 +- src/lib.rs | 13 +- src/project_util/mock.rs | 18 +- src/project_util/mod.rs | 329 +++++++++++++++------------- tests/mocked.rs | 3 +- tests/project.rs | 181 +++++++-------- 13 files changed, 391 insertions(+), 315 deletions(-) diff --git a/src/artifact_output/configurable.rs b/src/artifact_output/configurable.rs index ac6668f3..ced84ff1 100644 --- a/src/artifact_output/configurable.rs +++ b/src/artifact_output/configurable.rs @@ -178,7 +178,7 @@ impl ConfigurableArtifacts { } /// Returns the `Settings` this configuration corresponds to - pub fn settings(&self) -> Settings { + pub fn solc_settings(&self) -> Settings { SolcConfig::builder().additional_outputs(self.output_selection()).build().into() } diff --git a/src/compile/output/mod.rs b/src/compile/output/mod.rs index 3bb4fb74..ae0233c7 100644 --- a/src/compile/output/mod.rs +++ b/src/compile/output/mod.rs @@ -3,10 +3,10 @@ use crate::{ artifacts::{ contract::{CompactContractBytecode, CompactContractRef, Contract}, - Error, Severity, + Severity, }, buildinfo::RawBuildInfo, - compilers::{CompilationError, CompilerOutput}, + compilers::{multi::MultiCompilerError, CompilationError, CompilerOutput}, info::ContractInfoRef, sources::{VersionedSourceFile, VersionedSourceFiles}, Artifact, ArtifactId, ArtifactOutput, Artifacts, ConfigurableArtifacts, SolcIoError, @@ -29,7 +29,7 @@ pub mod sources; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. 
#[derive(Debug, Clone, PartialEq, Default)] -pub struct ProjectCompileOutput { +pub struct ProjectCompileOutput { /// contains the aggregated `CompilerOutput` pub(crate) compiler_output: AggregatedCompilerOutput, /// all artifact files from `output` that were freshly compiled and written diff --git a/src/compile/project.rs b/src/compile/project.rs index 1272106d..06fd1ff2 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -636,7 +636,7 @@ fn compile_parallel, I: CompilerInput>( mod tests { use super::*; use crate::{ - artifacts::output_selection::ContractOutputSelection, compilers::solc::SolcCompiler, + artifacts::output_selection::ContractOutputSelection, compilers::multi::MultiCompiler, project_util::TempProject, ConfigurableArtifacts, MinimalCombinedArtifacts, }; @@ -670,7 +670,7 @@ mod tests { fn can_detect_cached_files() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); @@ -683,7 +683,7 @@ mod tests { #[test] fn can_recompile_with_optimized_output() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); tmp.add_source( "A", @@ -805,7 +805,7 @@ mod tests { fn extra_output_cached() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let mut project = TempProject::new(paths.clone()).unwrap(); + let mut project = TempProject::::new(paths.clone()).unwrap(); // Compile once without enabled extra output project.compile().unwrap(); diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index ed6f81c4..d377c19f 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -37,7 +37,7 @@ impl Default for MultiCompiler { impl MultiCompiler { pub fn new(vyper_path: Option) -> Result { - let vyper = vyper_path.map(|path| Vyper::new(path)).transpose()?; + let vyper = vyper_path.map(Vyper::new).transpose()?; Ok(Self { solc: SolcCompiler::default(), vyper }) } } @@ -48,6 +48,18 @@ pub enum MultiCompilerLanguage { Vyper(VyperLanguage), } +impl From for MultiCompilerLanguage { + fn from(language: SolcLanguage) -> Self { + Self::Solc(language) + } +} + +impl From for MultiCompilerLanguage { + fn from(language: VyperLanguage) -> Self { + Self::Vyper(language) + } +} + #[derive(Debug, Clone)] pub enum MultiCompilerParsedSource { Solc(SolData), @@ -74,7 +86,7 @@ impl fmt::Display for MultiCompilerLanguage { } } -#[derive(Default, Clone, Debug, Serialize, Deserialize)] +#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] pub struct MultiCompilerSettings { pub solc: SolcSettings, pub vyper: VyperSettings, diff --git a/src/compilers/vyper/settings.rs b/src/compilers/vyper/settings.rs index 2d039ac6..4bf8a7be 100644 --- a/src/compilers/vyper/settings.rs +++ b/src/compilers/vyper/settings.rs @@ -17,7 +17,7 @@ pub enum VyperOptimizationMode { None, } -#[derive(Debug, Serialize, Clone, Default, Deserialize)] +#[derive(Debug, Serialize, Clone, Default, Deserialize, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct VyperSettings { #[serde( diff --git a/src/config.rs b/src/config.rs index 2879153c..9b5f98a8 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,7 +1,7 @@ use crate::{ 
artifacts::{output_selection::ContractOutputSelection, Settings}, cache::SOLIDITY_FILES_CACHE_FILENAME, - compilers::{solc::SolcLanguage, Language}, + compilers::{multi::MultiCompilerLanguage, solc::SolcLanguage, Language}, error::{Result, SolcError, SolcIoError}, flatten::{collect_ordered_deps, combine_version_pragmas}, remappings::Remapping, @@ -19,7 +19,7 @@ use std::{ /// Where to find all files or where to write them #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ProjectPathsConfig { +pub struct ProjectPathsConfig { /// Project root pub root: PathBuf, /// Path to the cache, if any @@ -51,6 +51,38 @@ impl ProjectPathsConfig { ProjectPathsConfigBuilder::default() } + /// Attempts to autodetect the artifacts directory based on the given root path + /// + /// Dapptools layout takes precedence over hardhat style. + /// This will return: + /// - `/out` if it exists or `/artifacts` does not exist, + /// - `/artifacts` if it exists and `/out` does not exist. + pub fn find_artifacts_dir(root: impl AsRef) -> PathBuf { + utils::find_fave_or_alt_path(root, "out", "artifacts") + } + + /// Attempts to autodetect the source directory based on the given root path + /// + /// Dapptools layout takes precedence over hardhat style. + /// This will return: + /// - `/src` if it exists or `/contracts` does not exist, + /// - `/contracts` if it exists and `/src` does not exist. + pub fn find_source_dir(root: impl AsRef) -> PathBuf { + utils::find_fave_or_alt_path(root, "src", "contracts") + } + + /// Attempts to autodetect the lib directory based on the given root path + /// + /// Dapptools layout takes precedence over hardhat style. + /// This will return: + /// - `/lib` if it exists or `/node_modules` does not exist, + /// - `/node_modules` if it exists and `/lib` does not exist. + pub fn find_libs(root: impl AsRef) -> Vec { + vec![utils::find_fave_or_alt_path(root, "lib", "node_modules")] + } +} + +impl ProjectPathsConfig { /// Flattens all file imports into a single string pub fn flatten(&self, target: &Path) -> Result { trace!("flattening file"); @@ -180,36 +212,6 @@ impl ProjectPathsConfig { Ok(format!("{}\n", utils::RE_THREE_OR_MORE_NEWLINES.replace_all(&result, "\n\n").trim())) } - - /// Attempts to autodetect the artifacts directory based on the given root path - /// - /// Dapptools layout takes precedence over hardhat style. - /// This will return: - /// - `/out` if it exists or `/artifacts` does not exist, - /// - `/artifacts` if it exists and `/out` does not exist. - pub fn find_artifacts_dir(root: impl AsRef) -> PathBuf { - utils::find_fave_or_alt_path(root, "out", "artifacts") - } - - /// Attempts to autodetect the source directory based on the given root path - /// - /// Dapptools layout takes precedence over hardhat style. - /// This will return: - /// - `/src` if it exists or `/contracts` does not exist, - /// - `/contracts` if it exists and `/src` does not exist. - pub fn find_source_dir(root: impl AsRef) -> PathBuf { - utils::find_fave_or_alt_path(root, "src", "contracts") - } - - /// Attempts to autodetect the lib directory based on the given root path - /// - /// Dapptools layout takes precedence over hardhat style. - /// This will return: - /// - `/lib` if it exists or `/node_modules` does not exist, - /// - `/node_modules` if it exists and `/lib` does not exist. 
- pub fn find_libs(root: impl AsRef) -> Vec { - vec![utils::find_fave_or_alt_path(root, "lib", "node_modules")] - } } impl ProjectPathsConfig { diff --git a/src/filter.rs b/src/filter.rs index 551e0d05..ea5535f2 100644 --- a/src/filter.rs +++ b/src/filter.rs @@ -2,7 +2,7 @@ use crate::{ artifacts::output_selection::OutputSelection, - compilers::CompilerSettings, + compilers::{multi::MultiCompilerParsedSource, CompilerSettings}, resolver::{parse::SolData, GraphEdges}, Source, Sources, }; @@ -48,33 +48,47 @@ impl FileFilter for TestFileFilter { } } -/// Wrapper around a [FileFilter] that includes files matched by the inner filter and their link -/// references obtained from [GraphEdges]. -pub struct SolcSparseFileFilter { - file_filter: T, +trait MaybeSolData { + fn sol_data(&self) -> Option<&SolData>; } -impl SolcSparseFileFilter { - pub fn new(file_filter: T) -> Self { - Self { file_filter } +impl MaybeSolData for SolData { + fn sol_data(&self) -> Option<&SolData> { + Some(self) } } -impl FileFilter for SolcSparseFileFilter { - fn is_match(&self, file: &Path) -> bool { - self.file_filter.is_match(file) +impl MaybeSolData for MultiCompilerParsedSource { + fn sol_data(&self) -> Option<&SolData> { + match self { + MultiCompilerParsedSource::Solc(data) => Some(data), + _ => None, + } } } -impl SparseOutputFileFilter for SolcSparseFileFilter { +fn sparse_solc(file: &Path, graph: &GraphEdges) -> Vec { + let mut sources_to_compile = vec![file.to_path_buf()]; + for import in graph.imports(file) { + if let Some(parsed) = graph.get_parsed_source(import).and_then(MaybeSolData::sol_data) { + if !parsed.libraries.is_empty() { + sources_to_compile.push(import.to_path_buf()); + } + } + } + + sources_to_compile +} + +impl SparseOutputFileFilter for T { fn sparse_sources(&self, file: &Path, graph: &GraphEdges) -> Vec { - if !self.file_filter.is_match(file) { + if !self.is_match(file) { return vec![]; } let mut sources_to_compile = vec![file.to_path_buf()]; for import in graph.imports(file) { - if let Some(parsed) = graph.get_parsed_source(import) { + if let Some(parsed) = graph.get_parsed_source(import).and_then(|d| d.sol_data()) { if !parsed.libraries.is_empty() { sources_to_compile.push(import.to_path_buf()); } @@ -85,6 +99,23 @@ impl SparseOutputFileFilter for SolcSparseFileFilter } } +impl SparseOutputFileFilter for T { + fn sparse_sources( + &self, + file: &Path, + graph: &GraphEdges, + ) -> Vec { + if !self.is_match(file) { + return vec![]; + } + + match file.extension().and_then(|e| e.to_str()) { + Some("yul" | "sol") => sparse_solc(file, graph), + _ => vec![file.to_path_buf()], + } + } +} + /// This trait behaves in a similar way to [FileFilter] but used to configure [OutputSelection] /// configuration. In certain cases, we might want to include some of the file dependencies into the /// compiler output even if we might not be directly interested in them. diff --git a/src/flatten.rs b/src/flatten.rs index 1181819b..c9f2d4e9 100644 --- a/src/flatten.rs +++ b/src/flatten.rs @@ -5,6 +5,7 @@ use crate::{ ContractDefinitionPart, Error, ExternalInlineAssemblyReference, Identifier, IdentifierPath, MemberAccess, Source, SourceUnit, SourceUnitPart, Sources, }, + compilers::solc::SolcCompiler, error::SolcError, resolver::parse::SolData, utils, ConfigurableArtifacts, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Result, @@ -175,7 +176,7 @@ impl Flattener { /// Flattener caller is expected to resolve all imports of target file, compile them and pass /// into this function. 
pub fn new( - project: &Project, + project: &Project, output: &ProjectCompileOutput, target: &Path, ) -> Result { diff --git a/src/lib.rs b/src/lib.rs index b0403d3f..17c0ef8d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,7 +25,7 @@ pub mod cache; pub mod flatten; pub mod hh; -use compilers::{solc::SolcCompiler, Compiler, CompilerSettings}; +use compilers::{multi::MultiCompiler, solc::SolcCompiler, Compiler, CompilerSettings}; pub use filter::SparseOutputFileFilter; pub use hh::{HardhatArtifact, HardhatArtifacts}; @@ -48,8 +48,7 @@ pub mod remappings; mod filter; pub use filter::{ - FileFilter, FilteredSources, SolcSparseFileFilter, SourceCompilationKind, SparseOutputFilter, - TestFileFilter, + FileFilter, FilteredSources, SourceCompilationKind, SparseOutputFilter, TestFileFilter, }; use solang_parser::pt::SourceUnitPart; @@ -79,7 +78,7 @@ pub mod project_util; /// Represents a project workspace and handles `solc` compiling of all contracts in that workspace. #[derive(Clone, Debug)] -pub struct Project { +pub struct Project { pub compiler: C, /// Compiler versions locked for specific languages. pub locked_versions: HashMap, @@ -517,7 +516,7 @@ impl Project { } } -pub struct ProjectBuilder { +pub struct ProjectBuilder { /// The layout of the paths: Option>, /// Compiler versions locked for specific languages. @@ -678,8 +677,8 @@ impl ProjectBuilder { } #[must_use] - pub fn locked_version(mut self, lang: C::Language, version: Version) -> Self { - self.locked_versions.insert(lang, version); + pub fn locked_version(mut self, lang: impl Into, version: Version) -> Self { + self.locked_versions.insert(lang.into(), version); self } diff --git a/src/project_util/mock.rs b/src/project_util/mock.rs index 6c78b791..5c810b4e 100644 --- a/src/project_util/mock.rs +++ b/src/project_util/mock.rs @@ -1,10 +1,10 @@ //! Helpers to generate mock projects use crate::{ - compilers::ParsedSource, + compilers::{multi::MultiCompilerParsedSource, Language, ParsedSource}, error::Result, remappings::Remapping, - resolver::{parse::SolData, GraphEdges}, + resolver::GraphEdges, Graph, ProjectPathsConfig, SolcError, }; use rand::{ @@ -68,7 +68,7 @@ impl MockProjectGenerator { Some(libs) } - let graph = Graph::::resolve(paths)?; + let graph = Graph::::resolve(paths)?; let mut gen = MockProjectGenerator::default(); let (_, edges) = graph.into_sources(); @@ -111,7 +111,11 @@ impl MockProjectGenerator { } /// Generate all solidity files and write under the paths config - pub fn write_to(&self, paths: &ProjectPathsConfig, version: impl AsRef) -> Result<()> { + pub fn write_to( + &self, + paths: &ProjectPathsConfig, + version: impl AsRef, + ) -> Result<()> { let version = version.as_ref(); for file in self.inner.files.iter() { let imports = self.get_imports(file.id); @@ -432,7 +436,11 @@ impl MockFile { self.lib_id.is_some() } - pub fn target_path(&self, gen: &MockProjectGenerator, paths: &ProjectPathsConfig) -> PathBuf { + pub fn target_path( + &self, + gen: &MockProjectGenerator, + paths: &ProjectPathsConfig, + ) -> PathBuf { let mut target = if let Some(lib) = self.lib_id { paths.root.join("lib").join(&gen.inner.libraries[lib].name).join("src").join(&self.name) } else { diff --git a/src/project_util/mod.rs b/src/project_util/mod.rs index 75d782bd..21c71408 100644 --- a/src/project_util/mod.rs +++ b/src/project_util/mod.rs @@ -1,19 +1,22 @@ //! Utilities for mocking project workspaces. 
use crate::{ - artifacts::{Error, Settings}, - compilers::{solc::SolcCompiler, Compiler}, + artifacts::Settings, + compilers::{ + multi::{MultiCompiler, MultiCompilerSettings}, + solc::SolcCompiler, + Compiler, + }, config::ProjectPathsConfigBuilder, error::{Result, SolcError}, filter::SparseOutputFileFilter, hh::HardhatArtifacts, project_util::mock::{MockProjectGenerator, MockProjectSettings}, remappings::Remapping, - resolver::parse::SolData, utils::{self, tempdir}, Artifact, ArtifactOutput, Artifacts, CompilerCache, ConfigurableArtifacts, - ConfigurableContractArtifact, PathStyle, Project, ProjectCompileOutput, ProjectPathsConfig, - SolcIoError, + ConfigurableContractArtifact, PathStyle, Project, ProjectBuilder, ProjectCompileOutput, + ProjectPathsConfig, SolcIoError, }; use fs_extra::{dir, file}; use std::{ @@ -29,31 +32,120 @@ pub mod mock; /// A [`Project`] wrapper that lives in a new temporary directory /// /// Once `TempProject` is dropped, the temp dir is automatically removed, see [`TempDir::drop()`] -pub struct TempProject { +pub struct TempProject { /// temporary workspace root _root: TempDir, /// actual project workspace with the `root` tempdir as its root inner: Project, } -impl TempProject { +impl TempProject { + /// Creates a new temp project using the provided paths and artifacts handler. + /// sets the project root to a temp dir + #[cfg(feature = "svm-solc")] + pub fn with_artifacts(paths: ProjectPathsConfigBuilder, artifacts: T) -> Result { + Self::prefixed_with_artifacts("temp-project", paths, artifacts) + } + + /// Overwrites the settings to pass to `solc` + pub fn with_solc_settings(mut self, settings: impl Into) -> Self { + self.inner.settings.solc = settings.into(); + self + } + + /// Explicitly sets the solc version for the project + #[cfg(feature = "svm-solc")] + pub fn set_solc(&mut self, solc: impl AsRef) -> &mut Self { + use crate::compilers::{multi::MultiCompilerLanguage, solc::SolcLanguage}; + use semver::Version; + + let version = Version::parse(solc.as_ref()).unwrap(); + self.inner + .locked_versions + .insert(MultiCompilerLanguage::Solc(SolcLanguage::Solidity), version.clone()); + self.inner + .locked_versions + .insert(MultiCompilerLanguage::Solc(SolcLanguage::Yul), version.clone()); + self + } +} + +impl TempProject { + /// Creates a new temp project for the given `PathStyle` + #[cfg(feature = "svm-solc")] + pub fn with_style(prefix: &str, style: PathStyle) -> Result { + let tmp_dir = tempdir(prefix)?; + let paths = style.paths(tmp_dir.path())?; + let inner = + Project::builder().artifacts(T::default()).paths(paths).build(Default::default())?; + Ok(Self::create_new(tmp_dir, inner)?) 
+ } +} + +impl fmt::Debug for TempProject { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TempProject").field("paths", &self.inner.paths).finish() + } +} + +pub(crate) fn create_contract_file(path: PathBuf, content: impl AsRef) -> Result { + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent) + .map_err(|err| SolcIoError::new(err, parent.to_path_buf()))?; + } + std::fs::write(&path, content.as_ref()).map_err(|err| SolcIoError::new(err, path.clone()))?; + Ok(path) +} + +fn contract_file_name(name: impl AsRef) -> String { + let name = name.as_ref().trim(); + if name.ends_with(".sol") { + name.to_string() + } else { + format!("{name}.sol") + } +} + +#[cfg(feature = "svm-solc")] +impl TempProject { + /// Creates an empty new hardhat style workspace in a new temporary dir + pub fn hardhat() -> Result { + let tmp_dir = tempdir("tmp_hh")?; + + let paths = ProjectPathsConfig::hardhat(tmp_dir.path())?; + + let inner = Project::builder() + .artifacts(HardhatArtifacts::default()) + .paths(paths) + .build(Default::default())?; + Ok(Self::create_new(tmp_dir, inner)?) + } +} + +impl TempProject { /// Makes sure all resources are created pub fn create_new( root: TempDir, - inner: Project, + inner: Project, ) -> std::result::Result { let mut project = Self { _root: root, inner }; - project.paths().create_all()?; + project.inner.paths.create_all()?; // ignore license warnings project.inner.ignored_error_codes.push(1878); Ok(project) } - /// Creates a new temp project using the provided paths and artifacts handler. - /// sets the project root to a temp dir + /// Creates a new temp project using the provided paths and setting the project root to a temp + /// dir #[cfg(feature = "svm-solc")] - pub fn with_artifacts(paths: ProjectPathsConfigBuilder, artifacts: T) -> Result { - Self::prefixed_with_artifacts("temp-project", paths, artifacts) + pub fn new(paths: ProjectPathsConfigBuilder) -> Result { + Self::prefixed("temp-project", paths) + } + + /// Creates a new temp project inside a tempdir with a prefixed directory + #[cfg(feature = "svm-solc")] + pub fn prefixed(prefix: &str, paths: ProjectPathsConfigBuilder) -> Result { + Self::prefixed_with_artifacts(prefix, paths, T::default()) } /// Creates a new temp project inside a tempdir with a prefixed directory and the given @@ -66,71 +158,53 @@ impl TempProject { ) -> Result { let tmp_dir = tempdir(prefix)?; let paths = paths.build_with_root(tmp_dir.path()); - let inner = - Project::builder().artifacts(artifacts).paths(paths).build(Default::default())?; + let inner = ProjectBuilder::::new(Default::default()) + .artifacts(artifacts) + .paths(paths) + .build(Default::default())?; Ok(Self::create_new(tmp_dir, inner)?) } - /// Overwrites the settings to pass to `solc` - pub fn with_settings(mut self, settings: impl Into) -> Self { - self.inner.settings = settings.into(); - self + /// Creates an empty new dapptools style workspace in a new temporary dir + pub fn dapptools() -> Result { + let tmp_dir = tempdir("tmp_dapp")?; + let paths = ProjectPathsConfig::dapptools(tmp_dir.path())?; + + let inner = ProjectBuilder::::new(Default::default()) + .paths(paths) + .build(Default::default())?; + Ok(Self::create_new(tmp_dir, inner)?) 
} - /// Explicitly sets the solc version for the project - #[cfg(feature = "svm-solc")] - pub fn set_solc(&mut self, solc: impl AsRef) -> &mut Self { - use crate::compilers::solc::SolcLanguage; - use semver::Version; + /// Creates an initialized dapptools style workspace in a new temporary dir + pub fn dapptools_init() -> Result { + let mut project = Self::dapptools()?; + let orig_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); + copy_dir(orig_root, project.root())?; + project.project_mut().paths.remappings = Remapping::find_many(project.root()); + project.project_mut().paths.remappings.iter_mut().for_each(|r| r.slash_path()); - let version = Version::parse(solc.as_ref()).unwrap(); - self.inner.locked_versions.insert(SolcLanguage::Solidity, version.clone()); - self.inner.locked_versions.insert(SolcLanguage::Yul, version.clone()); - self + Ok(project) } - pub fn project(&self) -> &Project { + pub fn project(&self) -> &Project { &self.inner } - pub fn flatten(&self, target: &Path) -> Result { - self.project().flatten(target) - } - - pub fn project_mut(&mut self) -> &mut Project { + pub fn project_mut(&mut self) -> &mut Project { &mut self.inner } /// The configured paths of the project - pub fn paths(&self) -> &ProjectPathsConfig { + pub fn paths(&self) -> &ProjectPathsConfig { &self.project().paths } /// The configured paths of the project - pub fn paths_mut(&mut self) -> &mut ProjectPathsConfig { + pub fn paths_mut(&mut self) -> &mut ProjectPathsConfig { &mut self.project_mut().paths } - /// Returns the path to the artifacts directory - pub fn artifacts_path(&self) -> &PathBuf { - &self.paths().artifacts - } - - /// Returns the path to the sources directory - pub fn sources_path(&self) -> &PathBuf { - &self.paths().sources - } - - /// Returns the path to the cache file - pub fn cache_path(&self) -> &PathBuf { - &self.paths().cache - } - - /// The root path of the temporary workspace - pub fn root(&self) -> &Path { - self.project().paths.root.as_path() - } - /// Copies a single file into the projects source pub fn copy_source(&self, source: impl AsRef) -> Result<()> { copy_file(source, &self.paths().sources) @@ -254,8 +328,39 @@ contract {} {{}} create_contract_file(source, content) } + /// Returns the path to the artifacts directory + pub fn artifacts_path(&self) -> &PathBuf { + &self.paths().artifacts + } + + /// Returns the path to the sources directory + pub fn sources_path(&self) -> &PathBuf { + &self.paths().sources + } + + /// Returns the path to the cache file + pub fn cache_path(&self) -> &PathBuf { + &self.paths().cache + } + + /// The root path of the temporary workspace + pub fn root(&self) -> &Path { + self.project().paths.root.as_path() + } + + pub fn compile(&self) -> Result> { + self.project().compile() + } + + pub fn compile_sparse( + &self, + filter: Box>, + ) -> Result> { + self.project().compile_sparse(filter) + } + /// Returns a snapshot of all cached artifacts - pub fn artifacts_snapshot(&self) -> Result> { + pub fn artifacts_snapshot(&self) -> Result> { let cache = self.project().read_cache_file()?; let artifacts = cache.read_artifacts::()?; Ok(ArtifactsSnapshot { cache, artifacts }) @@ -336,95 +441,10 @@ contract {} {{}} pub fn list_source_files(&self) -> Vec { utils::sol_source_files(self.project().sources_path()) } - - pub fn compile(&self) -> Result> { - self.project().compile() - } - - pub fn compile_sparse( - &self, - filter: Box>, - ) -> Result> { - self.project().compile_sparse(filter) - } -} - -impl TempProject { - /// Creates 
a new temp project inside a tempdir with a prefixed directory - #[cfg(feature = "svm-solc")] - pub fn prefixed(prefix: &str, paths: ProjectPathsConfigBuilder) -> Result { - Self::prefixed_with_artifacts(prefix, paths, T::default()) - } - - /// Creates a new temp project for the given `PathStyle` - #[cfg(feature = "svm-solc")] - pub fn with_style(prefix: &str, style: PathStyle) -> Result { - let tmp_dir = tempdir(prefix)?; - let paths = style.paths(tmp_dir.path())?; - let inner = - Project::builder().artifacts(T::default()).paths(paths).build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } - - /// Creates a new temp project using the provided paths and setting the project root to a temp - /// dir - #[cfg(feature = "svm-solc")] - pub fn new(paths: ProjectPathsConfigBuilder) -> Result { - Self::prefixed("temp-project", paths) - } -} - -impl fmt::Debug for TempProject { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("TempProject").field("paths", self.paths()).finish() - } -} - -pub(crate) fn create_contract_file(path: PathBuf, content: impl AsRef) -> Result { - if let Some(parent) = path.parent() { - std::fs::create_dir_all(parent) - .map_err(|err| SolcIoError::new(err, parent.to_path_buf()))?; - } - std::fs::write(&path, content.as_ref()).map_err(|err| SolcIoError::new(err, path.clone()))?; - Ok(path) -} - -fn contract_file_name(name: impl AsRef) -> String { - let name = name.as_ref().trim(); - if name.ends_with(".sol") { - name.to_string() - } else { - format!("{name}.sol") - } -} - -#[cfg(feature = "svm-solc")] -impl TempProject { - /// Creates an empty new hardhat style workspace in a new temporary dir - pub fn hardhat() -> Result { - let tmp_dir = tempdir("tmp_hh")?; - - let paths = ProjectPathsConfig::hardhat(tmp_dir.path())?; - - let inner = Project::builder() - .artifacts(HardhatArtifacts::default()) - .paths(paths) - .build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } } #[cfg(feature = "svm-solc")] impl TempProject { - /// Creates an empty new dapptools style workspace in a new temporary dir - pub fn dapptools() -> Result { - let tmp_dir = tempdir("tmp_dapp")?; - let paths = ProjectPathsConfig::dapptools(tmp_dir.path())?; - - let inner = Project::builder().paths(paths).build(Default::default())?; - Ok(Self::create_new(tmp_dir, inner)?) - } - pub fn dapptools_with_ignore_paths(paths_to_ignore: Vec) -> Result { let tmp_dir = tempdir("tmp_dapp")?; let paths = ProjectPathsConfig::dapptools(tmp_dir.path())?; @@ -436,17 +456,6 @@ impl TempProject { Ok(Self::create_new(tmp_dir, inner)?) } - /// Creates an initialized dapptools style workspace in a new temporary dir - pub fn dapptools_init() -> Result { - let mut project = Self::dapptools()?; - let orig_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); - copy_dir(orig_root, project.root())?; - project.project_mut().paths.remappings = Remapping::find_many(project.root()); - project.project_mut().paths.remappings.iter_mut().for_each(|r| r.slash_path()); - - Ok(project) - } - /// Clones the given repo into a temp dir, initializes it recursively and configures it. 
pub fn checkout(repo: impl AsRef) -> Result { let tmp_dir = tempdir("tmp_checkout")?; @@ -474,12 +483,20 @@ impl TempProject { } } -impl AsRef> for TempProject { - fn as_ref(&self) -> &Project { +impl AsRef> + for TempProject +{ + fn as_ref(&self) -> &Project { self.project() } } +impl TempProject { + pub fn flatten(&self, target: &Path) -> Result { + self.project().flatten(target) + } +} + /// The cache file and all the artifacts it references #[derive(Debug, Clone)] pub struct ArtifactsSnapshot { @@ -487,7 +504,7 @@ pub struct ArtifactsSnapshot { pub artifacts: Artifacts, } -impl ArtifactsSnapshot { +impl ArtifactsSnapshot { /// Ensures that all artifacts have abi, bytecode, deployedbytecode pub fn assert_artifacts_essentials_present(&self) { for artifact in self.artifacts.artifact_files() { diff --git a/tests/mocked.rs b/tests/mocked.rs index 87481779..d72ec1fe 100644 --- a/tests/mocked.rs +++ b/tests/mocked.rs @@ -1,6 +1,7 @@ //! mocked project tests use foundry_compilers::{ + compilers::multi::MultiCompiler, error::Result, project_util::{ mock::{MockProjectGenerator, MockProjectSettings, MockProjectSkeleton}, @@ -109,7 +110,7 @@ fn can_compile_mocked_modified_all() { // a test useful to manually debug a serialized skeleton #[test] fn can_compile_skeleton() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); let s = r#"{"files":[{"id":0,"name":"SourceFile0","imports":[{"External":[0,1]},{"External":[3,4]}],"lib_id":null,"emit_artifacts":true},{"id":1,"name":"SourceFile1","imports":[],"lib_id":0,"emit_artifacts":true},{"id":2,"name":"SourceFile2","imports":[],"lib_id":1,"emit_artifacts":true},{"id":3,"name":"SourceFile3","imports":[],"lib_id":2,"emit_artifacts":true},{"id":4,"name":"SourceFile4","imports":[],"lib_id":3,"emit_artifacts":true}],"libraries":[{"name":"Lib0","id":0,"offset":1,"num_files":1},{"name":"Lib1","id":1,"offset":2,"num_files":1},{"name":"Lib2","id":2,"offset":3,"num_files":1},{"name":"Lib3","id":3,"offset":4,"num_files":1}]}"#; let gen: MockProjectGenerator = serde_json::from_str::(s).unwrap().into(); let remappings = gen.remappings_at(project.root()); diff --git a/tests/project.rs b/tests/project.rs index 9a8fb7d0..5df1da11 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -10,7 +10,10 @@ use foundry_compilers::{ buildinfo::BuildInfo, cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, compilers::{ - multi::{MultiCompiler, MultiCompilerLanguage, MultiCompilerSettings}, + multi::{ + MultiCompiler, MultiCompilerError, MultiCompilerLanguage, MultiCompilerParsedSource, + MultiCompilerSettings, + }, solc::{SolcCompiler, SolcLanguage}, vyper::{Vyper, VyperLanguage, VyperSettings}, CompilerOutput, @@ -20,11 +23,9 @@ use foundry_compilers::{ info::ContractInfo, project_util::*, remappings::Remapping, - resolver::parse::SolData, utils::{self, RuntimeOrHandle}, Artifact, ConfigurableArtifacts, ExtraOutputValues, Graph, Project, ProjectBuilder, - ProjectCompileOutput, ProjectPathsConfig, Solc, SolcInput, SolcSparseFileFilter, - TestFileFilter, + ProjectCompileOutput, ProjectPathsConfig, Solc, SolcInput, TestFileFilter, }; use once_cell::sync::Lazy; use pretty_assertions::assert_eq; @@ -184,8 +185,8 @@ fn can_compile_configured() { ..Default::default() }; - let settings = handler.settings(); - let project = TempProject::with_artifacts(paths, handler).unwrap().with_settings(settings); + let settings = handler.solc_settings(); + let project = TempProject::with_artifacts(paths, 
handler).unwrap().with_solc_settings(settings); let compiled = project.compile().unwrap(); let artifact = compiled.find_first("Dapp").unwrap(); assert!(artifact.metadata.is_some()); @@ -197,7 +198,7 @@ fn can_compile_configured() { #[test] fn can_compile_dapp_detect_changes_in_libs() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); let remapping = project.paths().libraries[0].join("remapping"); project @@ -228,7 +229,7 @@ fn can_compile_dapp_detect_changes_in_libs() { ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); assert_eq!(graph.files().clone(), HashMap::from([(src, 0), (lib, 1),])); @@ -258,7 +259,7 @@ fn can_compile_dapp_detect_changes_in_libs() { ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); let compiled = project.compile().unwrap(); @@ -270,7 +271,7 @@ fn can_compile_dapp_detect_changes_in_libs() { #[test] fn can_compile_dapp_detect_changes_in_sources() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let src = project .add_source( @@ -300,7 +301,7 @@ fn can_compile_dapp_detect_changes_in_sources() { ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); assert_eq!(graph.files().clone(), HashMap::from([(base, 0), (src, 1),])); assert_eq!(graph.imported_nodes(1).to_vec(), vec![0]); @@ -337,7 +338,7 @@ fn can_compile_dapp_detect_changes_in_sources() { ", ) .unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); let compiled = project.compile().unwrap(); @@ -355,7 +356,7 @@ fn can_compile_dapp_detect_changes_in_sources() { #[test] fn can_emit_build_info() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); project.project_mut().build_info = true; project .add_source( @@ -395,7 +396,7 @@ contract B { } #[test] fn can_clean_build_info() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); project.project_mut().build_info = true; project.project_mut().paths.build_infos = project.project_mut().paths.root.join("build-info"); @@ -528,7 +529,11 @@ fn copy_dir_all(src: impl AsRef, dst: impl AsRef) -> io::Result<()> // Runs both `flatten` implementations, asserts that their outputs match and runs additional checks // against the output. 
-fn test_flatteners(project: &TempProject, target: &Path, additional_checks: fn(&str)) { +fn test_flatteners( + project: &TempProject, + target: &Path, + additional_checks: fn(&str), +) { let result = project.flatten(target).unwrap(); let solc_result = Flattener::new(project.project(), &project.compile().unwrap(), target).unwrap().flatten(); @@ -559,7 +564,7 @@ fn can_flatten_file_with_external_lib() { fn can_flatten_file_in_dapp_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let target = root.join("src/Dapp.t.sol"); @@ -573,7 +578,7 @@ fn can_flatten_file_in_dapp_sample() { #[test] fn can_flatten_unique() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -632,7 +637,7 @@ contract A { } #[test] fn can_flatten_experimental_pragma() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -695,7 +700,7 @@ contract A { } #[test] fn can_flatten_on_solang_failure() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -749,7 +754,7 @@ contract Contract { #[test] fn can_flatten_multiline() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -811,7 +816,7 @@ contract A { } #[test] fn can_flatten_remove_extra_spacing() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -870,7 +875,7 @@ contract A { } #[test] fn can_flatten_with_alias() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -1029,7 +1034,7 @@ contract Contract is ParentContract, #[test] fn can_flatten_with_version_pragma_after_imports() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -1103,7 +1108,7 @@ contract A { } #[test] fn can_flatten_with_duplicates() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1159,7 +1164,7 @@ contract Bar_1 is Foo {} #[test] fn can_flatten_complex_aliases_setup_with_duplicates() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1302,7 +1307,7 @@ contract D is A_0 { // https://github.com/foundry-rs/compilers/issues/34 #[test] fn can_flatten_34_repro() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( "FlieA.sol", @@ -1369,7 +1374,7 @@ contract A { #[test] fn can_flatten_experimental_in_other_file() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1416,7 +1421,7 @@ contract B is A {} #[test] fn can_detect_type_error() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1439,7 +1444,7 @@ fn can_detect_type_error() { #[test] fn 
can_flatten_aliases_with_pragma_and_license_after_source() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1477,7 +1482,7 @@ contract B is A {} #[test] fn can_flatten_rename_inheritdocs() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1544,7 +1549,7 @@ contract B is A_1 { #[test] fn can_flatten_rename_inheritdocs_alias() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1597,7 +1602,7 @@ contract B is A { #[test] fn can_flatten_rename_user_defined_functions() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1693,7 +1698,7 @@ contract Foo { #[test] fn can_flatten_rename_global_functions() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1762,7 +1767,7 @@ contract Foo { #[test] fn can_flatten_rename_in_assembly() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1829,7 +1834,7 @@ contract Foo { #[test] fn can_flatten_combine_pragmas() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1869,7 +1874,7 @@ contract B {} #[test] fn can_compile_single_files() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); let f = tmp .add_contract( @@ -1905,7 +1910,7 @@ fn can_compile_single_files() { #[test] fn consistent_bytecode() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); tmp.add_source( "LinkTest", @@ -1940,7 +1945,7 @@ contract LinkTest { #[test] fn can_apply_libraries() { - let mut tmp = TempProject::dapptools().unwrap(); + let mut tmp = TempProject::::dapptools().unwrap(); tmp.add_source( "LinkTest", @@ -1979,12 +1984,12 @@ library MyLib { assert!(bytecode.is_unlinked()); // provide the library settings to let solc link - tmp.project_mut().settings.libraries = BTreeMap::from([( + tmp.project_mut().settings.solc.libraries = BTreeMap::from([( lib, BTreeMap::from([("MyLib".to_string(), format!("{:?}", Address::ZERO))]), )]) .into(); - tmp.project_mut().settings.libraries.slash_paths(); + tmp.project_mut().settings.solc.libraries.slash_paths(); let compiled = tmp.compile().unwrap(); compiled.assert_success(); @@ -1996,7 +2001,7 @@ library MyLib { let libs = Libraries::parse(&[format!("./src/MyLib.sol:MyLib:{:?}", Address::ZERO)]).unwrap(); // provide the library settings to let solc link - tmp.project_mut().settings.libraries = libs.with_applied_remappings(tmp.paths()); + tmp.project_mut().settings.solc.libraries = libs.with_applied_remappings(tmp.paths()); let compiled = tmp.compile().unwrap(); compiled.assert_success(); @@ -2012,7 +2017,7 @@ fn can_ignore_warning_from_paths() { let setup_and_compile = |ignore_paths: Option>| { let tmp = match ignore_paths { Some(paths) => TempProject::dapptools_with_ignore_paths(paths).unwrap(), - None => TempProject::dapptools().unwrap(), + None => TempProject::::dapptools().unwrap(), }; tmp.add_source( @@ -2057,7 +2062,7 @@ fn can_ignore_warning_from_paths() { } #[test] fn can_apply_libraries_with_remappings() { - let mut tmp = TempProject::dapptools().unwrap(); + let mut tmp = 
TempProject::::dapptools().unwrap(); let remapping = tmp.paths().libraries[0].join("remapping"); tmp.paths_mut() @@ -2101,8 +2106,8 @@ library MyLib { let libs = Libraries::parse(&[format!("remapping/MyLib.sol:MyLib:{:?}", Address::ZERO)]).unwrap(); // provide the library settings to let solc link - tmp.project_mut().settings.libraries = libs.with_applied_remappings(tmp.paths()); - tmp.project_mut().settings.libraries.slash_paths(); + tmp.project_mut().settings.solc.libraries = libs.with_applied_remappings(tmp.paths()); + tmp.project_mut().settings.solc.libraries.slash_paths(); let compiled = tmp.compile().unwrap(); compiled.assert_success(); @@ -2115,7 +2120,7 @@ library MyLib { #[test] fn can_detect_invalid_version() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); let content = r" pragma solidity ^0.100.10; contract A {} @@ -2135,7 +2140,7 @@ fn can_detect_invalid_version() { #[test] fn test_severity_warnings() { - let mut tmp = TempProject::dapptools().unwrap(); + let mut tmp = TempProject::::dapptools().unwrap(); // also treat warnings as error tmp.project_mut().compiler_severity_filter = Severity::Warning; @@ -2175,7 +2180,7 @@ fn test_severity_warnings() { #[test] fn can_recompile_with_changes() { - let mut tmp = TempProject::dapptools().unwrap(); + let mut tmp = TempProject::::dapptools().unwrap(); tmp.project_mut().paths.allowed_paths = BTreeSet::from([tmp.root().join("modules")]); let content = r#" @@ -2215,7 +2220,7 @@ fn can_recompile_with_changes() { #[test] fn can_recompile_with_lowercase_names() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); tmp.add_source( "deployProxy.sol", @@ -2275,7 +2280,7 @@ fn can_recompile_with_lowercase_names() { #[test] fn can_recompile_unchanged_with_empty_files() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); tmp.add_source( "A", @@ -2323,7 +2328,7 @@ fn can_recompile_unchanged_with_empty_files() { #[test] fn can_emit_empty_artifacts() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); let top_level = tmp .add_source( @@ -2386,7 +2391,7 @@ contract Contract { #[test] fn can_detect_contract_def_source_files() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); let mylib = tmp .add_source( @@ -2474,7 +2479,7 @@ fn can_detect_contract_def_source_files() { #[test] fn can_compile_sparse_with_link_references() { - let tmp = TempProject::dapptools().unwrap(); + let tmp = TempProject::::dapptools().unwrap(); tmp.add_source( "ATest.t.sol", @@ -2502,8 +2507,7 @@ fn can_compile_sparse_with_link_references() { ) .unwrap(); - let filter = SolcSparseFileFilter::new(TestFileFilter::default()); - let mut compiled = tmp.compile_sparse(Box::new(filter)).unwrap(); + let mut compiled = tmp.compile_sparse(Box::::default()).unwrap(); compiled.assert_success(); let mut output = compiled.clone().into_output(); @@ -2545,8 +2549,8 @@ fn can_compile_sparse_with_link_references() { #[test] fn can_sanitize_bytecode_hash() { - let mut tmp = TempProject::dapptools().unwrap(); - tmp.project_mut().settings.metadata = Some(BytecodeHash::Ipfs.into()); + let mut tmp = TempProject::::dapptools().unwrap(); + tmp.project_mut().settings.solc.metadata = Some(BytecodeHash::Ipfs.into()); tmp.add_source( "A", @@ -2580,7 +2584,7 @@ fn can_create_standard_json_input_with_external_file() { fs::create_dir_all(verif_dir.join("src")).unwrap(); 
fs::create_dir(&remapped_dir).unwrap(); - let mut verif_project = Project::builder() + let mut verif_project = ProjectBuilder::::new(Default::default()) .paths(ProjectPathsConfig::dapptools(&verif_dir).unwrap()) .build(Default::default()) .unwrap(); @@ -2631,7 +2635,7 @@ fn can_create_standard_json_input_with_external_file() { #[test] fn can_compile_std_json_input() { - let tmp = TempProject::dapptools_init().unwrap(); + let tmp = TempProject::::dapptools_init().unwrap(); tmp.assert_no_errors(); let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap(); let input = tmp.project().standard_json_input(source).unwrap(); @@ -2653,8 +2657,8 @@ fn can_compile_std_json_input() { #[test] #[cfg(unix)] fn can_create_standard_json_input_with_symlink() { - let mut project = TempProject::dapptools().unwrap(); - let dependency = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); + let dependency = TempProject::::dapptools().unwrap(); // File structure: // @@ -2723,8 +2727,8 @@ fn can_compile_model_checker_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/model-checker-sample"); let paths = ProjectPathsConfig::builder().sources(root); - let mut project = TempProject::::new(paths).unwrap(); - project.project_mut().settings.model_checker = Some(ModelCheckerSettings { + let mut project = TempProject::::new(paths).unwrap(); + project.project_mut().settings.solc.model_checker = Some(ModelCheckerSettings { engine: Some(CHC), timeout: Some(10000), ..Default::default() @@ -2778,7 +2782,7 @@ fn compile_project_with_options( severity_filter: Option, ignore_paths: Option>, ignore_error_code: Option, -) -> ProjectCompileOutput { +) -> ProjectCompileOutput { let mut builder = Project::builder().no_artifacts().paths(gen_test_data_licensing_warning()).ephemeral(); @@ -2841,7 +2845,7 @@ fn remove_solc_if_exists(version: &Version) { #[test] fn can_install_solc_and_compile_version() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let version = Version::new(0, 8, 10); project @@ -2864,7 +2868,7 @@ contract Contract {{ }} #[tokio::test(flavor = "multi_thread")] async fn can_install_solc_and_compile_std_json_input_async() { - let tmp = TempProject::dapptools_init().unwrap(); + let tmp = TempProject::::dapptools_init().unwrap(); tmp.assert_no_errors(); let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap(); let input = tmp.project().standard_json_input(source).unwrap(); @@ -2881,7 +2885,7 @@ async fn can_install_solc_and_compile_std_json_input_async() { #[test] fn can_purge_obsolete_artifacts() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); project.set_solc("0.8.10"); project .add_source( @@ -2912,9 +2916,9 @@ fn can_purge_obsolete_artifacts() { #[test] fn can_parse_notice() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); project.project_mut().artifacts.additional_values.userdoc = true; - project.project_mut().settings = project.project_mut().artifacts.settings(); + project.project_mut().settings.solc = project.project_mut().artifacts.solc_settings(); let contract = r" pragma solidity $VERSION; @@ -2991,10 +2995,10 @@ fn can_parse_notice() { #[test] fn can_parse_doc() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); 
project.project_mut().artifacts.additional_values.userdoc = true; project.project_mut().artifacts.additional_values.devdoc = true; - project.project_mut().settings = project.project_mut().artifacts.settings(); + project.project_mut().settings.solc = project.project_mut().artifacts.solc_settings(); let contract = r" // SPDX-License-Identifier: GPL-3.0-only @@ -3175,7 +3179,7 @@ contract NotERC20 is INotERC20 { #[test] fn test_relative_cache_entries() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let _a = project .add_source( "A", @@ -3236,7 +3240,7 @@ contract D { } #[test] fn test_failure_after_removing_file() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( "A", @@ -3279,7 +3283,7 @@ contract C { } #[test] fn can_handle_conflicting_files() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -3344,7 +3348,7 @@ fn can_handle_conflicting_files() { // #[test] fn can_handle_conflicting_files_recompile() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -3441,7 +3445,7 @@ fn can_handle_conflicting_files_recompile() { // #[test] fn can_handle_conflicting_files_case_sensitive_recompile() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -3546,7 +3550,7 @@ fn can_checkout_repo() { #[test] fn can_detect_config_changes() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); let remapping = project.paths().libraries[0].join("remapping"); project @@ -3579,26 +3583,27 @@ fn can_detect_config_changes() { let compiled = project.compile().unwrap(); compiled.assert_success(); - let cache_before = CompilerCache::::read(&project.paths().cache).unwrap(); + let cache_before = + CompilerCache::::read(&project.paths().cache).unwrap(); assert_eq!(cache_before.files.len(), 2); // nothing to compile let compiled = project.compile().unwrap(); assert!(compiled.is_unchanged()); - project.project_mut().settings.optimizer.enabled = Some(true); + project.project_mut().settings.solc.optimizer.enabled = Some(true); let compiled = project.compile().unwrap(); compiled.assert_success(); assert!(!compiled.is_unchanged()); - let cache_after = CompilerCache::::read(&project.paths().cache).unwrap(); + let cache_after = CompilerCache::::read(&project.paths().cache).unwrap(); assert_ne!(cache_before, cache_after); } #[test] fn can_add_basic_contract_and_library() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); let remapping = project.paths().libraries[0].join("remapping"); project @@ -3610,7 +3615,7 @@ fn can_add_basic_contract_and_library() { let lib = project.add_basic_source("Bar", "^0.8.0").unwrap(); - let graph = Graph::::resolve(project.paths()).unwrap(); + let graph = Graph::::resolve(project.paths()).unwrap(); assert_eq!(graph.files().len(), 2); assert!(graph.files().contains_key(&src)); assert!(graph.files().contains_key(&lib)); @@ -3624,7 +3629,7 @@ fn can_add_basic_contract_and_library() { // #[test] fn can_handle_nested_absolute_imports() { - let mut project = TempProject::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); let remapping = project.paths().libraries[0].join("myDepdendency"); project @@ 
-3676,7 +3681,7 @@ fn can_handle_nested_absolute_imports() { #[test] fn can_handle_nested_test_absolute_imports() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -3731,7 +3736,7 @@ contract ContractTest { // This is a repro and a regression test for https://github.com/foundry-rs/compilers/pull/45 #[test] fn dirty_files_discovery() { - let project = TempProject::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -3791,7 +3796,7 @@ fn test_deterministic_metadata() { let project = Project::builder() .locked_version(SolcLanguage::Solidity, Version::new(0, 8, 18)) .paths(paths) - .build(SolcCompiler::default()) + .build(MultiCompiler::default()) .unwrap(); let compiled = project.compile().unwrap(); @@ -3864,7 +3869,7 @@ fn yul_remappings_ignored() { name: "@openzeppelin".to_string(), path: root.to_string_lossy().to_string(), }); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let compiled = project.compile().unwrap(); compiled.assert_success(); @@ -3913,7 +3918,7 @@ fn test_can_compile_multi() { solc: Default::default(), }; - let compiler = MultiCompiler { solc: SolcCompiler::default(), vyper: VYPER.clone() }; + let compiler = MultiCompiler { solc: SolcCompiler::default(), vyper: Some(VYPER.clone()) }; let project = ProjectBuilder::::new(Default::default()) .settings(settings) From 560ad63249462e5c5504d87f18145854cab49bfa Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 29 May 2024 16:59:35 +0200 Subject: [PATCH 11/24] fix features --- Cargo.toml | 2 +- src/compile/mod.rs | 1 + src/compilers/multi.rs | 5 ++-- src/compilers/solc.rs | 57 ++++++++++++++++++++++++------------------ 4 files changed, 38 insertions(+), 27 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index ae3e5bef..c07efba4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -95,7 +95,7 @@ path = "tests/mocked.rs" required-features = ["full", "project-util"] [features] -default = ["rustls", "svm-solc"] +default = ["rustls"] full = ["async", "svm-solc"] diff --git a/src/compile/mod.rs b/src/compile/mod.rs index 4f5355b1..1f211feb 100644 --- a/src/compile/mod.rs +++ b/src/compile/mod.rs @@ -523,6 +523,7 @@ impl Solc { cmd } + #[cfg(feature = "svm-solc")] pub fn find_or_install(version: &Version) -> Result { let solc = if let Some(solc) = Self::find_svm_installed_version(version.to_string())? 
{ solc diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index d377c19f..5d6b0a59 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -27,6 +27,7 @@ pub struct MultiCompiler { pub vyper: Option, } +#[cfg(feature = "svm-solc")] impl Default for MultiCompiler { fn default() -> Self { let vyper = Vyper::new("vyper").ok(); @@ -36,9 +37,9 @@ impl Default for MultiCompiler { } impl MultiCompiler { - pub fn new(vyper_path: Option) -> Result { + pub fn new(solc: SolcCompiler, vyper_path: Option) -> Result { let vyper = vyper_path.map(Vyper::new).transpose()?; - Ok(Self { solc: SolcCompiler::default(), vyper }) + Ok(Self { solc, vyper }) } } diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index 3a80a4c2..cf05073b 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -16,16 +16,18 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::{BTreeSet, HashSet}, + collections::{BTreeSet}, fmt, path::{Path, PathBuf}, }; -#[derive(Debug, Clone, Default)] -#[non_exhaustive] +#[derive(Debug, Clone)] +#[cfg_attr(feature = "svm-solc", derive(Default))] pub enum SolcCompiler { #[default] + #[cfg(feature = "svm-solc")] AutoDetect, + Specific(Solc), } @@ -59,6 +61,8 @@ impl Compiler for SolcCompiler { fn compile(&self, input: &Self::Input) -> Result> { let mut solc = match self { Self::Specific(solc) => solc.clone(), + + #[cfg(feature = "svm-solc")] Self::AutoDetect => Solc::find_or_install(&input.version)?, }; solc.base_path = input.base_path.clone(); @@ -77,28 +81,33 @@ impl Compiler for SolcCompiler { } fn available_versions(&self, _language: &Self::Language) -> Vec { - if let Self::Specific(solc) = self { - return vec![CompilerVersion::Installed(solc.version.clone())]; + match self { + Self::Specific(solc) => vec![CompilerVersion::Installed(solc.version.clone())], + + #[cfg(feature = "svm-solc")] + Self::AutoDetect => { + let mut all_versions = Solc::installed_versions() + .into_iter() + .map(CompilerVersion::Installed) + .collect::>(); + let mut uniques = all_versions + .iter() + .map(|v| { + let v = v.as_ref(); + (v.major, v.minor, v.patch) + }) + .collect::>(); + all_versions.extend( + Solc::released_versions() + .into_iter() + .filter(|v| uniques.insert((v.major, v.minor, v.patch))) + .map(CompilerVersion::Remote), + ); + all_versions.sort_unstable(); + all_versions + + } } - let mut all_versions = Solc::installed_versions() - .into_iter() - .map(CompilerVersion::Installed) - .collect::>(); - let mut uniques = all_versions - .iter() - .map(|v| { - let v = v.as_ref(); - (v.major, v.minor, v.patch) - }) - .collect::>(); - all_versions.extend( - Solc::released_versions() - .into_iter() - .filter(|v| uniques.insert((v.major, v.minor, v.patch))) - .map(CompilerVersion::Remote), - ); - all_versions.sort_unstable(); - all_versions } } From 7dbb1ce55ff0ff5fad478ccf1ff52fe426e622ba Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 29 May 2024 17:07:12 +0200 Subject: [PATCH 12/24] fmt --- src/compilers/solc.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index cf05073b..dda32af8 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -16,7 +16,7 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::{BTreeSet}, + collections::BTreeSet, fmt, path::{Path, PathBuf}, }; @@ -105,7 +105,6 @@ impl Compiler for SolcCompiler { ); all_versions.sort_unstable(); all_versions - } } } From 
990585bea2a4ec5ee80f519cb3cdb615e4d2142e Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 29 May 2024 17:08:15 +0200 Subject: [PATCH 13/24] fix doc --- src/compile/output/mod.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compile/output/mod.rs b/src/compile/output/mod.rs index ae0233c7..864f29a4 100644 --- a/src/compile/output/mod.rs +++ b/src/compile/output/mod.rs @@ -832,9 +832,9 @@ impl AggregatedCompilerOutput { impl AggregatedCompilerOutput { /// Whether the output contains a compiler error /// - /// This adheres to the given `compiler_severity_filter` and also considers [Error] with the - /// given [Severity] as errors. For example [Severity::Warning] will consider [Error]s with - /// [Severity::Warning] and [Severity::Error] as errors. + /// This adheres to the given `compiler_severity_filter` and also considers [CompilationError] + /// with the given [Severity] as errors. For example [Severity::Warning] will consider + /// [CompilationError]s with [Severity::Warning] and [Severity::Error] as errors. pub fn has_error( &self, ignored_error_codes: &[u64], From a00c65930d7878dd56dac5100a57e9eef2ebeb50 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 29 May 2024 20:38:26 +0200 Subject: [PATCH 14/24] fix cache --- src/cache.rs | 4 +++- src/lib.rs | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/cache.rs b/src/cache.rs index ac41698f..cb7b5108 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -461,7 +461,9 @@ impl CacheEntry { /// Returns `true` if the artifacts set contains the given version pub fn contains_version(&self, version: &Version) -> bool { - self.artifacts_versions().any(|(v, _)| v == version) + self.artifacts_versions().any(|(v, _)| { + v.major == version.major && v.minor == version.minor && v.patch == version.patch + }) } /// Iterator that yields all artifact files and their version diff --git a/src/lib.rs b/src/lib.rs index 17c0ef8d..da03ec65 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -440,7 +440,7 @@ impl Project { let mut temp_project = (*self).clone(); temp_project.no_artifacts = true; temp_project.settings.update_output_selection(|selection| { - *selection = OutputSelection::common_output_selection([]); + *selection = OutputSelection::common_output_selection(["abi".to_string()]); }); let output = temp_project.compile()?; From 527363e40f66019b391e23284fcfdc5647b5b5e9 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Wed, 29 May 2024 21:37:03 +0200 Subject: [PATCH 15/24] fix --- src/compile/mod.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/compile/mod.rs b/src/compile/mod.rs index 1f211feb..7192aa03 100644 --- a/src/compile/mod.rs +++ b/src/compile/mod.rs @@ -319,18 +319,20 @@ impl Solc { #[cfg(test)] crate::take_solc_installer_lock!(_lock); + let version = Version::new(version.major, version.minor, version.patch); + trace!("blocking installing solc version \"{}\"", version); - crate::report::solc_installation_start(version); + crate::report::solc_installation_start(&version); // The async version `svm::install` is used instead of `svm::blocking_intsall` // because the underlying `reqwest::blocking::Client` does not behave well // inside of a Tokio runtime. 
See: https://github.com/seanmonstar/reqwest/issues/1017
-        match RuntimeOrHandle::new().block_on(svm::install(version)) {
+        match RuntimeOrHandle::new().block_on(svm::install(&version)) {
             Ok(path) => {
-                crate::report::solc_installation_success(version);
+                crate::report::solc_installation_success(&version);
                 Ok(Solc::new_with_version(path, version.clone()))
             }
             Err(err) => {
-                crate::report::solc_installation_error(version, &err.to_string());
+                crate::report::solc_installation_error(&version, &err.to_string());
                 Err(err)
             }
         }

From 9cc31228a127b2dfaf0fc0aaff59ee6b6c884687 Mon Sep 17 00:00:00 2001
From: Arsenii Kulikov
Date: Wed, 29 May 2024 22:49:28 +0200
Subject: [PATCH 16/24] fix

---
 src/cache.rs               | 4 +---
 src/compilers/mod.rs       | 3 ++-
 src/compilers/solc.rs      | 6 +++++-
 src/compilers/vyper/mod.rs | 6 +++++-
 4 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/src/cache.rs b/src/cache.rs
index cb7b5108..ac41698f 100644
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -461,9 +461,7 @@ impl CacheEntry {
 
     /// Returns `true` if the artifacts set contains the given version
    pub fn contains_version(&self, version: &Version) -> bool {
-        self.artifacts_versions().any(|(v, _)| {
-            v.major == version.major && v.minor == version.minor && v.patch == version.patch
-        })
+        self.artifacts_versions().any(|(v, _)| v == version)
     }
 
     /// Iterator that yields all artifact files and their version
diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs
index ea159318..816fad89 100644
--- a/src/compilers/mod.rs
+++ b/src/compilers/mod.rs
@@ -237,6 +237,7 @@ pub trait Compiler: Send + Sync + Clone {
     /// Returned input is always the one which was seen by the binary.
     fn compile(&self, input: &Self::Input) -> Result<CompilerOutput<Self::CompilationError>>;
 
-    /// Returns all versions available locally and remotely.
+    /// Returns all versions available locally and remotely. Should return versions with stripped
+    /// metadata.
fn available_versions(&self, language: &Self::Language) -> Vec; } diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index dda32af8..90e37c29 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -82,7 +82,11 @@ impl Compiler for SolcCompiler { fn available_versions(&self, _language: &Self::Language) -> Vec { match self { - Self::Specific(solc) => vec![CompilerVersion::Installed(solc.version.clone())], + Self::Specific(solc) => vec![CompilerVersion::Installed(Version::new( + solc.version.major, + solc.version.minor, + solc.version.patch, + ))], #[cfg(feature = "svm-solc")] Self::AutoDetect => { diff --git a/src/compilers/vyper/mod.rs b/src/compilers/vyper/mod.rs index 9a3c6d99..746a07fd 100644 --- a/src/compilers/vyper/mod.rs +++ b/src/compilers/vyper/mod.rs @@ -168,6 +168,10 @@ impl Compiler for Vyper { } fn available_versions(&self, _language: &Self::Language) -> Vec { - vec![super::CompilerVersion::Installed(self.version.clone())] + vec![super::CompilerVersion::Installed(Version::new( + self.version.major, + self.version.minor, + self.version.patch, + ))] } } From 52d850d47ca66335374869898ea9a7ad6cd4d7e3 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 30 May 2024 01:27:28 +0200 Subject: [PATCH 17/24] update generics --- src/compilers/multi.rs | 6 +++++ src/config.rs | 47 +++++++++++++++++++++++++++++--- src/filter.rs | 2 +- src/flatten.rs | 38 +++++++++++++++----------- src/lib.rs | 26 ++++++------------ src/project_util/mod.rs | 7 ----- tests/project.rs | 59 +++++++++++++++++++---------------------- 7 files changed, 108 insertions(+), 77 deletions(-) diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index 5d6b0a59..795af52b 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -304,3 +304,9 @@ impl fmt::Display for MultiCompilerError { } } } + +impl From for SolcSettings { + fn from(settings: MultiCompilerSettings) -> Self { + settings.solc + } +} diff --git a/src/config.rs b/src/config.rs index 9b5f98a8..4b0b47ee 100644 --- a/src/config.rs +++ b/src/config.rs @@ -5,7 +5,7 @@ use crate::{ error::{Result, SolcError, SolcIoError}, flatten::{collect_ordered_deps, combine_version_pragmas}, remappings::Remapping, - resolver::{Graph, SolImportAlias}, + resolver::{parse::SolData, Graph, SolImportAlias}, utils, Source, Sources, }; use serde::{Deserialize, Serialize}; @@ -83,7 +83,14 @@ impl ProjectPathsConfig { } impl ProjectPathsConfig { - /// Flattens all file imports into a single string + /// Flattens the target solidity file into a single string suitable for verification. + /// + /// This method uses a dependency graph to resolve imported files and substitute + /// import directives with the contents of target files. It will strip the pragma + /// version directives and SDPX license identifiers from all imported files. + /// + /// NB: the SDPX license identifier will be removed from the imported file + /// only if it is found at the beginning of the file. 
pub fn flatten(&self, target: &Path) -> Result { trace!("flattening file"); let mut input_files = self.input_files(); @@ -96,7 +103,7 @@ impl ProjectPathsConfig { } let sources = Source::read_all_files(input_files)?; - let graph = Graph::resolve_sources(self, sources)?; + let graph = Graph::::resolve_sources(self, sources)?; let ordered_deps = collect_ordered_deps(&flatten_target, self, &graph)?; #[cfg(windows)] @@ -214,7 +221,7 @@ impl ProjectPathsConfig { } } -impl ProjectPathsConfig { +impl ProjectPathsConfig { /// Creates a new hardhat style config instance which points to the canonicalized root path pub fn hardhat(root: impl AsRef) -> Result { PathStyle::HardHat.paths(root) @@ -506,6 +513,38 @@ impl ProjectPathsConfig { utils::resolve_library(&self.libraries, import) } } + + pub fn with_language(self) -> ProjectPathsConfig { + let Self { + root, + cache, + artifacts, + build_infos, + sources, + tests, + scripts, + libraries, + remappings, + include_paths, + allowed_paths, + _l, + } = self; + + ProjectPathsConfig { + root, + cache, + artifacts, + build_infos, + sources, + tests, + scripts, + libraries, + remappings, + include_paths, + allowed_paths, + _l: PhantomData, + } + } } impl ProjectPathsConfig { diff --git a/src/filter.rs b/src/filter.rs index ea5535f2..2b5e86cd 100644 --- a/src/filter.rs +++ b/src/filter.rs @@ -48,7 +48,7 @@ impl FileFilter for TestFileFilter { } } -trait MaybeSolData { +pub trait MaybeSolData { fn sol_data(&self) -> Option<&SolData>; } diff --git a/src/flatten.rs b/src/flatten.rs index c9f2d4e9..317cc142 100644 --- a/src/flatten.rs +++ b/src/flatten.rs @@ -2,11 +2,12 @@ use crate::{ artifacts::{ ast::SourceLocation, visitor::{Visitor, Walk}, - ContractDefinitionPart, Error, ExternalInlineAssemblyReference, Identifier, IdentifierPath, + ContractDefinitionPart, ExternalInlineAssemblyReference, Identifier, IdentifierPath, MemberAccess, Source, SourceUnit, SourceUnitPart, Sources, }, - compilers::solc::SolcCompiler, + compilers::{Compiler, ParsedSource}, error::SolcError, + filter::MaybeSolData, resolver::parse::SolData, utils, ConfigurableArtifacts, Graph, Project, ProjectCompileOutput, ProjectPathsConfig, Result, }; @@ -175,11 +176,14 @@ impl Flattener { /// Compilation output is expected to contain all artifacts for all sources. /// Flattener caller is expected to resolve all imports of target file, compile them and pass /// into this function. 
- pub fn new( - project: &Project, - output: &ProjectCompileOutput, + pub fn new( + project: &Project, + output: &ProjectCompileOutput, target: &Path, - ) -> Result { + ) -> Result + where + C::ParsedSource: MaybeSolData, + { let input_files = output .artifacts_with_files() .map(|(file, _, _)| PathBuf::from(file)) @@ -188,7 +192,7 @@ impl Flattener { .collect::>(); let sources = Source::read_all_files(input_files)?; - let graph = Graph::resolve_sources(&project.paths, sources)?; + let graph = Graph::::resolve_sources(&project.paths, sources)?; let ordered_sources = collect_ordered_deps(&target.to_path_buf(), &project.paths, &graph)?; @@ -755,10 +759,10 @@ impl Flattener { } /// Performs DFS to collect all dependencies of a target -fn collect_deps( +fn collect_deps( path: &PathBuf, - paths: &ProjectPathsConfig, - graph: &Graph, + paths: &ProjectPathsConfig, + graph: &Graph, deps: &mut HashSet, ) -> Result<()> { if deps.insert(path.clone()) { @@ -771,9 +775,11 @@ fn collect_deps( .get(path) .ok_or_else(|| SolcError::msg(format!("cannot resolve file at {}", path.display())))?; - for import in &graph.node(*node_id).data.imports { - let path = paths.resolve_import(target_dir, import.data().path())?; - collect_deps(&path, paths, graph, deps)?; + if let Some(data) = graph.node(*node_id).data.sol_data() { + for import in &data.imports { + let path = paths.resolve_import(target_dir, import.data().path())?; + collect_deps(&path, paths, graph, deps)?; + } } } Ok(()) @@ -789,10 +795,10 @@ fn collect_deps( /// Instead, we sort files by the number of their dependencies (imports of any depth) in ascending /// order. If files have the same number of dependencies, we sort them alphabetically. /// Target file is always placed last. -pub fn collect_ordered_deps( +pub fn collect_ordered_deps( path: &PathBuf, - paths: &ProjectPathsConfig, - graph: &Graph, + paths: &ProjectPathsConfig, + graph: &Graph, ) -> Result> { let mut deps = HashSet::new(); collect_deps(path, paths, graph, &mut deps)?; diff --git a/src/lib.rs b/src/lib.rs index da03ec65..8426ec2f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,12 +25,11 @@ pub mod cache; pub mod flatten; pub mod hh; -use compilers::{multi::MultiCompiler, solc::SolcCompiler, Compiler, CompilerSettings}; +use compilers::{multi::MultiCompiler, Compiler, CompilerSettings}; pub use filter::SparseOutputFileFilter; pub use hh::{HardhatArtifact, HardhatArtifacts}; pub mod resolver; -use resolver::parse::SolData; pub use resolver::Graph; pub mod compilers; @@ -62,7 +61,7 @@ use crate::{ error::{SolcError, SolcIoError}, sources::{VersionedSourceFile, VersionedSourceFiles}, }; -use artifacts::{contract::Contract, output_selection::OutputSelection, Severity}; +use artifacts::{contract::Contract, output_selection::OutputSelection, Settings, Severity}; use compile::output::contracts::VersionedContracts; use error::Result; use semver::Version; @@ -153,7 +152,10 @@ impl Project { } } -impl Project { +impl Project +where + C::Settings: Into, +{ /// Returns standard-json-input to compile the target contract pub fn standard_json_input( &self, @@ -161,7 +163,7 @@ impl Project { ) -> Result { let target = target.as_ref(); trace!("Building standard-json-input for {:?}", target); - let graph = Graph::::resolve(&self.paths)?; + let graph = Graph::::resolve(&self.paths)?; let target_index = graph.files().get(target).ok_or_else(|| { SolcError::msg(format!("cannot resolve file at {:?}", target.display())) })?; @@ -184,7 +186,7 @@ impl Project { .map(|(path, source)| (rebase_path(root, path), 
source.clone())) .collect(); - let mut settings = self.settings.clone(); + let mut settings = self.settings.clone().into(); // strip the path to the project root from all remappings settings.remappings = self .paths @@ -198,18 +200,6 @@ impl Project { Ok(input) } - - /// Flattens the target solidity file into a single string suitable for verification. - /// - /// This method uses a dependency graph to resolve imported files and substitute - /// import directives with the contents of target files. It will strip the pragma - /// version directives and SDPX license identifiers from all imported files. - /// - /// NB: the SDPX license identifier will be removed from the imported file - /// only if it is found at the beginning of the file. - pub fn flatten(&self, target: &Path) -> Result { - self.paths.flatten(target) - } } impl Project { diff --git a/src/project_util/mod.rs b/src/project_util/mod.rs index 21c71408..f4cb7a93 100644 --- a/src/project_util/mod.rs +++ b/src/project_util/mod.rs @@ -4,7 +4,6 @@ use crate::{ artifacts::Settings, compilers::{ multi::{MultiCompiler, MultiCompilerSettings}, - solc::SolcCompiler, Compiler, }, config::ProjectPathsConfigBuilder, @@ -491,12 +490,6 @@ impl AsRef> } } -impl TempProject { - pub fn flatten(&self, target: &Path) -> Result { - self.project().flatten(target) - } -} - /// The cache file and all the artifacts it references #[derive(Debug, Clone)] pub struct ArtifactsSnapshot { diff --git a/tests/project.rs b/tests/project.rs index 5df1da11..2e943d58 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -529,12 +529,9 @@ fn copy_dir_all(src: impl AsRef, dst: impl AsRef) -> io::Result<()> // Runs both `flatten` implementations, asserts that their outputs match and runs additional checks // against the output. 
-fn test_flatteners( - project: &TempProject, - target: &Path, - additional_checks: fn(&str), -) { - let result = project.flatten(target).unwrap(); +fn test_flatteners(project: &TempProject, target: &Path, additional_checks: fn(&str)) { + let result = + project.project().paths.clone().with_language::().flatten(target).unwrap(); let solc_result = Flattener::new(project.project(), &project.compile().unwrap(), target).unwrap().flatten(); @@ -549,7 +546,7 @@ fn can_flatten_file_with_external_lib() { let paths = ProjectPathsConfig::builder() .sources(root.join("contracts")) .lib(root.join("node_modules")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let target = root.join("contracts").join("Greeter.sol"); @@ -564,7 +561,7 @@ fn can_flatten_file_with_external_lib() { fn can_flatten_file_in_dapp_sample() { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample"); let paths = ProjectPathsConfig::builder().sources(root.join("src")).lib(root.join("lib")); - let project = TempProject::::new(paths).unwrap(); + let project = TempProject::::new(paths).unwrap(); let target = root.join("src/Dapp.t.sol"); @@ -578,7 +575,7 @@ fn can_flatten_file_in_dapp_sample() { #[test] fn can_flatten_unique() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -637,7 +634,7 @@ contract A { } #[test] fn can_flatten_experimental_pragma() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -700,7 +697,7 @@ contract A { } #[test] fn can_flatten_on_solang_failure() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -729,7 +726,7 @@ contract Contract { ) .unwrap(); - let result = project.flatten(target.as_path()); + let result = project.paths().clone().with_language::().flatten(target.as_path()); assert!(result.is_ok()); let result = result.unwrap(); @@ -754,7 +751,7 @@ contract Contract { #[test] fn can_flatten_multiline() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -816,7 +813,7 @@ contract A { } #[test] fn can_flatten_remove_extra_spacing() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -875,7 +872,7 @@ contract A { } #[test] fn can_flatten_with_alias() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -1034,7 +1031,7 @@ contract Contract is ParentContract, #[test] fn can_flatten_with_version_pragma_after_imports() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( @@ -1108,7 +1105,7 @@ contract A { } #[test] fn can_flatten_with_duplicates() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1164,7 +1161,7 @@ contract Bar_1 is Foo {} #[test] fn can_flatten_complex_aliases_setup_with_duplicates() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1307,7 +1304,7 @@ contract D is A_0 { // https://github.com/foundry-rs/compilers/issues/34 
#[test] fn can_flatten_34_repro() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); let target = project .add_source( "FlieA.sol", @@ -1374,7 +1371,7 @@ contract A { #[test] fn can_flatten_experimental_in_other_file() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1421,7 +1418,7 @@ contract B is A {} #[test] fn can_detect_type_error() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1444,7 +1441,7 @@ fn can_detect_type_error() { #[test] fn can_flatten_aliases_with_pragma_and_license_after_source() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1482,7 +1479,7 @@ contract B is A {} #[test] fn can_flatten_rename_inheritdocs() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1549,7 +1546,7 @@ contract B is A_1 { #[test] fn can_flatten_rename_inheritdocs_alias() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1602,7 +1599,7 @@ contract B is A { #[test] fn can_flatten_rename_user_defined_functions() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1698,7 +1695,7 @@ contract Foo { #[test] fn can_flatten_rename_global_functions() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1767,7 +1764,7 @@ contract Foo { #[test] fn can_flatten_rename_in_assembly() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -1834,7 +1831,7 @@ contract Foo { #[test] fn can_flatten_combine_pragmas() { - let project = TempProject::::dapptools().unwrap(); + let project = TempProject::::dapptools().unwrap(); project .add_source( @@ -2635,7 +2632,7 @@ fn can_create_standard_json_input_with_external_file() { #[test] fn can_compile_std_json_input() { - let tmp = TempProject::::dapptools_init().unwrap(); + let tmp = TempProject::::dapptools_init().unwrap(); tmp.assert_no_errors(); let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap(); let input = tmp.project().standard_json_input(source).unwrap(); @@ -2657,7 +2654,7 @@ fn can_compile_std_json_input() { #[test] #[cfg(unix)] fn can_create_standard_json_input_with_symlink() { - let mut project = TempProject::::dapptools().unwrap(); + let mut project = TempProject::::dapptools().unwrap(); let dependency = TempProject::::dapptools().unwrap(); // File structure: @@ -2868,7 +2865,7 @@ contract Contract {{ }} #[tokio::test(flavor = "multi_thread")] async fn can_install_solc_and_compile_std_json_input_async() { - let tmp = TempProject::::dapptools_init().unwrap(); + let tmp = TempProject::::dapptools_init().unwrap(); tmp.assert_no_errors(); let source = tmp.list_source_files().into_iter().find(|p| p.ends_with("Dapp.t.sol")).unwrap(); let input = tmp.project().standard_json_input(source).unwrap(); From d5129376f5a1882a86a1aa99cde9180b0c9f14f8 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 30 May 2024 16:56:42 +0200 Subject: [PATCH 18/24] docs --- src/compilers/multi.rs | 52 ++++++++++++++++++++++++------------------ 
src/compilers/solc.rs | 1 + 2 files changed, 31 insertions(+), 22 deletions(-) diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index 795af52b..af65f732 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -21,6 +21,7 @@ use std::{ path::{Path, PathBuf}, }; +/// Compiler capable of compiling both Solidity and Vyper sources. #[derive(Debug, Clone)] pub struct MultiCompiler { pub solc: SolcCompiler, @@ -43,6 +44,7 @@ impl MultiCompiler { } } +/// Languages supported by the [MultiCompiler]. #[derive(Debug, Clone, Hash, Eq, PartialEq)] pub enum MultiCompilerLanguage { Solc(SolcLanguage), @@ -61,6 +63,19 @@ impl From for MultiCompilerLanguage { } } +impl Language for MultiCompilerLanguage { + const FILE_EXTENSIONS: &'static [&'static str] = &["sol", "vy", "yul"]; +} + +impl fmt::Display for MultiCompilerLanguage { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Solc(lang) => lang.fmt(f), + Self::Vyper(lang) => lang.fmt(f), + } + } +} + #[derive(Debug, Clone)] pub enum MultiCompilerParsedSource { Solc(SolData), @@ -74,15 +89,11 @@ pub enum MultiCompilerError { Vyper(VyperCompilationError), } -impl Language for MultiCompilerLanguage { - const FILE_EXTENSIONS: &'static [&'static str] = &["sol", "vy", "yul"]; -} - -impl fmt::Display for MultiCompilerLanguage { +impl fmt::Display for MultiCompilerError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::Solc(lang) => lang.fmt(f), - Self::Vyper(lang) => lang.fmt(f), + Self::Solc(error) => error.fmt(f), + Self::Vyper(error) => error.fmt(f), } } } @@ -104,6 +115,18 @@ impl CompilerSettings for MultiCompilerSettings { } } +impl From for SolcSettings { + fn from(settings: MultiCompilerSettings) -> Self { + settings.solc + } +} + +impl From for VyperSettings { + fn from(settings: MultiCompilerSettings) -> Self { + settings.vyper + } +} + #[derive(Debug, Clone, Serialize)] #[serde(untagged)] pub enum MultiCompilerInput { @@ -295,18 +318,3 @@ impl CompilationError for MultiCompilerError { } } } - -impl fmt::Display for MultiCompilerError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Solc(error) => error.fmt(f), - Self::Vyper(error) => error.fmt(f), - } - } -} - -impl From for SolcSettings { - fn from(settings: MultiCompilerSettings) -> Self { - settings.solc - } -} diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index 90e37c29..5cf258a3 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -31,6 +31,7 @@ pub enum SolcCompiler { Specific(Solc), } +/// Languages supported by the Solc compiler. #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[non_exhaustive] pub enum SolcLanguage { From 7cc9d6a2e01ad9430f5e5621e539ba7c88d8d5ca Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 30 May 2024 20:49:08 +0200 Subject: [PATCH 19/24] docs --- src/artifacts/mod.rs | 2 ++ src/compile/mod.rs | 1 + src/compilers/mod.rs | 3 +++ src/compilers/multi.rs | 4 ++++ src/compilers/vyper/mod.rs | 2 ++ 5 files changed, 12 insertions(+) diff --git a/src/artifacts/mod.rs b/src/artifacts/mod.rs index 71f1edd5..2618867a 100644 --- a/src/artifacts/mod.rs +++ b/src/artifacts/mod.rs @@ -85,6 +85,8 @@ impl SolcInput { Self { language, sources, settings } } + /// Builds one or two inputs from given sources set. Returns two inputs in cases when there are + /// both Solidity and Yul sources. 
pub fn resolve_and_build(sources: Sources, settings: Settings) -> Vec { let mut solidity_sources = BTreeMap::new(); let mut yul_sources = BTreeMap::new(); diff --git a/src/compile/mod.rs b/src/compile/mod.rs index 7192aa03..1b3db26e 100644 --- a/src/compile/mod.rs +++ b/src/compile/mod.rs @@ -525,6 +525,7 @@ impl Solc { cmd } + /// Either finds an installed Solc version or installs it if it's not found. #[cfg(feature = "svm-solc")] pub fn find_or_install(version: &Version) -> Result { let solc = if let Some(solc) = Self::find_svm_installed_version(version.to_string())? { diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 816fad89..921c78ae 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -95,8 +95,10 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { /// Returns reference to sources included into this input. fn sources(&self) -> &Sources; + /// Returns language of the sources included into this input. fn language(&self) -> Self::Language; + /// Returns compiler version for which this input is intended. fn version(&self) -> &Version; /// Returns compiler name used by reporters to display output during compilation. @@ -211,6 +213,7 @@ impl Default for CompilerOutput { } } +/// Keeps a set of languages recognized by the compiler. pub trait Language: Hash + Eq + Clone + Debug + Display + 'static { /// Extensions of source files recognized by the language set. const FILE_EXTENSIONS: &'static [&'static str]; diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index af65f732..91385d29 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -76,12 +76,14 @@ impl fmt::Display for MultiCompilerLanguage { } } +/// Source parser for the [MultiCompiler]. Recognizes Solc and Vyper sources. #[derive(Debug, Clone)] pub enum MultiCompilerParsedSource { Solc(SolData), Vyper(VyperParsedSource), } +/// Compilation error which may occur when compiling Solidity or Vyper sources. #[derive(Debug, Clone, Serialize)] #[serde(untagged)] pub enum MultiCompilerError { @@ -98,6 +100,7 @@ impl fmt::Display for MultiCompilerError { } } +/// Settings for the [MultiCompiler]. Includes settings for both Solc and Vyper compilers. #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] pub struct MultiCompilerSettings { pub solc: SolcSettings, @@ -127,6 +130,7 @@ impl From for VyperSettings { } } +/// Input for the [MultiCompiler]. Either Solc or Vyper input. #[derive(Debug, Clone, Serialize)] #[serde(untagged)] pub enum MultiCompilerInput { diff --git a/src/compilers/vyper/mod.rs b/src/compilers/vyper/mod.rs index 746a07fd..7756cf61 100644 --- a/src/compilers/vyper/mod.rs +++ b/src/compilers/vyper/mod.rs @@ -28,6 +28,7 @@ pub type VyperCompilerOutput = CompilerOutput; /// File extensions that are recognized as Vyper source files. pub const VYPER_EXTENSIONS: &[&str] = &["vy"]; +/// Vyper language, used as [Compiler::Language] for the Vyper compiler. #[derive(Debug, Clone, Hash, Eq, PartialEq)] #[non_exhaustive] pub struct VyperLanguage; @@ -42,6 +43,7 @@ impl fmt::Display for VyperLanguage { } } +/// Vyper compiler. Wrapper aound vyper binary. 
#[derive(Debug, Clone)] pub struct Vyper { pub path: PathBuf, From 885b6dab432b5761f1213c80ea69edd8dd827d1d Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 30 May 2024 23:02:47 +0200 Subject: [PATCH 20/24] Cow<'static, str> --- src/compilers/mod.rs | 7 ++----- src/compilers/multi.rs | 6 ++---- src/compilers/solc.rs | 8 +++----- src/compilers/vyper/input.rs | 6 +++--- 4 files changed, 10 insertions(+), 17 deletions(-) diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 921c78ae..2a09a0a5 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -11,10 +11,7 @@ use core::fmt; use semver::{Version, VersionReq}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ - collections::{BTreeMap, BTreeSet, HashSet}, - fmt::{Debug, Display}, - hash::Hash, - path::{Path, PathBuf}, + borrow::Cow, collections::{BTreeMap, BTreeSet, HashSet}, fmt::{Debug, Display}, hash::Hash, path::{Path, PathBuf} }; pub mod multi; @@ -102,7 +99,7 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { fn version(&self) -> &Version; /// Returns compiler name used by reporters to display output during compilation. - fn compiler_name(&self) -> String; + fn compiler_name(&self) -> Cow<'static, str>; /// Method which might be invoked to add remappings to the input. fn with_remappings(self, _remappings: Vec) -> Self { diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index 91385d29..9ef5da8d 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -16,9 +16,7 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::BTreeSet, - fmt, - path::{Path, PathBuf}, + borrow::Cow, collections::BTreeSet, fmt, path::{Path, PathBuf} }; /// Compiler capable of compiling both Solidity and Vyper sources. 
@@ -158,7 +156,7 @@ impl CompilerInput for MultiCompilerInput { } } - fn compiler_name(&self) -> String { + fn compiler_name(&self) -> Cow<'static, str> { match self { Self::Solc(input) => input.compiler_name(), Self::Vyper(input) => input.compiler_name(), diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index 5cf258a3..21153466 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -16,9 +16,7 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - collections::BTreeSet, - fmt, - path::{Path, PathBuf}, + borrow::Cow, collections::BTreeSet, fmt, path::{Path, PathBuf} }; #[derive(Debug, Clone)] @@ -172,8 +170,8 @@ impl CompilerInput for SolcVersionedInput { self } - fn compiler_name(&self) -> String { - "Solc".to_string() + fn compiler_name(&self) -> Cow<'static, str> { + "Solc".into() } fn strip_prefix(&mut self, base: &Path) { diff --git a/src/compilers/vyper/input.rs b/src/compilers/vyper/input.rs index be2db846..cb327be1 100644 --- a/src/compilers/vyper/input.rs +++ b/src/compilers/vyper/input.rs @@ -1,4 +1,4 @@ -use std::path::Path; +use std::{borrow::Cow, path::Path}; use super::{settings::VyperSettings, VyperLanguage}; use crate::{artifacts::Sources, compilers::CompilerInput}; @@ -52,8 +52,8 @@ impl CompilerInput for VyperVersionedInput { &self.input.sources } - fn compiler_name(&self) -> String { - "Vyper".to_string() + fn compiler_name(&self) -> Cow<'static, str> { + "Vyper".into() } fn strip_prefix(&mut self, base: &Path) { From a62fc43ed5c6f8ddb00a88f1670a45c62156351a Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Thu, 30 May 2024 23:06:06 +0200 Subject: [PATCH 21/24] fmt --- src/compilers/mod.rs | 6 +++++- src/compilers/multi.rs | 5 ++++- src/compilers/solc.rs | 5 ++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 2a09a0a5..37287976 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -11,7 +11,11 @@ use core::fmt; use semver::{Version, VersionReq}; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ - borrow::Cow, collections::{BTreeMap, BTreeSet, HashSet}, fmt::{Debug, Display}, hash::Hash, path::{Path, PathBuf} + borrow::Cow, + collections::{BTreeMap, BTreeSet, HashSet}, + fmt::{Debug, Display}, + hash::Hash, + path::{Path, PathBuf}, }; pub mod multi; diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index 9ef5da8d..0b8be854 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -16,7 +16,10 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - borrow::Cow, collections::BTreeSet, fmt, path::{Path, PathBuf} + borrow::Cow, + collections::BTreeSet, + fmt, + path::{Path, PathBuf}, }; /// Compiler capable of compiling both Solidity and Vyper sources. 
diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index 21153466..aae011e8 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -16,7 +16,10 @@ use crate::{ use semver::Version; use serde::{Deserialize, Serialize}; use std::{ - borrow::Cow, collections::BTreeSet, fmt, path::{Path, PathBuf} + borrow::Cow, + collections::BTreeSet, + fmt, + path::{Path, PathBuf}, }; #[derive(Debug, Clone)] From c2dc85dd0a8aaa36db6a6987c3871be26c557ed8 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 31 May 2024 01:48:51 +0200 Subject: [PATCH 22/24] vyper compat fixes --- src/compile/project.rs | 38 +++++++++++------------------------ src/compilers/mod.rs | 5 +---- src/compilers/multi.rs | 26 ++++++++++++------------ src/compilers/solc.rs | 4 ---- src/compilers/vyper/error.rs | 16 ++++++++++++++- src/compilers/vyper/input.rs | 22 +++++++++++++++----- src/compilers/vyper/mod.rs | 6 +++--- src/compilers/vyper/parser.rs | 24 +++++++++++++--------- 8 files changed, 76 insertions(+), 65 deletions(-) diff --git a/src/compile/project.rs b/src/compile/project.rs index 06fd1ff2..85133895 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -491,6 +491,18 @@ impl FilteredCompilerSources { let sources = sparse_output.sparse_sources(filtered_sources, &mut opt_settings, graph); + let actually_dirty = + sources.keys().filter(|f| dirty_files.contains(f)).cloned().collect::>(); + + if actually_dirty.is_empty() { + // nothing to compile for this particular language, all dirty files are in the + // other language set + trace!("skip {} run due to empty source set", version); + continue; + } + + trace!("calling {} with {} sources {:?}", version, sources.len(), sources.keys()); + let mut input = C::Input::build(sources, opt_settings, language.clone(), version.clone()) .with_base_path(paths.root.clone()) @@ -498,28 +510,8 @@ impl FilteredCompilerSources { .with_include_paths(include_paths.clone()) .with_remappings(paths.remappings.clone()); - let actually_dirty = input - .sources() - .keys() - .filter(|f| dirty_files.contains(f)) - .cloned() - .collect::>(); - input.strip_prefix(paths.root.as_path()); - if actually_dirty.is_empty() { - // nothing to compile for this particular language, all dirty files are in the - // other language set - trace!("skip {} run due to empty source set", version); - continue; - } - trace!( - "calling {} with {} sources {:?}", - version, - input.sources().len(), - input.sources().keys() - ); - jobs.push((input, actually_dirty)); } } @@ -606,12 +598,6 @@ fn compile_parallel, I: CompilerInput>( // set the reporter on this thread let _guard = report::set_scoped(&scoped_report); - trace!( - "calling solc `{}` with {} sources: {:?}", - input.version(), - input.sources().len(), - input.sources().keys() - ); let start = Instant::now(); report::compiler_spawn( &input.compiler_name(), diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 37287976..7fdf8cc7 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -75,7 +75,7 @@ pub trait CompilerSettings: /// Returns minimal output selection which can be used to optimize compilation. fn minimal_output_selection() -> FileOutputSelection { - BTreeMap::from([("*".to_string(), vec![])]) + BTreeMap::from([("*".to_string(), vec!["abi".to_string()])]) } } @@ -93,9 +93,6 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { version: Version, ) -> Self; - /// Returns reference to sources included into this input. 
- fn sources(&self) -> &Sources; - /// Returns language of the sources included into this input. fn language(&self) -> Self::Language; diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index 0b8be854..2722c0f0 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -2,7 +2,7 @@ use super::{ solc::{SolcCompiler, SolcLanguage, SolcVersionedInput}, vyper::{ error::VyperCompilationError, input::VyperVersionedInput, parser::VyperParsedSource, Vyper, - VyperLanguage, VyperSettings, + VyperLanguage, VyperSettings, VYPER_EXTENSIONS, }, CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, CompilerVersion, Language, ParsedSource, @@ -12,6 +12,7 @@ use crate::{ error::{Result, SolcError}, remappings::Remapping, resolver::parse::SolData, + SOLC_EXTENSIONS, }; use semver::Version; use serde::{Deserialize, Serialize}; @@ -65,7 +66,7 @@ impl From for MultiCompilerLanguage { } impl Language for MultiCompilerLanguage { - const FILE_EXTENSIONS: &'static [&'static str] = &["sol", "vy", "yul"]; + const FILE_EXTENSIONS: &'static [&'static str] = &["sol", "vy", "vyi", "yul"]; } impl fmt::Display for MultiCompilerLanguage { @@ -173,13 +174,6 @@ impl CompilerInput for MultiCompilerInput { } } - fn sources(&self) -> &Sources { - match self { - Self::Solc(input) => input.sources(), - Self::Vyper(input) => input.sources(), - } - } - fn strip_prefix(&mut self, base: &Path) { match self { Self::Solc(input) => input.strip_prefix(base), @@ -259,10 +253,16 @@ impl ParsedSource for MultiCompilerParsedSource { type Language = MultiCompilerLanguage; fn parse(content: &str, file: &std::path::Path) -> Result { - match file.extension().and_then(|e| e.to_str()) { - Some("sol" | "yul") => ::parse(content, file).map(Self::Solc), - Some("vy") => VyperParsedSource::parse(content, file).map(Self::Vyper), - _ => Err(SolcError::msg("unexpected file extension")), + let Some(extension) = file.extension().and_then(|e| e.to_str()) else { + return Err(SolcError::msg("failed to resolve file extension")); + }; + + if SOLC_EXTENSIONS.contains(&extension) { + ::parse(content, file).map(Self::Solc) + } else if VYPER_EXTENSIONS.contains(&extension) { + VyperParsedSource::parse(content, file).map(Self::Vyper) + } else { + Err(SolcError::msg("unexpected file extension")) } } diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index aae011e8..ef8b5112 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -155,10 +155,6 @@ impl CompilerInput for SolcVersionedInput { } } - fn sources(&self) -> &Sources { - &self.input.sources - } - fn language(&self) -> Self::Language { self.input.language.clone() } diff --git a/src/compilers/vyper/error.rs b/src/compilers/vyper/error.rs index 7c739660..17f7f00f 100644 --- a/src/compilers/vyper/error.rs +++ b/src/compilers/vyper/error.rs @@ -49,6 +49,20 @@ impl CompilationError for VyperCompilationError { impl fmt::Display for VyperCompilationError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.message) + if let Some(location) = &self.source_location { + write!(f, "Location: {}", location.file.display())?; + if let Some(line) = location.line { + write!(f, ":{}", line)?; + } + if let Some(offset) = location.offset { + write!(f, ":{}", offset)?; + } + writeln!(f)?; + } + if let Some(message) = &self.formatted_message { + write!(f, "{}", message) + } else { + write!(f, "{}", self.message) + } } } diff --git a/src/compilers/vyper/input.rs b/src/compilers/vyper/input.rs index cb327be1..9a3727e0 100644 --- 
a/src/compilers/vyper/input.rs +++ b/src/compilers/vyper/input.rs @@ -9,6 +9,7 @@ use serde::{Deserialize, Serialize}; pub struct VyperInput { pub language: String, pub sources: Sources, + pub interfaces: Sources, pub settings: VyperSettings, } @@ -22,7 +23,17 @@ pub struct VyperVersionedInput { impl VyperInput { pub fn new(sources: Sources, settings: VyperSettings) -> Self { - VyperInput { language: "Vyper".to_string(), sources, settings } + let mut new_sources = Sources::new(); + let mut interfaces = Sources::new(); + + for (path, content) in sources { + if path.extension().map_or(false, |ext| ext == "vyi") { + interfaces.insert(path, content); + } else { + new_sources.insert(path, content); + } + } + VyperInput { language: "Vyper".to_string(), sources: new_sources, interfaces, settings } } pub fn strip_prefix(&mut self, base: &Path) { @@ -31,6 +42,11 @@ impl VyperInput { .map(|(path, s)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), s)) .collect(); + self.interfaces = std::mem::take(&mut self.interfaces) + .into_iter() + .map(|(path, s)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), s)) + .collect(); + self.settings.strip_prefix(base) } } @@ -48,10 +64,6 @@ impl CompilerInput for VyperVersionedInput { Self { input: VyperInput::new(sources, settings), version } } - fn sources(&self) -> &Sources { - &self.input.sources - } - fn compiler_name(&self) -> Cow<'static, str> { "Vyper".into() } diff --git a/src/compilers/vyper/mod.rs b/src/compilers/vyper/mod.rs index 7756cf61..f984a93c 100644 --- a/src/compilers/vyper/mod.rs +++ b/src/compilers/vyper/mod.rs @@ -26,7 +26,7 @@ pub use settings::VyperSettings; pub type VyperCompilerOutput = CompilerOutput; /// File extensions that are recognized as Vyper source files. -pub const VYPER_EXTENSIONS: &[&str] = &["vy"]; +pub const VYPER_EXTENSIONS: &[&str] = &["vy", "vyi"]; /// Vyper language, used as [Compiler::Language] for the Vyper compiler. #[derive(Debug, Clone, Hash, Eq, PartialEq)] @@ -142,12 +142,12 @@ impl Vyper { let vyper = vyper.into(); let mut cmd = Command::new(vyper.clone()); cmd.arg("--version").stdin(Stdio::piped()).stderr(Stdio::piped()).stdout(Stdio::piped()); - debug!(?cmd, "getting Solc version"); + debug!(?cmd, "getting Vyper version"); let output = cmd.output().map_err(|e| SolcError::io(e, vyper))?; trace!(?output); if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); - Ok(Version::from_str(stdout.trim())?) + Ok(Version::from_str(&stdout.trim().replace("rc", "-rc"))?) 
} else { Err(SolcError::solc_output(&output)) } diff --git a/src/compilers/vyper/parser.rs b/src/compilers/vyper/parser.rs index 3061d6ba..86d456d8 100644 --- a/src/compilers/vyper/parser.rs +++ b/src/compilers/vyper/parser.rs @@ -1,5 +1,5 @@ use crate::{ - compilers::ParsedSource, + compilers::{vyper::VYPER_EXTENSIONS, ParsedSource}, error::{Result, SolcError}, resolver::parse::capture_outer_and_inner, utils::RE_VYPER_VERSION, @@ -54,7 +54,11 @@ impl ParsedSource for VyperParsedSource { 'outer: for import in &self.imports { // skip built-in imports if import.level == 0 - && import.path.as_ref().map(|path| path.starts_with("vyper.")).unwrap_or_default() + && import + .path + .as_ref() + .map(|path| path.starts_with("vyper.") || path.starts_with("ethereum.ercs")) + .unwrap_or_default() { continue; } @@ -99,22 +103,24 @@ impl ParsedSource for VyperParsedSource { path = path.join(part); } - path.set_extension("vy"); - path }; for candidate_dir in candidate_dirs { let candidate = candidate_dir.join(&import_path); - - if candidate.exists() { - imports.push(candidate); - continue 'outer; + for extension in VYPER_EXTENSIONS { + let candidate = candidate.clone().with_extension(extension); + trace!("trying {}", candidate.display()); + if candidate.exists() { + imports.push(candidate); + continue 'outer; + } } } return Err(SolcError::msg(format!( - "failed to resolve import {} at {}", + "failed to resolve import {}{} at {}", + ".".repeat(import.level), import_path.display(), self.path.display() ))); From 7c184f862488de63253f10b16264597576240c78 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Fri, 31 May 2024 01:59:50 +0200 Subject: [PATCH 23/24] fix output selection handling for Vyper --- src/compilers/mod.rs | 8 +------- src/compilers/vyper/input.rs | 4 +++- src/compilers/vyper/settings.rs | 25 +++++++++++++++---------- src/filter.rs | 4 +--- 4 files changed, 20 insertions(+), 21 deletions(-) diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 7fdf8cc7..ca86e0c4 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -1,7 +1,6 @@ use crate::{ artifacts::{ - output_selection::{FileOutputSelection, OutputSelection}, - Contract, FileToContractsMap, SourceFile, Sources, + output_selection::OutputSelection, Contract, FileToContractsMap, SourceFile, Sources, }, error::Result, remappings::Remapping, @@ -72,11 +71,6 @@ pub trait CompilerSettings: /// Ensures that all settings fields are equal except for `output_selection` which is required /// to be a subset of `cached.output_selection`. fn can_use_cached(&self, other: &Self) -> bool; - - /// Returns minimal output selection which can be used to optimize compilation. - fn minimal_output_selection() -> FileOutputSelection { - BTreeMap::from([("*".to_string(), vec!["abi".to_string()])]) - } } /// Input of a compiler, including sources and settings used for their compilation. 
diff --git a/src/compilers/vyper/input.rs b/src/compilers/vyper/input.rs index 9a3727e0..225b5d78 100644 --- a/src/compilers/vyper/input.rs +++ b/src/compilers/vyper/input.rs @@ -57,10 +57,12 @@ impl CompilerInput for VyperVersionedInput { fn build( sources: Sources, - settings: Self::Settings, + mut settings: Self::Settings, _language: Self::Language, version: Version, ) -> Self { + settings.sanitize_output_selection(); + Self { input: VyperInput::new(sources, settings), version } } diff --git a/src/compilers/vyper/settings.rs b/src/compilers/vyper/settings.rs index 4bf8a7be..47bdd15b 100644 --- a/src/compilers/vyper/settings.rs +++ b/src/compilers/vyper/settings.rs @@ -1,10 +1,7 @@ -use std::{collections::BTreeMap, path::Path}; +use std::path::Path; use crate::{ - artifacts::{ - output_selection::{FileOutputSelection, OutputSelection}, - serde_helpers, - }, + artifacts::{output_selection::OutputSelection, serde_helpers}, compilers::CompilerSettings, EvmVersion, }; @@ -54,6 +51,19 @@ impl VyperSettings { .collect(), ); } + + /// During caching we prune output selection for some of the sources, however, Vyper will reject + /// [] as an output selection, so we are adding "abi" as a default output selection which is + /// cheap to be produced. + pub fn sanitize_output_selection(&mut self) { + self.output_selection.0.values_mut().for_each(|selection| { + selection.values_mut().for_each(|selection| { + if selection.is_empty() { + selection.push("abi".to_string()) + } + }) + }); + } } impl CompilerSettings for VyperSettings { @@ -68,9 +78,4 @@ impl CompilerSettings for VyperSettings { && bytecode_metadata == &other.bytecode_metadata && output_selection.is_subset_of(&other.output_selection) } - - fn minimal_output_selection() -> FileOutputSelection { - // Vyper throws an error if empty selection is specified, so we are only requesting ABI. - BTreeMap::from([("*".to_string(), vec!["abi".to_string()])]) - } } diff --git a/src/filter.rs b/src/filter.rs index 2b5e86cd..d33e6e60 100644 --- a/src/filter.rs +++ b/src/filter.rs @@ -227,8 +227,6 @@ impl SparseOutputFilter { .remove("*") .unwrap_or_else(OutputSelection::default_file_output_selection); - let optimized = S::minimal_output_selection(); - for (file, kind) in sources.0.iter() { match kind { SourceCompilationKind::Complete(_) => { @@ -236,7 +234,7 @@ impl SparseOutputFilter { } SourceCompilationKind::Optimized(_) => { trace!("using pruned output selection for {}", file.display()); - selection.insert(format!("{}", file.display()), optimized.clone()); + selection.insert(format!("{}", file.display()), [].into()); } } } From b9d9e781a77c14372d2727607236c53f692040eb Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Sun, 2 Jun 2024 00:32:48 +0200 Subject: [PATCH 24/24] fix conflict --- src/compilers/mod.rs | 3 - src/compilers/solc/mod.rs | 202 -------------------------------------- 2 files changed, 205 deletions(-) delete mode 100644 src/compilers/solc/mod.rs diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index 15b741a6..ca86e0c4 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -58,9 +58,6 @@ impl fmt::Display for CompilerVersion { } } -pub mod solc; -pub mod vyper; - /// Compilation settings including evm_version, output_selection, etc. 
pub trait CompilerSettings: Default + Serialize + DeserializeOwned + Clone + Debug + Send + Sync + 'static diff --git a/src/compilers/solc/mod.rs b/src/compilers/solc/mod.rs deleted file mode 100644 index 084baa63..00000000 --- a/src/compilers/solc/mod.rs +++ /dev/null @@ -1,202 +0,0 @@ -use super::{ - CompilationError, Compiler, CompilerInput, CompilerOutput, CompilerSettings, ParsedSource, -}; -use crate::{ - artifacts::{ - output_selection::OutputSelection, Error, Settings as SolcSettings, SolcInput, Sources, - SOLIDITY, YUL, - }, - error::Result, - remappings::Remapping, - resolver::parse::SolData, - Solc, SOLC_EXTENSIONS, -}; -use itertools::Itertools; -use semver::Version; -use std::{ - collections::{BTreeMap, BTreeSet}, - path::{Path, PathBuf}, -}; - -#[cfg(feature = "svm-solc")] -mod version_manager; -#[cfg(feature = "svm-solc")] -pub use version_manager::SolcVersionManager; - -impl Compiler for Solc { - const FILE_EXTENSIONS: &'static [&'static str] = SOLC_EXTENSIONS; - - type Input = SolcInput; - type CompilationError = crate::artifacts::Error; - type ParsedSource = SolData; - type Settings = SolcSettings; - - fn compile(&self, input: &Self::Input) -> Result> { - let solc_output = self.compile(&input)?; - - let output = CompilerOutput { - errors: solc_output.errors, - contracts: solc_output.contracts, - sources: solc_output.sources, - }; - - Ok(output) - } - - fn version(&self) -> &Version { - &self.version - } - - fn with_allowed_paths(mut self, allowed_paths: BTreeSet) -> Self { - self.allow_paths = allowed_paths; - self - } - - fn with_base_path(mut self, base_path: PathBuf) -> Self { - self.base_path = Some(base_path); - self - } - - fn with_include_paths(mut self, include_paths: BTreeSet) -> Self { - self.include_paths = include_paths; - self - } -} - -impl CompilerInput for SolcInput { - type Settings = SolcSettings; - - /// Creates a new [CompilerInput]s with default settings and the given sources - /// - /// A [CompilerInput] expects a language setting, supported by solc are solidity or yul. 
- /// In case the `sources` is a mix of solidity and yul files, 2 CompilerInputs are returned - fn build(sources: Sources, mut settings: Self::Settings, version: &Version) -> Vec { - settings.sanitize(version); - if let Some(ref mut evm_version) = settings.evm_version { - settings.evm_version = evm_version.normalize_version_solc(version); - } - - let mut solidity_sources = BTreeMap::new(); - let mut yul_sources = BTreeMap::new(); - for (path, source) in sources { - if path.extension() == Some(std::ffi::OsStr::new("yul")) { - yul_sources.insert(path, source); - } else { - solidity_sources.insert(path, source); - } - } - let mut res = Vec::new(); - if !solidity_sources.is_empty() { - res.push(Self { - language: SOLIDITY.to_string(), - sources: solidity_sources, - settings: settings.clone(), - }); - } - if !yul_sources.is_empty() { - if !settings.remappings.is_empty() { - warn!("omitting remappings supplied for the yul sources"); - settings.remappings = vec![]; - } - - if let Some(debug) = settings.debug.as_mut() { - if debug.revert_strings.is_some() { - warn!("omitting revertStrings supplied for the yul sources"); - debug.revert_strings = None; - } - } - res.push(Self { language: YUL.to_string(), sources: yul_sources, settings }); - } - res - } - - fn sources(&self) -> &Sources { - &self.sources - } - - fn with_remappings(mut self, remappings: Vec) -> Self { - if self.language == YUL { - if !remappings.is_empty() { - warn!("omitting remappings supplied for the yul sources"); - } - } else { - self.settings.remappings = remappings; - } - self - } - - fn compiler_name(&self) -> String { - "Solc".to_string() - } - - fn strip_prefix(&mut self, base: &Path) { - self.strip_prefix(base) - } -} - -impl CompilerSettings for SolcSettings { - fn output_selection_mut(&mut self) -> &mut OutputSelection { - &mut self.output_selection - } - - fn can_use_cached(&self, other: &Self) -> bool { - let SolcSettings { - stop_after, - remappings, - optimizer, - model_checker, - metadata, - output_selection, - evm_version, - via_ir, - debug, - libraries, - } = self; - - *stop_after == other.stop_after - && *remappings == other.remappings - && *optimizer == other.optimizer - && *model_checker == other.model_checker - && *metadata == other.metadata - && *evm_version == other.evm_version - && *via_ir == other.via_ir - && *debug == other.debug - && *libraries == other.libraries - && output_selection.is_subset_of(&other.output_selection) - } -} - -impl ParsedSource for SolData { - fn parse(content: &str, file: &std::path::Path) -> Self { - SolData::parse(content, file) - } - - fn version_req(&self) -> Option<&semver::VersionReq> { - self.version_req.as_ref() - } - - fn resolve_imports(&self, _paths: &crate::ProjectPathsConfig) -> Result> { - return Ok(self.imports.iter().map(|i| i.data().path().to_path_buf()).collect_vec()); - } -} - -impl CompilationError for Error { - fn is_warning(&self) -> bool { - self.severity.is_warning() - } - fn is_error(&self) -> bool { - self.severity.is_error() - } - - fn source_location(&self) -> Option { - self.source_location.clone() - } - - fn severity(&self) -> crate::artifacts::error::Severity { - self.severity - } - - fn error_code(&self) -> Option { - self.error_code - } -}