Skip to content
This repository has been archived by the owner on Oct 19, 2024. It is now read-only.

Yul Rust Bindings #993

Closed
wants to merge 13 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ rustdoc-args = ["--cfg", "docsrs"]
features = ["full"]

[features]

celo = [
"ethers-core/celo",
"ethers-providers/celo",
Expand Down
4 changes: 4 additions & 0 deletions ethers-solc/src/artifacts/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,10 @@ impl CompilerInput {
Self { language: "Solidity".to_string(), sources, settings: Default::default() }
}

/// Creates a new compiler input for the Yul language with the given sources and
/// default settings.
pub fn with_yul_sources(sources: Sources) -> Self {
    let language = String::from("Yul");
    Self { language, sources, settings: Default::default() }
}

/// Sets the settings for compilation
#[must_use]
pub fn settings(mut self, settings: Settings) -> Self {
Expand Down
80 changes: 80 additions & 0 deletions ethers-solc/src/compile/project.rs
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,18 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
Ok(Self { edges, project, sources })
}

/// Bootstraps a compiler for the given Yul sources.
///
/// Yul compilation currently only runs sequentially, so the resolved sources are
/// always wrapped in a sequential [`CompilerSources`] variant.
#[cfg(all(feature = "svm", feature = "async"))]
pub fn with_yul_sources(project: &'a Project<T>, sources: Sources) -> Result<Self> {
    let graph = Graph::resolve_sources(&project.paths, sources)?;
    let (versions, edges) = graph.into_sources_by_version(project.offline)?;
    let sources_by_version = versions.get(&project.allowed_lib_paths)?;
    Ok(Self { edges, project, sources: CompilerSources::Sequential(sources_by_version) })
}

/// Compiles the sources with a pinned `Solc` instance
pub fn with_sources_and_solc(
project: &'a Project<T>,
Expand Down Expand Up @@ -172,6 +184,11 @@ impl<'a, T: ArtifactOutput> ProjectCompiler<'a, T> {
self.preprocess()?.compile()?.write_artifacts()?.write_cache()
}

/// Compiles all Yul sources by driving the compiler state machine through every
/// stage: preprocess, compile, write artifacts, write cache.
pub fn compile_yul(self) -> Result<ProjectCompileOutput<T>> {
    let preprocessed = self.preprocess()?;
    let compiled = preprocessed.compile_yul()?;
    compiled.write_artifacts()?.write_cache()
}

/// Does basic preprocessing
/// - sets proper source unit names
/// - check cache
Expand Down Expand Up @@ -204,6 +221,15 @@ impl<'a, T: ArtifactOutput> PreprocessedState<'a, T> {

Ok(CompiledState { output, cache })
}

/// advance to the next state by compiling all sources
fn compile_yul(self) -> Result<CompiledState<'a, T>> {
let PreprocessedState { sources, cache } = self;
let output =
sources.compile_yul(&cache.project().solc_config.settings, &cache.project().paths)?;

Ok(CompiledState { output, cache })
}
}

/// Represents the state after `solc` was successfully invoked
Expand Down Expand Up @@ -307,6 +333,18 @@ impl CompilerSources {
}
}

/// Compiles all the Yul files with `Solc`.
///
/// NOTE(review): Yul compilation currently only runs sequentially, so a
/// `Parallel` input is also compiled sequentially and its job count is ignored.
fn compile_yul(
    self,
    settings: &Settings,
    paths: &ProjectPathsConfig,
) -> Result<AggregatedCompilerOutput> {
    let input = match self {
        CompilerSources::Sequential(input) => input,
        CompilerSources::Parallel(input, _jobs) => input,
    };
    compile_sequential_yul(input, settings, paths)
}

#[cfg(test)]
#[allow(unused)]
fn sources(&self) -> &VersionedSources {
Expand All @@ -317,6 +355,48 @@ impl CompilerSources {
}
}

/// Compiles the Yul input set sequentially and returns an aggregated set of the
/// solc `CompilerOutput`s, one entry per compiler version that was invoked.
fn compile_sequential_yul(
    input: VersionedSources,
    settings: &Settings,
    paths: &ProjectPathsConfig,
) -> Result<AggregatedCompilerOutput> {
    tracing::trace!("compiling {} jobs sequentially", input.len());
    let mut out = AggregatedCompilerOutput::default();
    for (solc, versioned_sources) in input {
        let (version, sources) = versioned_sources;
        // skip empty source sets, there is nothing to compile
        if sources.is_empty() {
            continue
        }
        tracing::trace!(
            "compiling {} sources with solc \"{}\" {:?}",
            sources.len(),
            solc.as_ref().display(),
            solc.args
        );

        // assemble the standard-json input for the Yul language
        let input = CompilerInput::with_yul_sources(sources)
            .settings(settings.clone())
            .normalize_evm_version(&version)
            .with_remappings(paths.remappings.clone());

        tracing::trace!(
            "calling solc `{}` with {} sources {:?}",
            version,
            input.sources.len(),
            input.sources.keys()
        );

        report::solc_spawn(&solc, &version, &input);
        let output = solc.compile_exact(&input)?;
        report::solc_success(&solc, &version, &output);
        tracing::trace!("compiled input, output has error: {}", output.has_error());

        out.extend(version, output);
    }
    Ok(out)
}

/// Compiles the input set sequentially and returns an aggregated set of the solc `CompilerOutput`s
fn compile_sequential(
input: VersionedSources,
Expand Down
23 changes: 22 additions & 1 deletion ethers-solc/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,23 @@ impl<T: ArtifactOutput> Project<T> {
#[cfg(all(feature = "svm", feature = "async"))]
if self.auto_detect {
tracing::trace!("using solc auto detection to compile sources");
return self.svm_compile(sources)
return self.svm_compile(sources);
}

let solc = self.configure_solc(self.solc.clone());

self.compile_with_version(&solc, sources)
}

#[tracing::instrument(skip_all, name = "compile")]
pub fn compile_yul_project(&self) -> Result<ProjectCompileOutput<T>> {
let sources = self.paths.read_input_files()?;
tracing::trace!("found {} sources to compile: {:?}", sources.len(), sources.keys());

#[cfg(all(feature = "svm", feature = "async"))]
if self.auto_detect {
tracing::trace!("using solc auto detection to compile sources");
return self.svm_compile_yul(sources);
}

let solc = self.configure_solc(self.solc.clone());
Expand Down Expand Up @@ -235,6 +251,11 @@ impl<T: ArtifactOutput> Project<T> {
project::ProjectCompiler::with_sources(self, sources)?.compile()
}

/// Compiles the given Yul sources, auto-detecting and installing the required
/// `solc` versions via svm.
#[cfg(all(feature = "svm", feature = "async"))]
pub fn svm_compile_yul(&self, sources: Sources) -> Result<ProjectCompileOutput<T>> {
    let compiler = project::ProjectCompiler::with_yul_sources(self, sources)?;
    compiler.compile_yul()
}

/// Convenience function to compile a single solidity file with the project's settings.
/// Same as [`Self::svm_compile()`] but with the given `file` as input.
///
Expand Down
4 changes: 4 additions & 0 deletions ethers-solc/src/project_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,10 @@ impl<T: ArtifactOutput> TempProject<T> {
self.project().compile()
}

/// Compiles the temp project's Yul sources, delegating to
/// `Project::compile_yul_project`.
pub fn compile_yul(&self) -> Result<ProjectCompileOutput<T>> {
    let inner = self.project();
    inner.compile_yul_project()
}

/// Flattens the given target file, delegating to `Project::flatten`.
pub fn flatten(&self, target: &Path) -> Result<String> {
    let inner = self.project();
    inner.flatten(target)
}
Expand Down
92 changes: 92 additions & 0 deletions ethers-solc/tests/project.rs
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,98 @@ fn can_compile_configured() {
assert!(artifact.ir_optimized.is_some());
}

#[test]
fn can_compile_yul() {
    let mut project = TempProject::<ConfigurableArtifacts>::dapptools().unwrap();

    // A small Yul storage contract with a hand-rolled dispatcher for the
    // selectors 0x6057361d (store) and 0x6d4ce63c (retrieve).
    // NOTE(review): the returned source handle is unused, so it is not bound.
    project
        .add_source(
            "Foo",
            r#"
            object "Foo" {
                code {
                    datacopy(0, dataoffset("Runtime"), datasize("Runtime"))
                    return(0, datasize("Runtime"))
                }
                object "Runtime" {
                    code {
                        function mslice(position, length) -> result {
                            result := div(mload(position), exp(2, sub(256, mul(length, 8))))
                        }

                        function StoreCalldata.sig(pos) -> res {
                            res := mslice(StoreCalldata.sig.position(pos), 4)
                        }

                        function StoreCalldata.sig.position(_pos) -> _offset {
                            function StoreCalldata.sig.position._chunk0(pos) -> __r {
                                __r := 0x00
                            }
                            function StoreCalldata.sig.position._chunk1(pos) -> __r {
                                __r := pos
                            }
                            _offset := add(StoreCalldata.sig.position._chunk0(_pos), add(StoreCalldata.sig.position._chunk1(_pos), 0))
                        }

                        function StoreCalldata.val(pos) -> res {
                            res := mslice(StoreCalldata.val.position(pos), 32)
                        }

                        function StoreCalldata.val.position(_pos) -> _offset {
                            function StoreCalldata.val.position._chunk0(pos) -> __r {
                                __r := 0x04
                            }
                            function StoreCalldata.val.position._chunk1(pos) -> __r {
                                __r := pos
                            }
                            _offset := add(StoreCalldata.val.position._chunk0(_pos), add(StoreCalldata.val.position._chunk1(_pos), 0))
                        }

                        calldatacopy(0, 0, 36) // write calldata to memory
                        switch StoreCalldata.sig(0) // select signature from memory (at position 0)

                        case 0x6057361d { // new signature method
                            sstore(0, StoreCalldata.val(0)) // sstore calldata value
                            log2(0, 0, 0x69404ebde4a368ae324ed310becfefc3edfe9e5ebca74464e37ffffd8309a3c1, StoreCalldata.val(0))
                        }

                        case 0x6d4ce63c {
                            mstore(100, sload(0))
                            return (100, 32)
                        }
                    }
                }
            }
            "#,
        )
        .unwrap();

    // sanity check: the dependency graph must resolve for Yul sources as well
    // (bound as `_graph` because only the successful resolution is asserted)
    let _graph = Graph::resolve(project.paths()).unwrap();

    let compiled = project.compile_yul().unwrap();
    // check for compiler errors before touching artifacts so failures are readable
    assert!(!compiled.has_compiler_errors());

    // a single lookup instead of the redundant `is_some()` + `unwrap()` pair
    let contract = compiled.find("Foo").expect("could not find compiled contract Foo");
    let bytecode = &contract.bytecode.as_ref().unwrap().object;
    assert_eq!(
        bytecode.as_bytes().unwrap().to_string(),
        r#"0x61013c61001060003961013c6000f3fe6100c2565b6000600883026101000360020a82510490
        5092915050565b6000610031600461002c84610038565b610004565b9050919050565b600061005056
        5b6000919050565b6000819050919050565b600061005b83610046565b016100658361003f565b0190
        50919050565b6000610082602061007d84610089565b610004565b9050919050565b60006100a5565b
        600060049050919050565b6000819050919050565b60006100b08361009b565b016100ba8361009056
        5b019050919050565b6024600080376100d2600061001c565b636057361d81146100ed57636d4ce63c8
        11461012e5761013a565b6100f7600061006d565b600055610104600061006d565b7f69404ebde4a368a
        e324ed310becfefc3edfe9e5ebca74464e37ffffd8309a3c1600080a261013a565b600054606452
        60206064f35b50"#
            .replace("\n", "")
            .replace(" ", "")
    );
}

#[test]
fn can_compile_dapp_detect_changes_in_libs() {
let mut project = TempProject::<ConfigurableArtifacts>::dapptools().unwrap();
Expand Down