From a8a5395f357ef26890af526f417418c49b032d17 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 11 Sep 2023 23:47:54 +0100 Subject: [PATCH 01/17] fix: Fix `update_acir` deleting all debug information (#2643) --- compiler/noirc_errors/src/debug_info.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/noirc_errors/src/debug_info.rs b/compiler/noirc_errors/src/debug_info.rs index f62ab08dd77..946841c279b 100644 --- a/compiler/noirc_errors/src/debug_info.rs +++ b/compiler/noirc_errors/src/debug_info.rs @@ -33,7 +33,7 @@ impl DebugInfo { let old_locations = mem::take(&mut self.locations); for (old_opcode_location, source_locations) in old_locations { - let _ = update_map.new_locations(old_opcode_location).map(|new_opcode_location| { + update_map.new_locations(old_opcode_location).for_each(|new_opcode_location| { self.locations.insert(new_opcode_location, source_locations.clone()); }); } From 9da822f59923a9953894c43afd1ddbeffa871dbf Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 00:15:52 +0100 Subject: [PATCH 02/17] fix(wasm): Apply transformation map to circuit debug information in `noir_wasm` (#2635) --- Cargo.lock | 2 ++ compiler/wasm/Cargo.toml | 2 ++ compiler/wasm/src/compile.rs | 52 ++++++++++++------------------------ 3 files changed, 21 insertions(+), 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 872fdf6c79f..043387bde89 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2285,7 +2285,9 @@ dependencies = [ "fm", "getrandom", "gloo-utils", + "iter-extended", "log", + "nargo", "noirc_driver", "noirc_frontend", "serde", diff --git a/compiler/wasm/Cargo.toml b/compiler/wasm/Cargo.toml index efc50ef0476..1049dc92f47 100644 --- a/compiler/wasm/Cargo.toml +++ b/compiler/wasm/Cargo.toml @@ -13,6 +13,8 @@ crate-type = ["cdylib"] [dependencies] acvm.workspace = true fm.workspace = true +iter-extended.workspace = true +nargo.workspace = true noirc_driver.workspace = true noirc_frontend.workspace = true wasm-bindgen.workspace = true diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index a4e7f93ca79..f610ff7e79f 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -1,10 +1,9 @@ -use acvm::acir::circuit::Circuit; use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; +use iter_extended::try_vecmap; use log::debug; use noirc_driver::{ - add_dep, check_crate, compile_contracts, compile_no_check, prepare_crate, prepare_dependency, - CompileOptions, CompiledContract, + add_dep, compile_contracts, compile_main, prepare_crate, prepare_dependency, CompileOptions, }; use noirc_frontend::{graph::CrateGraph, hir::Context}; use serde::{Deserialize, Serialize}; @@ -111,7 +110,10 @@ pub fn compile(args: JsValue) -> JsValue { add_noir_lib(&mut context, dependency.as_str()); } - check_crate(&mut context, crate_id, false).expect("Crate check failed"); + // For now we default to plonk width = 3, though we can add it as a parameter + let np_language = acvm::Language::PLONKCSat { width: 3 }; + #[allow(deprecated)] + let is_opcode_supported = acvm::pwg::default_is_opcode_supported(np_language); if options.contracts { let compiled_contracts = @@ -119,41 +121,21 @@ pub fn compile(args: JsValue) -> JsValue { .expect("Contract compilation failed") .0; - let optimized_contracts: Vec = - compiled_contracts.into_iter().map(optimize_contract).collect(); + let optimized_contracts = try_vecmap(compiled_contracts, |contract| { + 
nargo::ops::optimize_contract(contract, np_language, &is_opcode_supported) + }) + .expect("Contract optimization failed"); ::from_serde(&optimized_contracts).unwrap() } else { - let main = context.get_main_function(&crate_id).expect("Could not find main function!"); - let mut compiled_program = - compile_no_check(&context, &options.compile_options, main).expect("Compilation failed"); + let compiled_program = compile_main(&mut context, crate_id, &options.compile_options) + .expect("Compilation failed") + .0; - compiled_program.circuit = optimize_circuit(compiled_program.circuit); - - ::from_serde(&compiled_program).unwrap() - } -} + let optimized_program = + nargo::ops::optimize_program(compiled_program, np_language, &is_opcode_supported) + .expect("Program optimization failed"); -fn optimize_contract(contract: CompiledContract) -> CompiledContract { - CompiledContract { - name: contract.name, - functions: contract - .functions - .into_iter() - .map(|mut func| { - func.bytecode = optimize_circuit(func.bytecode); - func - }) - .collect(), + ::from_serde(&optimized_program).unwrap() } } - -fn optimize_circuit(circuit: Circuit) -> Circuit { - // For now we default to plonk width = 3, though we can add it as a parameter - let language = acvm::Language::PLONKCSat { width: 3 }; - #[allow(deprecated)] - let opcode_supported = acvm::pwg::default_is_opcode_supported(language); - acvm::compiler::compile(circuit, language, opcode_supported) - .expect("Circuit optimization failed") - .0 -} From 3f8715534384b5bbf14a8538122235cbe7a19906 Mon Sep 17 00:00:00 2001 From: Yordan Madzhunkov <52652109+yordanmadzhunkov@users.noreply.github.com> Date: Tue, 12 Sep 2023 13:57:54 +0300 Subject: [PATCH 03/17] chore: Fix clippy warnings for rust version 1.67.0 (#2661) Co-authored-by: Yordan Madzhunkov --- .../src/brillig/brillig_gen/brillig_block.rs | 2 +- .../src/brillig/brillig_ir/debug_show.rs | 2 +- compiler/noirc_evaluator/src/errors.rs | 2 +- compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs | 6 +----- .../noirc_evaluator/src/ssa/opt/defunctionalize.rs | 3 +-- compiler/noirc_frontend/src/ast/statement.rs | 2 +- compiler/noirc_frontend/src/ast/traits.rs | 11 +++++------ .../noirc_frontend/src/hir/def_collector/errors.rs | 4 ++-- compiler/noirc_frontend/src/lexer/errors.rs | 2 +- compiler/noirc_frontend/src/lexer/token.rs | 2 +- compiler/noirc_frontend/src/parser/labels.rs | 2 +- tooling/acvm_backend_barretenberg/build.rs | 4 ++-- tooling/lsp/src/lib.rs | 8 ++++---- tooling/nargo/src/ops/foreign_calls.rs | 2 +- tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs | 2 +- tooling/nargo_cli/src/cli/fs/program.rs | 2 +- 16 files changed, 25 insertions(+), 31 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 2e31f618e8f..c54be4faa50 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -87,7 +87,7 @@ impl<'block> BrilligBlock<'block> { /// /// This is so that during linking there are no duplicates or labels being overwritten. fn create_block_label(function_id: FunctionId, block_id: BasicBlockId) -> String { - format!("{}-{}", function_id, block_id) + format!("{function_id}-{block_id}") } /// Converts an SSA terminator instruction into the necessary opcodes. 
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index f330a85bc51..cc13b959095 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -99,7 +99,7 @@ impl DebugToString for BrilligBinaryOp { if *bit_size >= BRILLIG_MEMORY_ADDRESSING_BIT_SIZE { op.into() } else { - format!("{}:{}", op, bit_size) + format!("{op}:{bit_size}") } } } diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index da2a09d3093..2d0d73e9c87 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -46,7 +46,7 @@ pub enum RuntimeError { // assert(foo < bar) fails with "failed constraint: 0 = 1." fn format_failed_constraint(message: &Option) -> String { match message { - Some(message) => format!("Failed constraint: '{}'", message), + Some(message) => format!("Failed constraint: '{message}'"), None => "Failed constraint".to_owned(), } } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 81c79eda064..d4b7124b97f 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -994,11 +994,7 @@ impl Context { // If either side is a numeric type, then we expect their types to be // the same. (Type::Numeric(lhs_type), Type::Numeric(rhs_type)) => { - assert_eq!( - lhs_type, rhs_type, - "lhs and rhs types in {:?} are not the same", - binary - ); + assert_eq!(lhs_type, rhs_type, "lhs and rhs types in {binary:?} are not the same"); Type::Numeric(lhs_type) } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index a2f4aedf7da..62b335be1e2 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -250,8 +250,7 @@ fn create_apply_functions( for (signature, variants) in variants_map.into_iter() { assert!( !variants.is_empty(), - "ICE: at least one variant should exist for a dynamic call {:?}", - signature + "ICE: at least one variant should exist for a dynamic call {signature:?}" ); let dispatches_to_multiple_functions = variants.len() > 1; diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index e48cf7b5457..51afa688082 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -247,7 +247,7 @@ impl Display for UseTree { write!(f, "{name}")?; while let Some(alias) = alias { - write!(f, " as {}", alias)?; + write!(f, " as {alias}")?; } Ok(()) diff --git a/compiler/noirc_frontend/src/ast/traits.rs b/compiler/noirc_frontend/src/ast/traits.rs index 93587c13b92..0120b70e5e8 100644 --- a/compiler/noirc_frontend/src/ast/traits.rs +++ b/compiler/noirc_frontend/src/ast/traits.rs @@ -144,21 +144,20 @@ impl Display for TraitItem { write!( f, - "fn {name}<{}>({}) -> {} where {}", - generics, parameters, return_type, where_clause + "fn {name}<{generics}>({parameters}) -> {return_type} where {where_clause}" )?; if let Some(body) = body { - write!(f, "{}", body) + write!(f, "{body}") } else { write!(f, ";") } } TraitItem::Constant { name, typ, default_value } => { - write!(f, "let {}: {}", name, typ)?; + write!(f, "let {name}: {typ}")?; if let Some(default_value) = default_value { - write!(f, "{};", 
default_value) + write!(f, "{default_value};") } else { write!(f, ";") } @@ -209,7 +208,7 @@ impl Display for TraitImplItem { TraitImplItem::Function(function) => function.fmt(f), TraitImplItem::Type { name, alias } => write!(f, "type {name} = {alias};"), TraitImplItem::Constant(name, typ, value) => { - write!(f, "let {}: {} = {};", name, typ, value) + write!(f, "let {name}: {typ} = {value};") } } } diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index 9be63533bb5..ec5de088574 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -118,7 +118,7 @@ impl From for Diagnostic { ), DefCollectorErrorKind::NonStructTraitImpl { trait_ident, span } => { Diagnostic::simple_error( - format!("Only struct types may implement trait `{}`", trait_ident), + format!("Only struct types may implement trait `{trait_ident}`"), "Only struct types may implement traits".into(), span, ) @@ -129,7 +129,7 @@ impl From for Diagnostic { span, ), DefCollectorErrorKind::TraitNotFound { trait_ident } => Diagnostic::simple_error( - format!("Trait {} not found", trait_ident), + format!("Trait {trait_ident} not found"), "".to_string(), trait_ident.span(), ), diff --git a/compiler/noirc_frontend/src/lexer/errors.rs b/compiler/noirc_frontend/src/lexer/errors.rs index 0b6440dec44..6b382d76f40 100644 --- a/compiler/noirc_frontend/src/lexer/errors.rs +++ b/compiler/noirc_frontend/src/lexer/errors.rs @@ -47,7 +47,7 @@ impl LexerErrorKind { ( "an unexpected character was found".to_string(), - format!(" expected {expected} , but got {}", found), + format!(" expected {expected} , but got {found}"), *span, ) }, diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index 89b44292093..adbf8f65758 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -351,7 +351,7 @@ impl fmt::Display for TestScope { match self { TestScope::None => write!(f, ""), TestScope::ShouldFailWith { reason } => match reason { - Some(failure_reason) => write!(f, "(should_fail_with = ({}))", failure_reason), + Some(failure_reason) => write!(f, "(should_fail_with = ({failure_reason}))"), None => write!(f, "should_fail"), }, } diff --git a/compiler/noirc_frontend/src/parser/labels.rs b/compiler/noirc_frontend/src/parser/labels.rs index b43c10fb9e7..fd082dfbe56 100644 --- a/compiler/noirc_frontend/src/parser/labels.rs +++ b/compiler/noirc_frontend/src/parser/labels.rs @@ -36,7 +36,7 @@ impl fmt::Display for ParsingRuleLabel { ParsingRuleLabel::Statement => write!(f, "statement"), ParsingRuleLabel::Term => write!(f, "term"), ParsingRuleLabel::TypeExpression => write!(f, "type expression"), - ParsingRuleLabel::TokenKind(token_kind) => write!(f, "{:?}", token_kind), + ParsingRuleLabel::TokenKind(token_kind) => write!(f, "{token_kind:?}"), } } } diff --git a/tooling/acvm_backend_barretenberg/build.rs b/tooling/acvm_backend_barretenberg/build.rs index 39ff3d14a3c..e4d213cfa38 100644 --- a/tooling/acvm_backend_barretenberg/build.rs +++ b/tooling/acvm_backend_barretenberg/build.rs @@ -21,12 +21,12 @@ fn main() -> Result<(), String> { let os = match build_target::target_os().unwrap() { os @ (Os::Linux | Os::MacOs) => os, Os::Windows => todo!("Windows is not currently supported"), - os_name => panic!("Unsupported OS {}", os_name), + os_name => panic!("Unsupported OS {os_name}"), }; let arch = match 
build_target::target_arch().unwrap() { arch @ (Arch::X86_64 | Arch::AARCH64) => arch, - arch_name => panic!("Unsupported Architecture {}", arch_name), + arch_name => panic!("Unsupported Architecture {arch_name}"), }; // Arm builds of linux are not supported diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index d08a604b77a..00381e79a82 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -170,7 +170,7 @@ fn on_code_lens_request( // We can reconsider this when we can build a file without the need for a Nargo.toml file to resolve deps let _ = state.client.log_message(LogMessageParams { typ: MessageType::WARNING, - message: format!("{}", err), + message: format!("{err}"), }); return future::ready(Ok(None)); } @@ -181,7 +181,7 @@ fn on_code_lens_request( // If we found a manifest, but the workspace is invalid, we raise an error about it return future::ready(Err(ResponseError::new( ErrorCode::REQUEST_FAILED, - format!("{}", err), + format!("{err}"), ))); } }; @@ -388,7 +388,7 @@ fn on_did_save_text_document( // We can reconsider this when we can build a file without the need for a Nargo.toml file to resolve deps let _ = state.client.log_message(LogMessageParams { typ: MessageType::WARNING, - message: format!("{}", err), + message: format!("{err}"), }); return ControlFlow::Continue(()); } @@ -399,7 +399,7 @@ fn on_did_save_text_document( // If we found a manifest, but the workspace is invalid, we raise an error about it return ControlFlow::Break(Err(ResponseError::new( ErrorCode::REQUEST_FAILED, - format!("{}", err), + format!("{err}"), ) .into())); } diff --git a/tooling/nargo/src/ops/foreign_calls.rs b/tooling/nargo/src/ops/foreign_calls.rs index 8eac516a7e9..db8cdceb20a 100644 --- a/tooling/nargo/src/ops/foreign_calls.rs +++ b/tooling/nargo/src/ops/foreign_calls.rs @@ -73,7 +73,7 @@ impl ForeignCall { ], }) } - None => panic!("unexpected foreign call {:?}", foreign_call_name), + None => panic!("unexpected foreign call {foreign_call_name:?}"), } } diff --git a/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs index 4a2536d2f92..38ff6d3b744 100644 --- a/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs +++ b/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs @@ -9,7 +9,7 @@ pub(crate) struct LsCommand; pub(crate) fn run(_args: LsCommand) -> Result<(), CliError> { for backend in get_available_backends() { - println!("{}", backend); + println!("{backend}"); } Ok(()) diff --git a/tooling/nargo_cli/src/cli/fs/program.rs b/tooling/nargo_cli/src/cli/fs/program.rs index 3f7107de667..ac5e6c5c32f 100644 --- a/tooling/nargo_cli/src/cli/fs/program.rs +++ b/tooling/nargo_cli/src/cli/fs/program.rs @@ -31,7 +31,7 @@ pub(crate) fn save_debug_artifact_to_file>( circuit_name: &str, circuit_dir: P, ) -> PathBuf { - let artifact_name = format!("debug_{}", circuit_name); + let artifact_name = format!("debug_{circuit_name}"); save_build_artifact_to_file(debug_artifact, &artifact_name, circuit_dir) } From 87ad7d7f55c70e966cb9d31ca42ccf0c9f9db451 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 12:29:03 +0100 Subject: [PATCH 04/17] chore: use `DebugArtifact`s instead of `FileManager` to report errors (#2641) --- Cargo.lock | 1 + compiler/fm/src/lib.rs | 2 +- tooling/nargo/Cargo.toml | 1 + tooling/nargo/src/artifacts/debug.rs | 32 +++++++- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 2 +- tooling/nargo_cli/src/cli/compile_cmd.rs | 76 ++++++++++--------- 
tooling/nargo_cli/src/cli/execute_cmd.rs | 27 ++++--- tooling/nargo_cli/src/cli/info_cmd.rs | 7 +- tooling/nargo_cli/src/cli/prove_cmd.rs | 4 +- tooling/nargo_cli/src/cli/verify_cmd.rs | 2 +- 10 files changed, 97 insertions(+), 57 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 043387bde89..563681123ed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2173,6 +2173,7 @@ version = "0.11.1" dependencies = [ "acvm", "base64", + "codespan-reporting", "fm", "iter-extended", "noirc_abi", diff --git a/compiler/fm/src/lib.rs b/compiler/fm/src/lib.rs index 41a9d9ed0a8..f615555601c 100644 --- a/compiler/fm/src/lib.rs +++ b/compiler/fm/src/lib.rs @@ -6,7 +6,7 @@ mod file_map; mod file_reader; -pub use file_map::{File, FileId, FileMap}; +pub use file_map::{File, FileId, FileMap, PathString}; use file_reader::is_stdlib_asset; use std::{ diff --git a/tooling/nargo/Cargo.toml b/tooling/nargo/Cargo.toml index cefc7ffb154..9783fe5d8ff 100644 --- a/tooling/nargo/Cargo.toml +++ b/tooling/nargo/Cargo.toml @@ -22,3 +22,4 @@ iter-extended.workspace = true serde.workspace = true thiserror.workspace = true base64.workspace = true +codespan-reporting.workspace = true diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index c60b0752e50..2a201a82c48 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -1,11 +1,13 @@ +use codespan_reporting::files::{Error, Files, SimpleFile}; use noirc_errors::debug_info::DebugInfo; use serde::{Deserialize, Serialize}; use std::{ collections::{BTreeMap, BTreeSet}, + ops::Range, path::PathBuf, }; -use fm::{FileId, FileManager}; +use fm::{FileId, FileManager, PathString}; /// For a given file, we store the source code and the path to the file /// so consumers of the debug artifact can reconstruct the original source code structure. @@ -52,3 +54,31 @@ impl DebugArtifact { Self { debug_symbols, file_map } } } + +impl<'a> Files<'a> for DebugArtifact { + type FileId = FileId; + type Name = PathString; + type Source = &'a str; + + fn name(&self, file_id: Self::FileId) -> Result { + self.file_map.get(&file_id).ok_or(Error::FileMissing).map(|file| file.path.clone().into()) + } + + fn source(&'a self, file_id: Self::FileId) -> Result { + self.file_map.get(&file_id).ok_or(Error::FileMissing).map(|file| file.source.as_ref()) + } + + fn line_index(&self, file_id: Self::FileId, byte_index: usize) -> Result { + self.file_map.get(&file_id).ok_or(Error::FileMissing).and_then(|file| { + SimpleFile::new(PathString::from(file.path.clone()), file.source.clone()) + .line_index((), byte_index) + }) + } + + fn line_range(&self, file_id: Self::FileId, line_index: usize) -> Result, Error> { + self.file_map.get(&file_id).ok_or(Error::FileMissing).and_then(|file| { + SimpleFile::new(PathString::from(file.path.clone()), file.source.clone()) + .line_range((), line_index) + }) + } +} diff --git a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 36e660e02e0..4589c0f7cfb 100644 --- a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -81,7 +81,7 @@ fn smart_contract_for_package( let preprocessed_program = if circuit_build_path.exists() { read_program_from_file(circuit_build_path)? 
} else { - let (_, program) = + let (program, _) = compile_package(package, compile_options, np_language, &is_opcode_supported)?; PreprocessedProgram { diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index a56dbc13517..251af055107 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -14,7 +14,6 @@ use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelec use noirc_driver::{ compile_main, CompilationResult, CompileOptions, CompiledContract, CompiledProgram, }; -use noirc_errors::debug_info::DebugInfo; use noirc_frontend::graph::CrateName; use clap::Args; @@ -69,17 +68,22 @@ pub(crate) fn run( for package in &workspace { // If `contract` package type, we're compiling every function in a 'contract' rather than just 'main'. if package.is_contract() { - let (file_manager, contracts) = compile_contracts( + let contracts_with_debug_artifacts = compile_contracts( package, &args.compile_options, np_language, &is_opcode_supported, )?; - save_contracts(&file_manager, contracts, package, &circuit_dir, args.output_debug); + save_contracts( + contracts_with_debug_artifacts, + package, + &circuit_dir, + args.output_debug, + ); } else { - let (file_manager, program) = + let (program, debug_artifact) = compile_package(package, &args.compile_options, np_language, &is_opcode_supported)?; - save_program(&file_manager, program, package, &circuit_dir, args.output_debug); + save_program(debug_artifact, program, package, &circuit_dir, args.output_debug); } } @@ -91,7 +95,7 @@ pub(crate) fn compile_package( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result<(FileManager, CompiledProgram), CliError> { +) -> Result<(CompiledProgram, DebugArtifact), CliError> { if package.is_library() { return Err(CompileError::LibraryCrate(package.name.clone()).into()); } @@ -105,7 +109,10 @@ pub(crate) fn compile_package( nargo::ops::optimize_program(program, np_language, &is_opcode_supported) .expect("Backend does not support an opcode that is in the IR"); - Ok((context.file_manager, optimized_program)) + let debug_artifact = + DebugArtifact::new(vec![optimized_program.debug.clone()], &context.file_manager); + + Ok((optimized_program, debug_artifact)) } pub(crate) fn compile_contracts( @@ -113,7 +120,7 @@ pub(crate) fn compile_contracts( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result<(FileManager, Vec), CliError> { +) -> Result, CliError> { let (mut context, crate_id) = prepare_package(package); let result = noirc_driver::compile_contracts(&mut context, crate_id, compile_options); let contracts = report_errors(result, &context.file_manager, compile_options.deny_warnings)?; @@ -121,11 +128,19 @@ pub(crate) fn compile_contracts( let optimized_contracts = try_vecmap(contracts, |contract| { nargo::ops::optimize_contract(contract, np_language, &is_opcode_supported) })?; - Ok((context.file_manager, optimized_contracts)) + + let contracts_with_debug_artifacts = vecmap(optimized_contracts, |contract| { + let debug_infos = vecmap(&contract.functions, |func| func.debug.clone()); + let debug_artifact = DebugArtifact::new(debug_infos, &context.file_manager); + + (contract, debug_artifact) + }); + + Ok(contracts_with_debug_artifacts) } fn save_program( - file_manager: &FileManager, + debug_artifact: DebugArtifact, program: CompiledProgram, package: &Package, circuit_dir: &Path, @@ 
-140,15 +155,13 @@ fn save_program( save_program_to_file(&preprocessed_program, &package.name, circuit_dir); if output_debug { - let debug_artifact = DebugArtifact::new(vec![program.debug], file_manager); let circuit_name: String = (&package.name).into(); save_debug_artifact_to_file(&debug_artifact, &circuit_name, circuit_dir); } } fn save_contracts( - file_manager: &FileManager, - contracts: Vec, + contracts: Vec<(CompiledContract, DebugArtifact)>, package: &Package, circuit_dir: &Path, output_debug: bool, @@ -157,35 +170,29 @@ fn save_contracts( // As can be seen here, It seems like a leaky abstraction where ContractFunctions (essentially CompiledPrograms) // are compiled via nargo-core and then the PreprocessedContract is constructed here. // This is due to EACH function needing it's own CRS, PKey, and VKey from the backend. - let preprocessed_contracts: Vec<(PreprocessedContract, Vec)> = - vecmap(contracts, |contract| { - let preprocess_result = vecmap(contract.functions, |func| { - ( - PreprocessedContractFunction { - name: func.name, - function_type: func.function_type, - is_internal: func.is_internal, - abi: func.abi, - - bytecode: func.bytecode, - }, - func.debug, - ) - }); - - let (preprocessed_contract_functions, debug_infos): (Vec<_>, Vec<_>) = - preprocess_result.into_iter().unzip(); + let preprocessed_contracts: Vec<(PreprocessedContract, DebugArtifact)> = + vecmap(contracts, |(contract, debug_artifact)| { + let preprocessed_functions = + vecmap(contract.functions, |func| PreprocessedContractFunction { + name: func.name, + function_type: func.function_type, + is_internal: func.is_internal, + abi: func.abi, + + bytecode: func.bytecode, + }); ( PreprocessedContract { name: contract.name, backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_contract_functions, + functions: preprocessed_functions, }, - debug_infos, + debug_artifact, ) }); - for (contract, debug_infos) in preprocessed_contracts { + + for (contract, debug_artifact) in preprocessed_contracts { save_contract_to_file( &contract, &format!("{}-{}", package.name, contract.name), @@ -193,7 +200,6 @@ fn save_contracts( ); if output_debug { - let debug_artifact = DebugArtifact::new(debug_infos, file_manager); save_debug_artifact_to_file( &debug_artifact, &format!("{}-{}", package.name, contract.name), diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index ccbc5d03776..bce14027587 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -3,7 +3,8 @@ use acvm::acir::{circuit::Circuit, native_types::WitnessMap}; use acvm::pwg::{ErrorLocation, OpcodeResolutionError}; use acvm::Language; use clap::Args; -use fm::FileManager; + +use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::{ExecutionError, NargoError}; use nargo::package::Package; @@ -11,7 +12,7 @@ use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelec use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::{Abi, InputMap}; use noirc_driver::{CompileOptions, CompiledProgram}; -use noirc_errors::{debug_info::DebugInfo, CustomDiagnostic}; +use noirc_errors::CustomDiagnostic; use noirc_frontend::graph::CrateName; use super::compile_cmd::compile_package; @@ -86,15 +87,15 @@ fn execute_package( np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result<(Option, WitnessMap), CliError> { - let (context, compiled_program) = + let (compiled_program, 
debug_artifact) = compile_package(package, compile_options, np_language, &is_opcode_supported)?; - let CompiledProgram { abi, circuit, debug } = compiled_program; + let CompiledProgram { abi, circuit, .. } = compiled_program; // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &abi)?; let solved_witness = - execute_program(backend, circuit, &abi, &inputs_map, Some((debug, context)))?; + execute_program(backend, circuit, &abi, &inputs_map, Some(debug_artifact))?; let public_abi = abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; @@ -143,14 +144,16 @@ fn extract_opcode_error_from_nargo_error( /// the resolved call stack and any other relevant error information returned from the ACVM. fn report_error_with_opcode_locations( opcode_err_info: Option<(Vec, &ExecutionError)>, - debug: &DebugInfo, - file_manager: &FileManager, + debug_artifact: &DebugArtifact, ) { if let Some((opcode_locations, opcode_err)) = opcode_err_info { let source_locations: Vec<_> = opcode_locations .iter() .flat_map(|opcode_location| { - let locations = debug.opcode_location(opcode_location); + // This assumes that we're executing the circuit which corresponds to the first `DebugInfo`. + // This holds for all binary crates as there is only one `DebugInfo`. + assert_eq!(debug_artifact.debug_symbols.len(), 1); + let locations = debug_artifact.debug_symbols[0].opcode_location(opcode_location); locations.unwrap_or_default() }) .collect(); @@ -184,7 +187,7 @@ fn report_error_with_opcode_locations( CustomDiagnostic::simple_error(message, String::new(), location.span) .in_file(location.file) .with_call_stack(source_locations) - .report(file_manager.as_file_map(), false); + .report(debug_artifact, false); } } } @@ -194,7 +197,7 @@ pub(crate) fn execute_program( circuit: Circuit, abi: &Abi, inputs_map: &InputMap, - debug_data: Option<(DebugInfo, FileManager)>, + debug_data: Option, ) -> Result { #[allow(deprecated)] let blackbox_solver = acvm::blackbox_solver::BarretenbergSolver::new(); @@ -206,9 +209,9 @@ pub(crate) fn execute_program( match solved_witness_err { Ok(solved_witness) => Ok(solved_witness), Err(err) => { - if let Some((debug, file_manager)) = debug_data { + if let Some(debug_data) = debug_data { let opcode_err_info = extract_opcode_error_from_nargo_error(&err); - report_error_with_opcode_locations(opcode_err_info, &debug, &file_manager); + report_error_with_opcode_locations(opcode_err_info, &debug_data); } Err(crate::errors::CliError::NargoError(err)) diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index 83e54fc109c..b030c21f312 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -171,7 +171,7 @@ fn count_opcodes_and_gates_in_program( np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result { - let (_, compiled_program) = + let (compiled_program, _) = compile_package(package, compile_options, np_language, &is_opcode_supported)?; let (language, _) = backend.get_backend_info()?; @@ -190,11 +190,10 @@ fn count_opcodes_and_gates_in_contracts( np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result, CliError> { - let (_, contracts) = - compile_contracts(package, compile_options, np_language, &is_opcode_supported)?; + let contracts = compile_contracts(package, compile_options, np_language, &is_opcode_supported)?; let (language, _) = backend.get_backend_info()?; - 
try_vecmap(contracts, |contract| { + try_vecmap(contracts, |(contract, _)| { let functions = try_vecmap(contract.functions, |function| -> Result<_, BackendError> { Ok(FunctionInfo { name: function.name, diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index 417a0872d0a..b2a2ff1856b 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -101,14 +101,14 @@ pub(crate) fn prove_package( (program, None) } else { - let (context, program) = + let (program, debug_artifact) = compile_package(package, compile_options, np_language, &is_opcode_supported)?; let preprocessed_program = PreprocessedProgram { backend: String::from(BACKEND_IDENTIFIER), abi: program.abi, bytecode: program.circuit, }; - (preprocessed_program, Some((program.debug, context))) + (preprocessed_program, Some(debug_artifact)) }; let PreprocessedProgram { abi, bytecode, .. } = preprocessed_program; diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs index d9cea3ae586..4b57c70875f 100644 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -85,7 +85,7 @@ fn verify_package( let preprocessed_program = if circuit_build_path.exists() { read_program_from_file(circuit_build_path)? } else { - let (_, program) = + let (program, _) = compile_package(package, compile_options, np_language, &is_opcode_supported)?; PreprocessedProgram { From 1114871d538767c053d71c67577890dd29f0b490 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 12:50:35 +0100 Subject: [PATCH 05/17] fix: remove duplicate file extension in stack trace (#2655) --- compiler/noirc_errors/src/reporter.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/noirc_errors/src/reporter.rs b/compiler/noirc_errors/src/reporter.rs index bb8f169912a..d695b2007bc 100644 --- a/compiler/noirc_errors/src/reporter.rs +++ b/compiler/noirc_errors/src/reporter.rs @@ -199,7 +199,7 @@ fn stack_trace<'files>( let source = files.source(call_item.file).expect("should get file source"); let (line, column) = location(source.as_ref(), call_item.span.start()); - result += &format!("{}. {}.nr:{}:{}\n", i + 1, path, line, column); + result += &format!("{}. 
{}:{}:{}\n", i + 1, path, line, column); } result From 7f6fe46f8bc00f24ff8d14b3a517e27b50db4ee5 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 12:58:59 +0100 Subject: [PATCH 06/17] fix: fix compilation using `aztec` feature flag (#2663) --- .../src/hir/def_map/aztec_library.rs | 33 ++++++++++--------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs b/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs index efd947a6c04..ef8c1872f1d 100644 --- a/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs +++ b/compiler/noirc_frontend/src/hir/def_map/aztec_library.rs @@ -2,12 +2,13 @@ use acvm::FieldElement; use noirc_errors::{CustomDiagnostic, Span}; use crate::graph::CrateId; +use crate::token::SecondaryAttribute; use crate::{ - hir::Context, token::Attribute, BlockExpression, CallExpression, CastExpression, Distinctness, - Expression, ExpressionKind, ForExpression, FunctionReturnType, Ident, ImportStatement, - IndexExpression, LetStatement, Literal, MemberAccessExpression, MethodCallExpression, - NoirFunction, ParsedModule, Path, PathKind, Pattern, Statement, UnresolvedType, - UnresolvedTypeData, Visibility, + hir::Context, BlockExpression, CallExpression, CastExpression, Distinctness, Expression, + ExpressionKind, ForExpression, FunctionReturnType, Ident, ImportStatement, IndexExpression, + LetStatement, Literal, MemberAccessExpression, MethodCallExpression, NoirFunction, + ParsedModule, Path, PathKind, Pattern, Statement, UnresolvedType, UnresolvedTypeData, + Visibility, }; use noirc_errors::FileDiagnostic; @@ -188,17 +189,19 @@ fn check_for_aztec_dependency( fn transform_module(functions: &mut [NoirFunction]) -> bool { let mut has_annotated_functions = false; for func in functions.iter_mut() { - if let Some(Attribute::Custom(custom_attribute)) = func.def.attribute.as_ref() { - match custom_attribute.as_str() { - "aztec(private)" => { - transform_function("Private", func); - has_annotated_functions = true; + for secondary_attribute in func.def.attributes.secondary.clone() { + if let SecondaryAttribute::Custom(custom_attribute) = secondary_attribute { + match custom_attribute.as_str() { + "aztec(private)" => { + transform_function("Private", func); + has_annotated_functions = true; + } + "aztec(public)" => { + transform_function("Public", func); + has_annotated_functions = true; + } + _ => continue, } - "aztec(public)" => { - transform_function("Public", func); - has_annotated_functions = true; - } - _ => continue, } } } From b80cd409a0f3fb8b5faa9d774f5d683499b1fba2 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 13:32:50 +0100 Subject: [PATCH 07/17] chore: defer reporting of errors until after compilation and optimization is finished (#2659) --- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 4 +- tooling/nargo_cli/src/cli/compile_cmd.rs | 82 +++++++++++++++---- tooling/nargo_cli/src/cli/execute_cmd.rs | 4 +- tooling/nargo_cli/src/cli/info_cmd.rs | 12 ++- tooling/nargo_cli/src/cli/prove_cmd.rs | 4 +- tooling/nargo_cli/src/cli/verify_cmd.rs | 4 +- 6 files changed, 82 insertions(+), 28 deletions(-) diff --git a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 4589c0f7cfb..6199bf0761d 100644 --- a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -2,7 +2,7 @@ use 
std::path::PathBuf; use super::NargoConfig; use super::{ - compile_cmd::compile_package, + compile_cmd::compile_bin_package, fs::{create_named_dir, program::read_program_from_file, write_to_file}, }; use crate::backends::Backend; @@ -82,7 +82,7 @@ fn smart_contract_for_package( read_program_from_file(circuit_build_path)? } else { let (program, _) = - compile_package(package, compile_options, np_language, &is_opcode_supported)?; + compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; PreprocessedProgram { backend: String::from(BACKEND_IDENTIFIER), diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 251af055107..7a06b43937a 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -11,9 +11,7 @@ use nargo::artifacts::program::PreprocessedProgram; use nargo::package::Package; use nargo::prepare_package; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{ - compile_main, CompilationResult, CompileOptions, CompiledContract, CompiledProgram, -}; +use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; use noirc_frontend::graph::CrateName; use clap::Args; @@ -68,12 +66,18 @@ pub(crate) fn run( for package in &workspace { // If `contract` package type, we're compiling every function in a 'contract' rather than just 'main'. if package.is_contract() { - let contracts_with_debug_artifacts = compile_contracts( + let (file_manager, compilation_result) = compile_contracts( package, &args.compile_options, np_language, &is_opcode_supported, + ); + let contracts_with_debug_artifacts = report_errors( + compilation_result, + &file_manager, + args.compile_options.deny_warnings, )?; + save_contracts( contracts_with_debug_artifacts, package, @@ -81,8 +85,14 @@ pub(crate) fn run( args.output_debug, ); } else { - let (program, debug_artifact) = - compile_package(package, &args.compile_options, np_language, &is_opcode_supported)?; + let (file_manager, compilation_result) = + compile_program(package, &args.compile_options, np_language, &is_opcode_supported); + + let (program, debug_artifact) = report_errors( + compilation_result, + &file_manager, + args.compile_options.deny_warnings, + )?; save_program(debug_artifact, program, package, &circuit_dir, args.output_debug); } } @@ -90,7 +100,7 @@ pub(crate) fn run( Ok(()) } -pub(crate) fn compile_package( +pub(crate) fn compile_bin_package( package: &Package, compile_options: &CompileOptions, np_language: Language, @@ -100,9 +110,43 @@ pub(crate) fn compile_package( return Err(CompileError::LibraryCrate(package.name.clone()).into()); } + let (file_manager, compilation_result) = + compile_program(package, compile_options, np_language, &is_opcode_supported); + + let (program, debug_artifact) = + report_errors(compilation_result, &file_manager, compile_options.deny_warnings)?; + + Ok((program, debug_artifact)) +} + +pub(crate) fn compile_contract_package( + package: &Package, + compile_options: &CompileOptions, + np_language: Language, + is_opcode_supported: &impl Fn(&Opcode) -> bool, +) -> Result, CliError> { + let (file_manager, compilation_result) = + compile_contracts(package, compile_options, np_language, &is_opcode_supported); + let contracts_with_debug_artifacts = + report_errors(compilation_result, &file_manager, compile_options.deny_warnings)?; + Ok(contracts_with_debug_artifacts) +} + +fn compile_program( + package: &Package, + compile_options: 
&CompileOptions, + np_language: Language, + is_opcode_supported: &impl Fn(&Opcode) -> bool, +) -> (FileManager, CompilationResult<(CompiledProgram, DebugArtifact)>) { let (mut context, crate_id) = prepare_package(package); - let result = compile_main(&mut context, crate_id, compile_options); - let program = report_errors(result, &context.file_manager, compile_options.deny_warnings)?; + + let (program, warnings) = + match noirc_driver::compile_main(&mut context, crate_id, compile_options) { + Ok(program_and_warnings) => program_and_warnings, + Err(errors) => { + return (context.file_manager, Err(errors)); + } + }; // Apply backend specific optimizations. let optimized_program = @@ -112,22 +156,28 @@ pub(crate) fn compile_package( let debug_artifact = DebugArtifact::new(vec![optimized_program.debug.clone()], &context.file_manager); - Ok((optimized_program, debug_artifact)) + (context.file_manager, Ok(((optimized_program, debug_artifact), warnings))) } -pub(crate) fn compile_contracts( +fn compile_contracts( package: &Package, compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result, CliError> { +) -> (FileManager, CompilationResult>) { let (mut context, crate_id) = prepare_package(package); - let result = noirc_driver::compile_contracts(&mut context, crate_id, compile_options); - let contracts = report_errors(result, &context.file_manager, compile_options.deny_warnings)?; + let (contracts, warnings) = + match noirc_driver::compile_contracts(&mut context, crate_id, compile_options) { + Ok(contracts_and_warnings) => contracts_and_warnings, + Err(errors) => { + return (context.file_manager, Err(errors)); + } + }; let optimized_contracts = try_vecmap(contracts, |contract| { nargo::ops::optimize_contract(contract, np_language, &is_opcode_supported) - })?; + }) + .expect("Backend does not support an opcode that is in the IR"); let contracts_with_debug_artifacts = vecmap(optimized_contracts, |contract| { let debug_infos = vecmap(&contract.functions, |func| func.debug.clone()); @@ -136,7 +186,7 @@ pub(crate) fn compile_contracts( (contract, debug_artifact) }); - Ok(contracts_with_debug_artifacts) + (context.file_manager, Ok((contracts_with_debug_artifacts, warnings))) } fn save_program( diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index bce14027587..b16f7007182 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -15,7 +15,7 @@ use noirc_driver::{CompileOptions, CompiledProgram}; use noirc_errors::CustomDiagnostic; use noirc_frontend::graph::CrateName; -use super::compile_cmd::compile_package; +use super::compile_cmd::compile_bin_package; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; use crate::backends::Backend; @@ -88,7 +88,7 @@ fn execute_package( is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result<(Option, WitnessMap), CliError> { let (compiled_program, debug_artifact) = - compile_package(package, compile_options, np_language, &is_opcode_supported)?; + compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; let CompiledProgram { abi, circuit, .. 
} = compiled_program; // Parse the initial witness values from Prover.toml diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index b030c21f312..e51a0256426 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -11,9 +11,12 @@ use prettytable::{row, table, Row}; use serde::Serialize; use crate::backends::Backend; -use crate::{cli::compile_cmd::compile_package, errors::CliError}; +use crate::errors::CliError; -use super::{compile_cmd::compile_contracts, NargoConfig}; +use super::{ + compile_cmd::{compile_bin_package, compile_contract_package}, + NargoConfig, +}; /// Provides detailed information on a circuit /// @@ -172,7 +175,7 @@ fn count_opcodes_and_gates_in_program( is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result { let (compiled_program, _) = - compile_package(package, compile_options, np_language, &is_opcode_supported)?; + compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; let (language, _) = backend.get_backend_info()?; Ok(ProgramInfo { @@ -190,7 +193,8 @@ fn count_opcodes_and_gates_in_contracts( np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result, CliError> { - let contracts = compile_contracts(package, compile_options, np_language, &is_opcode_supported)?; + let contracts = + compile_contract_package(package, compile_options, np_language, &is_opcode_supported)?; let (language, _) = backend.get_backend_info()?; try_vecmap(contracts, |(contract, _)| { diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index b2a2ff1856b..e953732da2f 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -11,7 +11,7 @@ use noirc_abi::input_parser::Format; use noirc_driver::CompileOptions; use noirc_frontend::graph::CrateName; -use super::compile_cmd::compile_package; +use super::compile_cmd::compile_bin_package; use super::fs::{ inputs::{read_inputs_from_file, write_inputs_to_file}, program::read_program_from_file, @@ -102,7 +102,7 @@ pub(crate) fn prove_package( (program, None) } else { let (program, debug_artifact) = - compile_package(package, compile_options, np_language, &is_opcode_supported)?; + compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; let preprocessed_program = PreprocessedProgram { backend: String::from(BACKEND_IDENTIFIER), abi: program.abi, diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs index 4b57c70875f..9d1a98da4da 100644 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -1,6 +1,6 @@ use super::NargoConfig; use super::{ - compile_cmd::compile_package, + compile_cmd::compile_bin_package, fs::{inputs::read_inputs_from_file, load_hex_data, program::read_program_from_file}, }; use crate::{backends::Backend, errors::CliError}; @@ -86,7 +86,7 @@ fn verify_package( read_program_from_file(circuit_build_path)? 
} else { let (program, _) = - compile_package(package, compile_options, np_language, &is_opcode_supported)?; + compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; PreprocessedProgram { backend: String::from(BACKEND_IDENTIFIER), From 4776d0792b83bdb09d8da46168314cb8e691bdf2 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:34:39 +0100 Subject: [PATCH 08/17] chore: refactor `execute_cmd` (#2656) --- tooling/nargo_cli/src/cli/execute_cmd.rs | 34 ++++++++++-------------- tooling/nargo_cli/src/cli/prove_cmd.rs | 2 +- 2 files changed, 15 insertions(+), 21 deletions(-) diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index b16f7007182..a08cfb09995 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -1,7 +1,6 @@ -use acvm::acir::circuit::{Opcode, OpcodeLocation}; +use acvm::acir::circuit::OpcodeLocation; use acvm::acir::{circuit::Circuit, native_types::WitnessMap}; use acvm::pwg::{ErrorLocation, OpcodeResolutionError}; -use acvm::Language; use clap::Args; use nargo::artifacts::debug::DebugArtifact; @@ -53,17 +52,18 @@ pub(crate) fn run( if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; let selection = args.package.map_or(default_selection, PackageSelection::Selected); let workspace = resolve_workspace_from_toml(&toml_path, selection)?; - let witness_dir = &workspace.target_directory_path(); + let target_dir = &workspace.target_directory_path(); let (np_language, is_opcode_supported) = backend.get_backend_info()?; for package in &workspace { - let (return_value, solved_witness) = execute_package( - backend, + let (compiled_program, debug_artifact) = + compile_bin_package(package, &args.compile_options, np_language, &is_opcode_supported)?; + + let (return_value, solved_witness) = execute_program_and_decode( + compiled_program, + debug_artifact, package, &args.prover_name, - &args.compile_options, - np_language, - &is_opcode_supported, )?; println!("[{}] Circuit witness successfully solved", package.name); @@ -71,7 +71,7 @@ pub(crate) fn run( println!("[{}] Circuit output: {return_value:?}", package.name); } if let Some(witness_name) = &args.witness_name { - let witness_path = save_witness_to_dir(solved_witness, witness_name, witness_dir)?; + let witness_path = save_witness_to_dir(solved_witness, witness_name, target_dir)?; println!("[{}] Witness saved to {}", package.name, witness_path.display()); } @@ -79,23 +79,18 @@ pub(crate) fn run( Ok(()) } -fn execute_package( - backend: &Backend, +fn execute_program_and_decode( + program: CompiledProgram, + debug_artifact: DebugArtifact, package: &Package, prover_name: &str, - compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result<(Option, WitnessMap), CliError> { - let (compiled_program, debug_artifact) = - compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; - let CompiledProgram { abi, circuit, .. } = compiled_program; + let CompiledProgram { abi, circuit, .. 
} = program; // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &abi)?; - let solved_witness = - execute_program(backend, circuit, &abi, &inputs_map, Some(debug_artifact))?; + let solved_witness = execute_program(circuit, &abi, &inputs_map, Some(debug_artifact))?; let public_abi = abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; @@ -193,7 +188,6 @@ fn report_error_with_opcode_locations( } pub(crate) fn execute_program( - _backend: &Backend, circuit: Circuit, abi: &Abi, inputs_map: &InputMap, diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index e953732da2f..c451b78add5 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -117,7 +117,7 @@ pub(crate) fn prove_package( let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &abi)?; - let solved_witness = execute_program(backend, bytecode.clone(), &abi, &inputs_map, debug_data)?; + let solved_witness = execute_program(bytecode.clone(), &abi, &inputs_map, debug_data)?; // Write public inputs into Verifier.toml let public_abi = abi.public_abi(); From 29142f433bb3efd9025fa852b1d649b48b45d8e0 Mon Sep 17 00:00:00 2001 From: Jonathan Bursztyn Date: Tue, 12 Sep 2023 16:03:01 +0100 Subject: [PATCH 09/17] chore: add abi_wasm to release-please (#2664) --- .../workflows/{release-abi_wasm.yml => publish-abi_wasm.yml} | 5 ++++- release-please-config.json | 5 +++++ 2 files changed, 9 insertions(+), 1 deletion(-) rename .github/workflows/{release-abi_wasm.yml => publish-abi_wasm.yml} (94%) diff --git a/.github/workflows/release-abi_wasm.yml b/.github/workflows/publish-abi_wasm.yml similarity index 94% rename from .github/workflows/release-abi_wasm.yml rename to .github/workflows/publish-abi_wasm.yml index 07cccba568b..e7167c681c5 100644 --- a/.github/workflows/release-abi_wasm.yml +++ b/.github/workflows/publish-abi_wasm.yml @@ -1,6 +1,9 @@ -name: Release ABI Wasm +name: Publish ABI Wasm on: + push: + tags: + - "*" workflow_dispatch: jobs: diff --git a/release-please-config.json b/release-please-config.json index 14558039a6e..e06379f5ae7 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -17,6 +17,11 @@ "type": "json", "path": "compiler/wasm/package.json", "jsonpath": "$.version" + }, + { + "type": "json", + "path": "tooling/noirc_abi_wasm/package.json", + "jsonpath": "$.version" } ] } From aebab34520b31502bb8bf0c028aa2ea8bb33142b Mon Sep 17 00:00:00 2001 From: Blaine Bublitz Date: Tue, 12 Sep 2023 08:39:00 -0700 Subject: [PATCH 10/17] fix(wasm): Avoid requesting stdlib paths from the source-resolver (#2650) Co-authored-by: Koby --- .github/workflows/test-integration.yml | 8 ++-- .vscode/extensions.json | 3 +- .vscode/settings.json | 3 ++ compiler/fm/src/file_reader.rs | 12 ++++-- .../browser/compile_prove_verify.test.ts | 11 +++--- .../web-test-runner.config.mjs | 19 +++++----- compiler/integration-tests/yarn.lock | 38 ++----------------- compiler/wasm/test/browser/index.test.ts | 2 +- 8 files changed, 37 insertions(+), 59 deletions(-) diff --git a/.github/workflows/test-integration.yml b/.github/workflows/test-integration.yml index f92036fe1e6..96a177a9bde 100644 --- a/.github/workflows/test-integration.yml +++ b/.github/workflows/test-integration.yml @@ -4,7 +4,7 @@ on: workflow_dispatch: schedule: - cron: "0 2 * * *" # Run nightly at 2 AM UTC - + jobs: wasm-packages-build-test: runs-on: 
ubuntu-latest @@ -100,8 +100,8 @@ jobs: working-directory: ./compiler/integration-tests run: | yarn test:browser - - - name: Alert on nightly test failure + + - name: Alert on nightly test failure uses: JasonEtco/create-an-issue@v2 if: ${{ failure() && github.event_name == 'schedule' }} env: @@ -110,4 +110,4 @@ jobs: WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} with: update_existing: true - filename: .github/NIGHTLY_TEST_FAILURE.md \ No newline at end of file + filename: .github/NIGHTLY_TEST_FAILURE.md diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 64ae238015f..710e88b34df 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -6,7 +6,8 @@ "mkhl.direnv", "jnoortheen.nix-ide", "rust-lang.rust-analyzer", - "redhat.vscode-yaml" + "redhat.vscode-yaml", + "esbenp.prettier-vscode" ], // List of extensions recommended by VS Code that should not be recommended for users of this workspace. "unwantedRecommendations": [] diff --git a/.vscode/settings.json b/.vscode/settings.json index 6c6ec87be51..171d36f4e04 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -16,4 +16,7 @@ "yaml.schemas": { "https://json.schemastore.org/github-workflow.json": "${workspaceRoot}/.github/workflows/*.yml" }, + "[javascript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + } } diff --git a/compiler/fm/src/file_reader.rs b/compiler/fm/src/file_reader.rs index df4e49b919a..1a9b31ed949 100644 --- a/compiler/fm/src/file_reader.rs +++ b/compiler/fm/src/file_reader.rs @@ -1,5 +1,5 @@ use rust_embed::RustEmbed; -use std::io::Error; +use std::io::{Error, ErrorKind}; use std::path::Path; // Based on the environment, we either read files using the rust standard library or we @@ -34,8 +34,6 @@ cfg_if::cfg_if! { } pub(crate) fn read_file_to_string(path_to_file: &Path) -> Result { - use std::io::ErrorKind; - let path_str = path_to_file.to_str().unwrap(); match StdLibAssets::get(path_str) { @@ -43,6 +41,10 @@ cfg_if::cfg_if! { Ok(std::str::from_utf8(std_lib_asset.data.as_ref()).unwrap().to_string()) }, + None if is_stdlib_asset(path_to_file) => { + Err(Error::new(ErrorKind::NotFound, "invalid stdlib path")) + } + None => match read_file(path_str) { Ok(buffer) => Ok(buffer), Err(_) => Err(Error::new(ErrorKind::Other, "could not read file using wasm")), @@ -60,6 +62,10 @@ cfg_if::cfg_if! 
{ Ok(std::str::from_utf8(std_lib_asset.data.as_ref()).unwrap().to_string()) }, + None if is_stdlib_asset(path_to_file) => { + Err(Error::new(ErrorKind::NotFound, "invalid stdlib path")) + } + None => std::fs::read_to_string(path_to_file) } diff --git a/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts b/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts index 5d90bc85fd5..e48a75acb51 100644 --- a/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts +++ b/compiler/integration-tests/test/integration/browser/compile_prove_verify.test.ts @@ -12,7 +12,7 @@ import initACVM, { compressWitness, } from "@noir-lang/acvm_js"; -// @ts-ignore +// @ts-ignore import { Barretenberg, RawBuffer, Crs } from '@aztec/bb.js'; import * as TOML from 'smol-toml' @@ -49,11 +49,11 @@ const numberOfThreads = navigator.hardwareConcurrency || 1; let suite = Mocha.Suite.create(mocha.suite, "Noir end to end test"); -suite.timeout(60*10e3);//10mins +suite.timeout(60*20e3);//20mins test_cases.forEach((testInfo) => { const test_name = testInfo.case.split("/").pop(); - const mochaTest = new Mocha.Test(`${test_name} (Compile, Execute, Proove, Verify)`, async () => { + const mochaTest = new Mocha.Test(`${test_name} (Compile, Execute, Prove, Verify)`, async () => { const base_relative_path = "../../../../.."; const test_case = testInfo.case; @@ -67,7 +67,7 @@ test_cases.forEach((testInfo) => { expect(noir_source).to.be.a.string; initialiseResolver((id: String) => { - console.log("Resoving:", id); + console.log("Resolving:", id); return noir_source; }); @@ -132,6 +132,7 @@ test_cases.forEach((testInfo) => { const acirComposer = await api.acirNewAcirComposer(CIRCUIT_SIZE); + // This took ~6.5 minutes! const proof = await api.acirCreateProof( acirComposer, acirUint8Array, @@ -139,7 +140,7 @@ test_cases.forEach((testInfo) => { isRecursive ); - + // And this took ~5 minutes! 
const verified = await api.acirVerifyProof(acirComposer, proof, isRecursive); expect(verified).to.be.true; diff --git a/compiler/integration-tests/web-test-runner.config.mjs b/compiler/integration-tests/web-test-runner.config.mjs index 8d7b77df63b..1f8b74c3b3b 100644 --- a/compiler/integration-tests/web-test-runner.config.mjs +++ b/compiler/integration-tests/web-test-runner.config.mjs @@ -1,20 +1,19 @@ -import { fileURLToPath } from 'url'; +import { fileURLToPath } from "url"; import { esbuildPlugin } from "@web/dev-server-esbuild"; -import { webdriverLauncher } from '@web/test-runner-webdriver'; +import { webdriverLauncher } from "@web/test-runner-webdriver"; export default { browsers: [ webdriverLauncher({ - automationProtocol: 'webdriver', + automationProtocol: "webdriver", capabilities: { - browserName: 'firefox', - 'moz:firefoxOptions': { - args: ['-headless'], + browserName: "firefox", + "moz:firefoxOptions": { + args: ["-headless"], }, }, }), - -], + ], plugins: [ esbuildPlugin({ ts: true, @@ -27,6 +26,6 @@ export default { ui: "bdd", }, }, - rootDir: fileURLToPath(new URL('./../..', import.meta.url)), - + rootDir: fileURLToPath(new URL("./../..", import.meta.url)), + testsFinishTimeout: 60 * 20e3, // 20 minutes }; diff --git a/compiler/integration-tests/yarn.lock b/compiler/integration-tests/yarn.lock index f15cdd37080..4afb20e071e 100644 --- a/compiler/integration-tests/yarn.lock +++ b/compiler/integration-tests/yarn.lock @@ -231,7 +231,7 @@ fastq "^1.6.0" "@noir-lang/acvm_js@./../../.packages/acvm_js": - version "0.25.0" + version "0.26.0" "@noir-lang/noir-source-resolver@^1.1.4": version "1.1.4" @@ -239,7 +239,7 @@ integrity sha512-jmUcEoRXRSyhPyOqeDGi3E/7rYRVyqiNR0YQkwqH2G/WyvlHL2o2Ltk2N9iYKMNm1la0ri35Nz9OvIeeXjI4wA== "@noir-lang/noir_wasm@./../../.packages/noir_wasm": - version "0.10.5" + version "0.11.1" "@noir-lang/noirc_abi@./../../.packages/noirc_abi_wasm": version "0.8.0" @@ -832,7 +832,7 @@ picomatch "^2.2.2" source-map "^0.7.3" -"@web/test-runner-core@^0.11.0", "@web/test-runner-core@^0.11.1": +"@web/test-runner-core@^0.11.1": version "0.11.4" resolved "https://registry.yarnpkg.com/@web/test-runner-core/-/test-runner-core-0.11.4.tgz#590994c3fc69337e2c5411bf11c293dd061cc07a" integrity sha512-E7BsKAP8FAAEsfj4viASjmuaYfOw4UlKP9IFqo4W20eVyd1nbUWU3Amq4Jksh7W/j811qh3VaFNjDfCwklQXMg== @@ -874,17 +874,6 @@ picomatch "^2.2.2" v8-to-istanbul "^9.0.1" -"@web/test-runner-coverage-v8@^0.7.0": - version "0.7.1" - resolved "https://registry.yarnpkg.com/@web/test-runner-coverage-v8/-/test-runner-coverage-v8-0.7.1.tgz#b09c73f3e49ef6256cb589a5d7b09d1e28aef9b2" - integrity sha512-R0laTOxbLg7kVKHCBILEmja3w1ihlwkB+eRc7J06/ByyZoQVWxkM9SrTAUx7qCFI6o738Jj24a6TfIDbu3CwSA== - dependencies: - "@web/test-runner-core" "^0.11.0" - istanbul-lib-coverage "^3.0.0" - lru-cache "^8.0.4" - picomatch "^2.2.2" - v8-to-istanbul "^9.0.1" - "@web/test-runner-mocha@^0.7.5": version "0.7.5" resolved "https://registry.yarnpkg.com/@web/test-runner-mocha/-/test-runner-mocha-0.7.5.tgz#696f8cb7f5118a72bd7ac5778367ae3bd3fb92cd" @@ -893,15 +882,6 @@ "@types/mocha" "^8.2.0" "@web/test-runner-core" "^0.10.20" -"@web/test-runner-playwright@^0.10.0": - version "0.10.1" - resolved "https://registry.yarnpkg.com/@web/test-runner-playwright/-/test-runner-playwright-0.10.1.tgz#f9fc29dbd771bcb65dcebe826b257d10fb8a8ec5" - integrity sha512-/sEfuKc60UT0gXdn7M6lFddh+nCepO73gLPe2Og7jfoFv2tDkkk41RYBG75jx11RMVOJ6+i1peluLZSVxLlcEg== - dependencies: - "@web/test-runner-core" "^0.11.0" - "@web/test-runner-coverage-v8" "^0.7.0" - 
playwright "^1.22.2" - "@web/test-runner-webdriver@^0.7.0": version "0.7.0" resolved "https://registry.yarnpkg.com/@web/test-runner-webdriver/-/test-runner-webdriver-0.7.0.tgz#cbe64cddbc84e4a7739f2211b3aa85bbe83bf9cd" @@ -3189,18 +3169,6 @@ picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.3.1: resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== -playwright-core@1.37.1: - version "1.37.1" - resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.37.1.tgz#cb517d52e2e8cb4fa71957639f1cd105d1683126" - integrity sha512-17EuQxlSIYCmEMwzMqusJ2ztDgJePjrbttaefgdsiqeLWidjYz9BxXaTaZWxH1J95SHGk6tjE+dwgWILJoUZfA== - -playwright@^1.22.2: - version "1.37.1" - resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.37.1.tgz#6e488d82d7d98b9127c5db9c701f9c956ab47e76" - integrity sha512-bgUXRrQKhT48zHdxDYQTpf//0xDfDd5hLeEhjuSw8rXEGoT9YeElpfvs/izonTNY21IQZ7d3s22jLxYaAnubbQ== - dependencies: - playwright-core "1.37.1" - portfinder@^1.0.32: version "1.0.32" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.32.tgz#2fe1b9e58389712429dc2bea5beb2146146c7f81" diff --git a/compiler/wasm/test/browser/index.test.ts b/compiler/wasm/test/browser/index.test.ts index 9cc49069bfd..da00d595aa0 100644 --- a/compiler/wasm/test/browser/index.test.ts +++ b/compiler/wasm/test/browser/index.test.ts @@ -31,5 +31,5 @@ describe("noir wasm compilation", () => { expect(wasmCircuitBase64).to.equal(cliCircuitBase64); - }).timeout(10e3); + }).timeout(20e3); // 20 seconds }); From 16e5e4ddb33209a84e29dc4bea5813baba8bd5f3 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 17:23:54 +0100 Subject: [PATCH 11/17] feat: Compile workspace packages in parallel (#2612) --- Cargo.lock | 1 + tooling/nargo_cli/Cargo.toml | 1 + tooling/nargo_cli/src/cli/compile_cmd.rs | 81 ++++++++++++++---------- 3 files changed, 51 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 563681123ed..e84901d48ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2216,6 +2216,7 @@ dependencies = [ "pprof", "predicates 2.1.5", "prettytable-rs", + "rayon", "rustc_version", "serde", "serde_json", diff --git a/tooling/nargo_cli/Cargo.toml b/tooling/nargo_cli/Cargo.toml index e015e0caefd..ec15975613b 100644 --- a/tooling/nargo_cli/Cargo.toml +++ b/tooling/nargo_cli/Cargo.toml @@ -33,6 +33,7 @@ toml.workspace = true serde.workspace = true serde_json.workspace = true prettytable-rs = "0.10" +rayon = "1.7.0" thiserror.workspace = true tower.workspace = true async-lsp = { version = "0.0.5", default-features = false, features = [ diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 7a06b43937a..61d3b64a47a 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -23,6 +23,7 @@ use super::fs::program::{ save_contract_to_file, save_debug_artifact_to_file, save_program_to_file, }; use super::NargoConfig; +use rayon::prelude::*; // TODO(#1388): pull this from backend. 
const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; @@ -63,38 +64,54 @@ pub(crate) fn run( let circuit_dir = workspace.target_directory_path(); let (np_language, is_opcode_supported) = backend.get_backend_info()?; - for package in &workspace { - // If `contract` package type, we're compiling every function in a 'contract' rather than just 'main'. - if package.is_contract() { - let (file_manager, compilation_result) = compile_contracts( - package, - &args.compile_options, - np_language, - &is_opcode_supported, - ); - let contracts_with_debug_artifacts = report_errors( - compilation_result, - &file_manager, - args.compile_options.deny_warnings, - )?; - - save_contracts( - contracts_with_debug_artifacts, - package, - &circuit_dir, - args.output_debug, - ); - } else { - let (file_manager, compilation_result) = - compile_program(package, &args.compile_options, np_language, &is_opcode_supported); - - let (program, debug_artifact) = report_errors( - compilation_result, - &file_manager, - args.compile_options.deny_warnings, - )?; - save_program(debug_artifact, program, package, &circuit_dir, args.output_debug); - } + + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace + .members + .iter() + .filter(|package| !package.is_library()) + .partition(|package| package.is_binary()); + + // Compile all of the packages in parallel. + let program_results: Vec<(FileManager, CompilationResult<(CompiledProgram, DebugArtifact)>)> = + binary_packages + .par_iter() + .map(|package| { + compile_program(package, &args.compile_options, np_language, &is_opcode_supported) + }) + .collect(); + #[allow(clippy::type_complexity)] + let contract_results: Vec<( + FileManager, + CompilationResult>, + )> = contract_packages + .par_iter() + .map(|package| { + compile_contracts(package, &args.compile_options, np_language, &is_opcode_supported) + }) + .collect(); + + // Report any warnings/errors which were encountered during compilation. + let compiled_programs: Vec<(CompiledProgram, DebugArtifact)> = program_results + .into_iter() + .map(|(file_manager, compilation_result)| { + report_errors(compilation_result, &file_manager, args.compile_options.deny_warnings) + }) + .collect::>()?; + let compiled_contracts: Vec> = contract_results + .into_iter() + .map(|(file_manager, compilation_result)| { + report_errors(compilation_result, &file_manager, args.compile_options.deny_warnings) + }) + .collect::>()?; + + // Save build artifacts to disk. 
+ for (package, (program, debug_artifact)) in binary_packages.into_iter().zip(compiled_programs) { + save_program(debug_artifact, program, package, &circuit_dir, args.output_debug); + } + for (package, contracts_with_debug_artifacts) in + contract_packages.into_iter().zip(compiled_contracts) + { + save_contracts(contracts_with_debug_artifacts, package, &circuit_dir, args.output_debug); } Ok(()) From 4bc42eb9d0fff458196a053038e80b328fb9fcdf Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 20:22:03 +0100 Subject: [PATCH 12/17] chore: Embed a file map into `CompiledProgram`/`CompiledContract` (#2666) --- compiler/noirc_driver/Cargo.toml | 2 +- compiler/noirc_driver/src/contract.rs | 11 ++- compiler/noirc_driver/src/debug.rs | 45 +++++++++++ compiler/noirc_driver/src/lib.rs | 12 ++- compiler/noirc_driver/src/program.rs | 6 ++ tooling/nargo/src/artifacts/debug.rs | 10 +-- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 2 +- tooling/nargo_cli/src/cli/compile_cmd.rs | 79 +++++++++---------- tooling/nargo_cli/src/cli/execute_cmd.rs | 14 ++-- tooling/nargo_cli/src/cli/info_cmd.rs | 4 +- tooling/nargo_cli/src/cli/prove_cmd.rs | 6 +- tooling/nargo_cli/src/cli/verify_cmd.rs | 2 +- 12 files changed, 122 insertions(+), 71 deletions(-) create mode 100644 compiler/noirc_driver/src/debug.rs diff --git a/compiler/noirc_driver/Cargo.toml b/compiler/noirc_driver/Cargo.toml index 2afc7a4cb53..bd38371f2ad 100644 --- a/compiler/noirc_driver/Cargo.toml +++ b/compiler/noirc_driver/Cargo.toml @@ -15,4 +15,4 @@ noirc_abi.workspace = true acvm.workspace = true fm.workspace = true serde.workspace = true -base64.workspace = true \ No newline at end of file +base64.workspace = true diff --git a/compiler/noirc_driver/src/contract.rs b/compiler/noirc_driver/src/contract.rs index a1820ff2e47..69a92764318 100644 --- a/compiler/noirc_driver/src/contract.rs +++ b/compiler/noirc_driver/src/contract.rs @@ -1,8 +1,13 @@ -use crate::program::{deserialize_circuit, serialize_circuit}; +use serde::{Deserialize, Serialize}; +use std::collections::BTreeMap; + use acvm::acir::circuit::Circuit; +use fm::FileId; use noirc_abi::Abi; use noirc_errors::debug_info::DebugInfo; -use serde::{Deserialize, Serialize}; + +use super::debug::DebugFile; +use crate::program::{deserialize_circuit, serialize_circuit}; /// Describes the types of smart contract functions that are allowed. /// Unlike the similar enum in noirc_frontend, 'open' and 'unconstrained' @@ -28,6 +33,8 @@ pub struct CompiledContract { /// Each of the contract's functions are compiled into a separate `CompiledProgram` /// stored in this `Vector`. pub functions: Vec, + + pub file_map: BTreeMap, } /// Each function in the contract will be compiled diff --git a/compiler/noirc_driver/src/debug.rs b/compiler/noirc_driver/src/debug.rs new file mode 100644 index 00000000000..9808c9b54a2 --- /dev/null +++ b/compiler/noirc_driver/src/debug.rs @@ -0,0 +1,45 @@ +use fm::{FileId, FileManager}; +use noirc_errors::debug_info::DebugInfo; +use serde::{Deserialize, Serialize}; +use std::{ + collections::{BTreeMap, BTreeSet}, + path::PathBuf, +}; + +/// For a given file, we store the source code and the path to the file +/// so consumers of the debug artifact can reconstruct the original source code structure. 
+#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct DebugFile { + pub source: String, + pub path: PathBuf, +} + +pub(crate) fn filter_relevant_files( + debug_symbols: &[DebugInfo], + file_manager: &FileManager, +) -> BTreeMap { + let files_with_debug_symbols: BTreeSet = debug_symbols + .iter() + .flat_map(|function_symbols| { + function_symbols + .locations + .values() + .filter_map(|call_stack| call_stack.last().map(|location| location.file)) + }) + .collect(); + + let mut file_map = BTreeMap::new(); + + for file_id in files_with_debug_symbols { + let file_source = file_manager.fetch_file(file_id).source(); + + file_map.insert( + file_id, + DebugFile { + source: file_source.to_string(), + path: file_manager.path(file_id).to_path_buf(), + }, + ); + } + file_map +} diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index a608879ce77..ec569e0e182 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -4,6 +4,7 @@ #![warn(clippy::semicolon_if_nothing_returned)] use clap::Args; +use debug::filter_relevant_files; use fm::FileId; use noirc_abi::{AbiParameter, AbiType}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; @@ -17,9 +18,11 @@ use serde::{Deserialize, Serialize}; use std::path::Path; mod contract; +mod debug; mod program; pub use contract::{CompiledContract, ContractFunction, ContractFunctionType}; +pub use debug::DebugFile; pub use program::CompiledProgram; const STD_CRATE_NAME: &str = "std"; @@ -248,7 +251,10 @@ fn compile_contract( } if errors.is_empty() { - Ok(CompiledContract { name: contract.name, functions }) + let debug_infos: Vec<_> = functions.iter().map(|function| function.debug.clone()).collect(); + let file_map = filter_relevant_files(&debug_infos, &context.file_manager); + + Ok(CompiledContract { name: contract.name, functions, file_map }) } else { Err(errors) } @@ -269,5 +275,7 @@ pub fn compile_no_check( let (circuit, debug, abi) = create_circuit(context, program, options.show_ssa, options.show_brillig)?; - Ok(CompiledProgram { circuit, debug, abi }) + let file_map = filter_relevant_files(&[debug.clone()], &context.file_manager); + + Ok(CompiledProgram { circuit, debug, abi, file_map }) } diff --git a/compiler/noirc_driver/src/program.rs b/compiler/noirc_driver/src/program.rs index 9323f90d522..1ed2b0ddddc 100644 --- a/compiler/noirc_driver/src/program.rs +++ b/compiler/noirc_driver/src/program.rs @@ -1,15 +1,21 @@ +use std::collections::BTreeMap; + use acvm::acir::circuit::Circuit; +use fm::FileId; use base64::Engine; use noirc_errors::debug_info::DebugInfo; use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use super::debug::DebugFile; + #[derive(Debug, Serialize, Deserialize, Clone)] pub struct CompiledProgram { #[serde(serialize_with = "serialize_circuit", deserialize_with = "deserialize_circuit")] pub circuit: Circuit, pub abi: noirc_abi::Abi, pub debug: DebugInfo, + pub file_map: BTreeMap, } pub(crate) fn serialize_circuit(circuit: &Circuit, s: S) -> Result diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index 2a201a82c48..3c173f34876 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -1,22 +1,14 @@ use codespan_reporting::files::{Error, Files, SimpleFile}; +use noirc_driver::DebugFile; use noirc_errors::debug_info::DebugInfo; use serde::{Deserialize, Serialize}; use std::{ collections::{BTreeMap, BTreeSet}, ops::Range, - path::PathBuf, }; use fm::{FileId, FileManager, PathString}; -/// For a 
given file, we store the source code and the path to the file -/// so consumers of the debug artifact can reconstruct the original source code structure. -#[derive(Debug, Serialize, Deserialize)] -pub struct DebugFile { - pub source: String, - pub path: PathBuf, -} - /// A Debug Artifact stores, for a given program, the debug info for every function /// along with a map of file Id to the source code so locations in debug info can be mapped to source code they point to. #[derive(Debug, Serialize, Deserialize)] diff --git a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 6199bf0761d..16ff311f704 100644 --- a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -81,7 +81,7 @@ fn smart_contract_for_package( let preprocessed_program = if circuit_build_path.exists() { read_program_from_file(circuit_build_path)? } else { - let (program, _) = + let program = compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; PreprocessedProgram { diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 61d3b64a47a..d979cfe5efd 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -72,32 +72,28 @@ pub(crate) fn run( .partition(|package| package.is_binary()); // Compile all of the packages in parallel. - let program_results: Vec<(FileManager, CompilationResult<(CompiledProgram, DebugArtifact)>)> = - binary_packages - .par_iter() - .map(|package| { - compile_program(package, &args.compile_options, np_language, &is_opcode_supported) - }) - .collect(); - #[allow(clippy::type_complexity)] - let contract_results: Vec<( - FileManager, - CompilationResult>, - )> = contract_packages + let program_results: Vec<(FileManager, CompilationResult)> = binary_packages .par_iter() .map(|package| { - compile_contracts(package, &args.compile_options, np_language, &is_opcode_supported) + compile_program(package, &args.compile_options, np_language, &is_opcode_supported) }) .collect(); + let contract_results: Vec<(FileManager, CompilationResult>)> = + contract_packages + .par_iter() + .map(|package| { + compile_contracts(package, &args.compile_options, np_language, &is_opcode_supported) + }) + .collect(); // Report any warnings/errors which were encountered during compilation. - let compiled_programs: Vec<(CompiledProgram, DebugArtifact)> = program_results + let compiled_programs: Vec = program_results .into_iter() .map(|(file_manager, compilation_result)| { report_errors(compilation_result, &file_manager, args.compile_options.deny_warnings) }) .collect::>()?; - let compiled_contracts: Vec> = contract_results + let compiled_contracts: Vec> = contract_results .into_iter() .map(|(file_manager, compilation_result)| { report_errors(compilation_result, &file_manager, args.compile_options.deny_warnings) @@ -105,13 +101,11 @@ pub(crate) fn run( .collect::>()?; // Save build artifacts to disk. 
- for (package, (program, debug_artifact)) in binary_packages.into_iter().zip(compiled_programs) { - save_program(debug_artifact, program, package, &circuit_dir, args.output_debug); + for (package, program) in binary_packages.into_iter().zip(compiled_programs) { + save_program(program, package, &circuit_dir, args.output_debug); } - for (package, contracts_with_debug_artifacts) in - contract_packages.into_iter().zip(compiled_contracts) - { - save_contracts(contracts_with_debug_artifacts, package, &circuit_dir, args.output_debug); + for (package, compiled_contracts) in contract_packages.into_iter().zip(compiled_contracts) { + save_contracts(compiled_contracts, package, &circuit_dir, args.output_debug); } Ok(()) @@ -122,7 +116,7 @@ pub(crate) fn compile_bin_package( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result<(CompiledProgram, DebugArtifact), CliError> { +) -> Result { if package.is_library() { return Err(CompileError::LibraryCrate(package.name.clone()).into()); } @@ -130,10 +124,9 @@ pub(crate) fn compile_bin_package( let (file_manager, compilation_result) = compile_program(package, compile_options, np_language, &is_opcode_supported); - let (program, debug_artifact) = - report_errors(compilation_result, &file_manager, compile_options.deny_warnings)?; + let program = report_errors(compilation_result, &file_manager, compile_options.deny_warnings)?; - Ok((program, debug_artifact)) + Ok(program) } pub(crate) fn compile_contract_package( @@ -141,7 +134,7 @@ pub(crate) fn compile_contract_package( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result, CliError> { +) -> Result, CliError> { let (file_manager, compilation_result) = compile_contracts(package, compile_options, np_language, &is_opcode_supported); let contracts_with_debug_artifacts = @@ -154,7 +147,7 @@ fn compile_program( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult<(CompiledProgram, DebugArtifact)>) { +) -> (FileManager, CompilationResult) { let (mut context, crate_id) = prepare_package(package); let (program, warnings) = @@ -170,10 +163,7 @@ fn compile_program( nargo::ops::optimize_program(program, np_language, &is_opcode_supported) .expect("Backend does not support an opcode that is in the IR"); - let debug_artifact = - DebugArtifact::new(vec![optimized_program.debug.clone()], &context.file_manager); - - (context.file_manager, Ok(((optimized_program, debug_artifact), warnings))) + (context.file_manager, Ok((optimized_program, warnings))) } fn compile_contracts( @@ -181,7 +171,7 @@ fn compile_contracts( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult>) { +) -> (FileManager, CompilationResult>) { let (mut context, crate_id) = prepare_package(package); let (contracts, warnings) = match noirc_driver::compile_contracts(&mut context, crate_id, compile_options) { @@ -196,18 +186,10 @@ fn compile_contracts( }) .expect("Backend does not support an opcode that is in the IR"); - let contracts_with_debug_artifacts = vecmap(optimized_contracts, |contract| { - let debug_infos = vecmap(&contract.functions, |func| func.debug.clone()); - let debug_artifact = DebugArtifact::new(debug_infos, &context.file_manager); - - (contract, debug_artifact) - }); - - (context.file_manager, Ok((contracts_with_debug_artifacts, warnings))) + 
(context.file_manager, Ok((optimized_contracts, warnings))) } fn save_program( - debug_artifact: DebugArtifact, program: CompiledProgram, package: &Package, circuit_dir: &Path, @@ -222,13 +204,15 @@ fn save_program( save_program_to_file(&preprocessed_program, &package.name, circuit_dir); if output_debug { + let debug_artifact = + DebugArtifact { debug_symbols: vec![program.debug], file_map: program.file_map }; let circuit_name: String = (&package.name).into(); save_debug_artifact_to_file(&debug_artifact, &circuit_name, circuit_dir); } } fn save_contracts( - contracts: Vec<(CompiledContract, DebugArtifact)>, + contracts: Vec, package: &Package, circuit_dir: &Path, output_debug: bool, @@ -238,7 +222,16 @@ fn save_contracts( // are compiled via nargo-core and then the PreprocessedContract is constructed here. // This is due to EACH function needing it's own CRS, PKey, and VKey from the backend. let preprocessed_contracts: Vec<(PreprocessedContract, DebugArtifact)> = - vecmap(contracts, |(contract, debug_artifact)| { + vecmap(contracts, |contract| { + let debug_artifact = DebugArtifact { + debug_symbols: contract + .functions + .iter() + .map(|function| function.debug.clone()) + .collect(), + file_map: contract.file_map, + }; + let preprocessed_functions = vecmap(contract.functions, |func| PreprocessedContractFunction { name: func.name, diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index a08cfb09995..8c434f8fe21 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -56,15 +56,11 @@ pub(crate) fn run( let (np_language, is_opcode_supported) = backend.get_backend_info()?; for package in &workspace { - let (compiled_program, debug_artifact) = + let compiled_program = compile_bin_package(package, &args.compile_options, np_language, &is_opcode_supported)?; - let (return_value, solved_witness) = execute_program_and_decode( - compiled_program, - debug_artifact, - package, - &args.prover_name, - )?; + let (return_value, solved_witness) = + execute_program_and_decode(compiled_program, package, &args.prover_name)?; println!("[{}] Circuit witness successfully solved", package.name); if let Some(return_value) = return_value { @@ -81,11 +77,11 @@ pub(crate) fn run( fn execute_program_and_decode( program: CompiledProgram, - debug_artifact: DebugArtifact, package: &Package, prover_name: &str, ) -> Result<(Option, WitnessMap), CliError> { - let CompiledProgram { abi, circuit, .. 
} = program; + let CompiledProgram { abi, circuit, debug, file_map } = program; + let debug_artifact = DebugArtifact { debug_symbols: vec![debug], file_map }; // Parse the initial witness values from Prover.toml let (inputs_map, _) = diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index e51a0256426..49ae1327c5a 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -174,7 +174,7 @@ fn count_opcodes_and_gates_in_program( np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, ) -> Result { - let (compiled_program, _) = + let compiled_program = compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; let (language, _) = backend.get_backend_info()?; @@ -197,7 +197,7 @@ fn count_opcodes_and_gates_in_contracts( compile_contract_package(package, compile_options, np_language, &is_opcode_supported)?; let (language, _) = backend.get_backend_info()?; - try_vecmap(contracts, |(contract, _)| { + try_vecmap(contracts, |contract| { let functions = try_vecmap(contract.functions, |function| -> Result<_, BackendError> { Ok(FunctionInfo { name: function.name, diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index c451b78add5..03146d3919c 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -3,6 +3,7 @@ use std::path::{Path, PathBuf}; use acvm::acir::circuit::Opcode; use acvm::Language; use clap::Args; +use nargo::artifacts::debug::DebugArtifact; use nargo::artifacts::program::PreprocessedProgram; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; use nargo::package::Package; @@ -101,13 +102,16 @@ pub(crate) fn prove_package( (program, None) } else { - let (program, debug_artifact) = + let program = compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; let preprocessed_program = PreprocessedProgram { backend: String::from(BACKEND_IDENTIFIER), abi: program.abi, bytecode: program.circuit, }; + let debug_artifact = + DebugArtifact { debug_symbols: vec![program.debug], file_map: program.file_map }; + (preprocessed_program, Some(debug_artifact)) }; diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs index 9d1a98da4da..452d58ff667 100644 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -85,7 +85,7 @@ fn verify_package( let preprocessed_program = if circuit_build_path.exists() { read_program_from_file(circuit_build_path)? 
} else { - let (program, _) = + let program = compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; PreprocessedProgram { From dc3358b7e12ba25bedf3aa47a82b2e994a41e8c0 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 12 Sep 2023 22:10:30 +0100 Subject: [PATCH 13/17] chore!: Restrict packages to contain at most a single contract (#2668) --- Cargo.lock | 1 - compiler/noirc_driver/src/lib.rs | 34 ++++-- compiler/wasm/Cargo.toml | 1 - compiler/wasm/src/compile.rs | 19 ++- tooling/nargo_cli/src/cli/compile_cmd.rs | 111 ++++++++---------- tooling/nargo_cli/src/cli/info_cmd.rs | 24 ++-- .../multiple_contracts/Nargo.toml | 7 ++ .../multiple_contracts/src/main.nr | 4 + 8 files changed, 99 insertions(+), 102 deletions(-) create mode 100644 tooling/nargo_cli/tests/compile_failure/multiple_contracts/Nargo.toml create mode 100644 tooling/nargo_cli/tests/compile_failure/multiple_contracts/src/main.nr diff --git a/Cargo.lock b/Cargo.lock index e84901d48ae..0486220f416 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2287,7 +2287,6 @@ dependencies = [ "fm", "getrandom", "gloo-utils", - "iter-extended", "log", "nargo", "noirc_driver", diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index ec569e0e182..1b627adb3e4 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -169,20 +169,27 @@ pub fn compile_main( } /// Run the frontend to check the crate for errors then compile all contracts if there were none -pub fn compile_contracts( +pub fn compile_contract( context: &mut Context, crate_id: CrateId, options: &CompileOptions, -) -> CompilationResult> { +) -> CompilationResult { let (_, warnings) = check_crate(context, crate_id, options.deny_warnings)?; // TODO: We probably want to error if contracts is empty let contracts = context.get_all_contracts(&crate_id); + let mut compiled_contracts = vec![]; let mut errors = warnings; + if contracts.len() > 1 { + let err = CustomDiagnostic::from_message("Packages are limited to a single contract") + .in_file(FileId::default()); + return Err(vec![err]); + }; + for contract in contracts { - match compile_contract(context, contract, options) { + match compile_contract_inner(context, contract, options) { Ok(contract) => compiled_contracts.push(contract), Err(mut more_errors) => errors.append(&mut more_errors), } @@ -191,19 +198,20 @@ pub fn compile_contracts( if has_errors(&errors, options.deny_warnings) { Err(errors) } else { + assert_eq!(compiled_contracts.len(), 1); + let compiled_contract = compiled_contracts.remove(0); + if options.print_acir { - for compiled_contract in &compiled_contracts { - for contract_function in &compiled_contract.functions { - println!( - "Compiled ACIR for {}::{} (unoptimized):", - compiled_contract.name, contract_function.name - ); - println!("{}", contract_function.bytecode); - } + for contract_function in &compiled_contract.functions { + println!( + "Compiled ACIR for {}::{} (unoptimized):", + compiled_contract.name, contract_function.name + ); + println!("{}", contract_function.bytecode); } } // errors here is either empty or contains only warnings - Ok((compiled_contracts, errors)) + Ok((compiled_contract, errors)) } } @@ -217,7 +225,7 @@ fn has_errors(errors: &[FileDiagnostic], deny_warnings: bool) -> bool { } /// Compile all of the functions associated with a Noir contract. 
-fn compile_contract( +fn compile_contract_inner( context: &Context, contract: Contract, options: &CompileOptions, diff --git a/compiler/wasm/Cargo.toml b/compiler/wasm/Cargo.toml index 1049dc92f47..c6126818434 100644 --- a/compiler/wasm/Cargo.toml +++ b/compiler/wasm/Cargo.toml @@ -13,7 +13,6 @@ crate-type = ["cdylib"] [dependencies] acvm.workspace = true fm.workspace = true -iter-extended.workspace = true nargo.workspace = true noirc_driver.workspace = true noirc_frontend.workspace = true diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index f610ff7e79f..c98e586ab45 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -1,9 +1,8 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; -use iter_extended::try_vecmap; use log::debug; use noirc_driver::{ - add_dep, compile_contracts, compile_main, prepare_crate, prepare_dependency, CompileOptions, + add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, }; use noirc_frontend::{graph::CrateGraph, hir::Context}; use serde::{Deserialize, Serialize}; @@ -116,17 +115,15 @@ pub fn compile(args: JsValue) -> JsValue { let is_opcode_supported = acvm::pwg::default_is_opcode_supported(np_language); if options.contracts { - let compiled_contracts = - compile_contracts(&mut context, crate_id, &options.compile_options) - .expect("Contract compilation failed") - .0; + let compiled_contract = compile_contract(&mut context, crate_id, &options.compile_options) + .expect("Contract compilation failed") + .0; - let optimized_contracts = try_vecmap(compiled_contracts, |contract| { - nargo::ops::optimize_contract(contract, np_language, &is_opcode_supported) - }) - .expect("Contract optimization failed"); + let optimized_contract = + nargo::ops::optimize_contract(compiled_contract, np_language, &is_opcode_supported) + .expect("Contract optimization failed"); - ::from_serde(&optimized_contracts).unwrap() + ::from_serde(&optimized_contract).unwrap() } else { let compiled_program = compile_main(&mut context, crate_id, &options.compile_options) .expect("Compilation failed") diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index d979cfe5efd..0574dfdf768 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -3,7 +3,7 @@ use std::path::Path; use acvm::acir::circuit::Opcode; use acvm::Language; use fm::FileManager; -use iter_extended::{try_vecmap, vecmap}; +use iter_extended::vecmap; use nargo::artifacts::contract::PreprocessedContract; use nargo::artifacts::contract::PreprocessedContractFunction; use nargo::artifacts::debug::DebugArtifact; @@ -78,11 +78,11 @@ pub(crate) fn run( compile_program(package, &args.compile_options, np_language, &is_opcode_supported) }) .collect(); - let contract_results: Vec<(FileManager, CompilationResult>)> = + let contract_results: Vec<(FileManager, CompilationResult)> = contract_packages .par_iter() .map(|package| { - compile_contracts(package, &args.compile_options, np_language, &is_opcode_supported) + compile_contract(package, &args.compile_options, np_language, &is_opcode_supported) }) .collect(); @@ -93,7 +93,7 @@ pub(crate) fn run( report_errors(compilation_result, &file_manager, args.compile_options.deny_warnings) }) .collect::>()?; - let compiled_contracts: Vec> = contract_results + let compiled_contracts: Vec = contract_results .into_iter() .map(|(file_manager, compilation_result)| { report_errors(compilation_result, &file_manager, 
args.compile_options.deny_warnings) @@ -104,8 +104,8 @@ pub(crate) fn run( for (package, program) in binary_packages.into_iter().zip(compiled_programs) { save_program(program, package, &circuit_dir, args.output_debug); } - for (package, compiled_contracts) in contract_packages.into_iter().zip(compiled_contracts) { - save_contracts(compiled_contracts, package, &circuit_dir, args.output_debug); + for (package, compiled_contract) in contract_packages.into_iter().zip(compiled_contracts) { + save_contract(compiled_contract, package, &circuit_dir, args.output_debug); } Ok(()) @@ -134,12 +134,12 @@ pub(crate) fn compile_contract_package( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result, CliError> { +) -> Result { let (file_manager, compilation_result) = - compile_contracts(package, compile_options, np_language, &is_opcode_supported); - let contracts_with_debug_artifacts = + compile_contract(package, compile_options, np_language, &is_opcode_supported); + let contract_and_debug_artifact = report_errors(compilation_result, &file_manager, compile_options.deny_warnings)?; - Ok(contracts_with_debug_artifacts) + Ok(contract_and_debug_artifact) } fn compile_program( @@ -166,27 +166,26 @@ fn compile_program( (context.file_manager, Ok((optimized_program, warnings))) } -fn compile_contracts( +fn compile_contract( package: &Package, compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult>) { +) -> (FileManager, CompilationResult) { let (mut context, crate_id) = prepare_package(package); - let (contracts, warnings) = - match noirc_driver::compile_contracts(&mut context, crate_id, compile_options) { + let (contract, warnings) = + match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { Ok(contracts_and_warnings) => contracts_and_warnings, Err(errors) => { return (context.file_manager, Err(errors)); } }; - let optimized_contracts = try_vecmap(contracts, |contract| { + let optimized_contract = nargo::ops::optimize_contract(contract, np_language, &is_opcode_supported) - }) - .expect("Backend does not support an opcode that is in the IR"); + .expect("Backend does not support an opcode that is in the IR"); - (context.file_manager, Ok((optimized_contracts, warnings))) + (context.file_manager, Ok((optimized_contract, warnings))) } fn save_program( @@ -211,8 +210,8 @@ fn save_program( } } -fn save_contracts( - contracts: Vec, +fn save_contract( + contract: CompiledContract, package: &Package, circuit_dir: &Path, output_debug: bool, @@ -221,51 +220,37 @@ fn save_contracts( // As can be seen here, It seems like a leaky abstraction where ContractFunctions (essentially CompiledPrograms) // are compiled via nargo-core and then the PreprocessedContract is constructed here. // This is due to EACH function needing it's own CRS, PKey, and VKey from the backend. 
- let preprocessed_contracts: Vec<(PreprocessedContract, DebugArtifact)> = - vecmap(contracts, |contract| { - let debug_artifact = DebugArtifact { - debug_symbols: contract - .functions - .iter() - .map(|function| function.debug.clone()) - .collect(), - file_map: contract.file_map, - }; - - let preprocessed_functions = - vecmap(contract.functions, |func| PreprocessedContractFunction { - name: func.name, - function_type: func.function_type, - is_internal: func.is_internal, - abi: func.abi, - - bytecode: func.bytecode, - }); - - ( - PreprocessedContract { - name: contract.name, - backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_functions, - }, - debug_artifact, - ) - }); - - for (contract, debug_artifact) in preprocessed_contracts { - save_contract_to_file( - &contract, - &format!("{}-{}", package.name, contract.name), + let debug_artifact = DebugArtifact { + debug_symbols: contract.functions.iter().map(|function| function.debug.clone()).collect(), + file_map: contract.file_map, + }; + + let preprocessed_functions = vecmap(contract.functions, |func| PreprocessedContractFunction { + name: func.name, + function_type: func.function_type, + is_internal: func.is_internal, + abi: func.abi, + bytecode: func.bytecode, + }); + + let preprocessed_contract = PreprocessedContract { + name: contract.name, + backend: String::from(BACKEND_IDENTIFIER), + functions: preprocessed_functions, + }; + + save_contract_to_file( + &preprocessed_contract, + &format!("{}-{}", package.name, preprocessed_contract.name), + circuit_dir, + ); + + if output_debug { + save_debug_artifact_to_file( + &debug_artifact, + &format!("{}-{}", package.name, preprocessed_contract.name), circuit_dir, ); - - if output_debug { - save_debug_artifact_to_file( - &debug_artifact, - &format!("{}-{}", package.name, contract.name), - circuit_dir, - ); - } } } diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index 49ae1327c5a..3359a61aa76 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -64,7 +64,7 @@ pub(crate) fn run( np_language, &is_opcode_supported, )?; - info_report.contracts.extend(contract_info); + info_report.contracts.push(contract_info); } else { let program_info = count_opcodes_and_gates_in_program( backend, @@ -192,20 +192,18 @@ fn count_opcodes_and_gates_in_contracts( compile_options: &CompileOptions, np_language: Language, is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result, CliError> { - let contracts = +) -> Result { + let contract = compile_contract_package(package, compile_options, np_language, &is_opcode_supported)?; let (language, _) = backend.get_backend_info()?; - try_vecmap(contracts, |contract| { - let functions = try_vecmap(contract.functions, |function| -> Result<_, BackendError> { - Ok(FunctionInfo { - name: function.name, - acir_opcodes: function.bytecode.opcodes.len(), - circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, - }) - })?; + let functions = try_vecmap(contract.functions, |function| -> Result<_, BackendError> { + Ok(FunctionInfo { + name: function.name, + acir_opcodes: function.bytecode.opcodes.len(), + circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, + }) + })?; - Ok(ContractInfo { name: contract.name, language, functions }) - }) + Ok(ContractInfo { name: contract.name, language, functions }) } diff --git a/tooling/nargo_cli/tests/compile_failure/multiple_contracts/Nargo.toml b/tooling/nargo_cli/tests/compile_failure/multiple_contracts/Nargo.toml new file 
mode 100644 index 00000000000..c71c86c664b --- /dev/null +++ b/tooling/nargo_cli/tests/compile_failure/multiple_contracts/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "multiple_contracts" +type = "contract" +authors = [""] +compiler_version = "0.9.0" + +[dependencies] diff --git a/tooling/nargo_cli/tests/compile_failure/multiple_contracts/src/main.nr b/tooling/nargo_cli/tests/compile_failure/multiple_contracts/src/main.nr new file mode 100644 index 00000000000..0562ca9ccd5 --- /dev/null +++ b/tooling/nargo_cli/tests/compile_failure/multiple_contracts/src/main.nr @@ -0,0 +1,4 @@ +contract Foo {} + + +contract Bar {} From c9b710f2918e69ad2cabd237f82c17764f9e121d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 13 Sep 2023 01:42:12 +0100 Subject: [PATCH 14/17] chore(ci): use mock backend for all tests (#2670) --- tooling/acvm_backend_barretenberg/src/cli/info.rs | 8 +------- .../test-binaries/mock_backend/src/info_cmd.rs | 1 + tooling/nargo_cli/build.rs | 3 +++ 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/tooling/acvm_backend_barretenberg/src/cli/info.rs b/tooling/acvm_backend_barretenberg/src/cli/info.rs index 680d9569030..5d9e662b6b7 100644 --- a/tooling/acvm_backend_barretenberg/src/cli/info.rs +++ b/tooling/acvm_backend_barretenberg/src/cli/info.rs @@ -72,8 +72,7 @@ impl InfoCommand { #[test] fn info_command() -> Result<(), BackendError> { - use acvm::acir::circuit::black_box_functions::BlackBoxFunc; - use acvm::acir::circuit::opcodes::{BlackBoxFuncCall, Opcode}; + use acvm::acir::circuit::opcodes::Opcode; use acvm::acir::native_types::Expression; @@ -85,10 +84,5 @@ fn info_command() -> Result<(), BackendError> { assert!(matches!(language, Language::PLONKCSat { width: 3 })); assert!(is_opcode_supported(&Opcode::Arithmetic(Expression::default()))); - assert!(!is_opcode_supported(&Opcode::BlackBoxFuncCall( - #[allow(deprecated)] - BlackBoxFuncCall::dummy(BlackBoxFunc::Keccak256) - ))); - Ok(()) } diff --git a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/info_cmd.rs b/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/info_cmd.rs index e3314c10911..043cef5934c 100644 --- a/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/info_cmd.rs +++ b/tooling/acvm_backend_barretenberg/test-binaries/mock_backend/src/info_cmd.rs @@ -14,6 +14,7 @@ const INFO_RESPONSE: &str = r#"{ "range", "sha256", "blake2s", + "keccak256", "schnorr_verify", "pedersen", "hash_to_field_128_security", diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index ad8988fba8e..ff941e41f36 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -70,6 +70,7 @@ fn execution_success_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); + cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("execute"); @@ -158,6 +159,7 @@ fn compile_success_contract_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); + cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("compile"); @@ -195,6 +197,7 @@ fn compile_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); + cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); 
cmd.arg("execute"); From f173c05cbff96dfc48a22cc2f1f76396b968d5a0 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 13 Sep 2023 01:43:43 +0100 Subject: [PATCH 15/17] feat: compile circuits and query circuit sizes in parallel for `nargo info` (#2665) --- tooling/nargo_cli/src/cli/compile_cmd.rs | 61 +++++++-------- tooling/nargo_cli/src/cli/info_cmd.rs | 96 +++++++++++------------- 2 files changed, 72 insertions(+), 85 deletions(-) diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 0574dfdf768..c769cb68ba5 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -63,26 +63,44 @@ pub(crate) fn run( let workspace = resolve_workspace_from_toml(&toml_path, selection)?; let circuit_dir = workspace.target_directory_path(); - let (np_language, is_opcode_supported) = backend.get_backend_info()?; - let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace - .members - .iter() + .into_iter() .filter(|package| !package.is_library()) + .cloned() .partition(|package| package.is_binary()); + let (compiled_programs, compiled_contracts) = + compile_workspace(backend, &binary_packages, &contract_packages, &args.compile_options)?; + + // Save build artifacts to disk. + for (package, program) in binary_packages.into_iter().zip(compiled_programs) { + save_program(program, &package, &circuit_dir, args.output_debug); + } + for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) { + save_contract(contract, &package, &circuit_dir, args.output_debug); + } + + Ok(()) +} + +pub(super) fn compile_workspace( + backend: &Backend, + binary_packages: &[Package], + contract_packages: &[Package], + compile_options: &CompileOptions, +) -> Result<(Vec, Vec), CliError> { + let (np_language, is_opcode_supported) = backend.get_backend_info()?; + // Compile all of the packages in parallel. let program_results: Vec<(FileManager, CompilationResult)> = binary_packages .par_iter() - .map(|package| { - compile_program(package, &args.compile_options, np_language, &is_opcode_supported) - }) + .map(|package| compile_program(package, compile_options, np_language, &is_opcode_supported)) .collect(); let contract_results: Vec<(FileManager, CompilationResult)> = contract_packages .par_iter() .map(|package| { - compile_contract(package, &args.compile_options, np_language, &is_opcode_supported) + compile_contract(package, compile_options, np_language, &is_opcode_supported) }) .collect(); @@ -90,25 +108,17 @@ pub(crate) fn run( let compiled_programs: Vec = program_results .into_iter() .map(|(file_manager, compilation_result)| { - report_errors(compilation_result, &file_manager, args.compile_options.deny_warnings) + report_errors(compilation_result, &file_manager, compile_options.deny_warnings) }) .collect::>()?; let compiled_contracts: Vec = contract_results .into_iter() .map(|(file_manager, compilation_result)| { - report_errors(compilation_result, &file_manager, args.compile_options.deny_warnings) + report_errors(compilation_result, &file_manager, compile_options.deny_warnings) }) .collect::>()?; - // Save build artifacts to disk. 
- for (package, program) in binary_packages.into_iter().zip(compiled_programs) { - save_program(program, package, &circuit_dir, args.output_debug); - } - for (package, compiled_contract) in contract_packages.into_iter().zip(compiled_contracts) { - save_contract(compiled_contract, package, &circuit_dir, args.output_debug); - } - - Ok(()) + Ok((compiled_programs, compiled_contracts)) } pub(crate) fn compile_bin_package( @@ -129,19 +139,6 @@ pub(crate) fn compile_bin_package( Ok(program) } -pub(crate) fn compile_contract_package( - package: &Package, - compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result { - let (file_manager, compilation_result) = - compile_contract(package, compile_options, np_language, &is_opcode_supported); - let contract_and_debug_artifact = - report_errors(compilation_result, &file_manager, compile_options.deny_warnings)?; - Ok(contract_and_debug_artifact) -} - fn compile_program( package: &Package, compile_options: &CompileOptions, diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index 3359a61aa76..ffa522d25b4 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,22 +1,19 @@ -use acvm::acir::circuit::Opcode; use acvm::Language; use acvm_backend_barretenberg::BackendError; use clap::Args; -use iter_extended::{try_vecmap, vecmap}; +use iter_extended::vecmap; use nargo::package::Package; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::CompileOptions; +use noirc_driver::{CompileOptions, CompiledContract, CompiledProgram}; use noirc_frontend::graph::CrateName; use prettytable::{row, table, Row}; +use rayon::prelude::*; use serde::Serialize; use crate::backends::Backend; use crate::errors::CliError; -use super::{ - compile_cmd::{compile_bin_package, compile_contract_package}, - NargoConfig, -}; +use super::{compile_cmd::compile_workspace, NargoConfig}; /// Provides detailed information on a circuit /// @@ -52,30 +49,30 @@ pub(crate) fn run( let selection = args.package.map_or(default_selection, PackageSelection::Selected); let workspace = resolve_workspace_from_toml(&toml_path, selection)?; - let mut info_report = InfoReport::default(); - - let (np_language, is_opcode_supported) = backend.get_backend_info()?; - for package in &workspace { - if package.is_contract() { - let contract_info = count_opcodes_and_gates_in_contracts( - backend, - package, - &args.compile_options, - np_language, - &is_opcode_supported, - )?; - info_report.contracts.push(contract_info); - } else { - let program_info = count_opcodes_and_gates_in_program( - backend, - package, - &args.compile_options, - np_language, - &is_opcode_supported, - )?; - info_report.programs.push(program_info); - } - } + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace + .into_iter() + .filter(|package| !package.is_library()) + .cloned() + .partition(|package| package.is_binary()); + + let (compiled_programs, compiled_contracts) = + compile_workspace(backend, &binary_packages, &contract_packages, &args.compile_options)?; + + let (np_language, _) = backend.get_backend_info()?; + let program_info = binary_packages + .into_par_iter() + .zip(compiled_programs) + .map(|(package, program)| { + count_opcodes_and_gates_in_program(backend, program, &package, np_language) + }) + .collect::>()?; + + let contract_info = compiled_contracts + .into_par_iter() + .map(|contract| 
count_opcodes_and_gates_in_contract(backend, contract, np_language)) + .collect::>()?; + + let info_report = InfoReport { programs: program_info, contracts: contract_info }; if args.json { // Expose machine-readable JSON data. @@ -169,15 +166,10 @@ impl From for Vec { fn count_opcodes_and_gates_in_program( backend: &Backend, + compiled_program: CompiledProgram, package: &Package, - compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, + language: Language, ) -> Result { - let compiled_program = - compile_bin_package(package, compile_options, np_language, &is_opcode_supported)?; - let (language, _) = backend.get_backend_info()?; - Ok(ProgramInfo { name: package.name.to_string(), language, @@ -186,24 +178,22 @@ fn count_opcodes_and_gates_in_program( }) } -fn count_opcodes_and_gates_in_contracts( +fn count_opcodes_and_gates_in_contract( backend: &Backend, - package: &Package, - compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, + contract: CompiledContract, + language: Language, ) -> Result { - let contract = - compile_contract_package(package, compile_options, np_language, &is_opcode_supported)?; - let (language, _) = backend.get_backend_info()?; - - let functions = try_vecmap(contract.functions, |function| -> Result<_, BackendError> { - Ok(FunctionInfo { - name: function.name, - acir_opcodes: function.bytecode.opcodes.len(), - circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, + let functions = contract + .functions + .into_par_iter() + .map(|function| -> Result<_, BackendError> { + Ok(FunctionInfo { + name: function.name, + acir_opcodes: function.bytecode.opcodes.len(), + circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, + }) }) - })?; + .collect::>()?; Ok(ContractInfo { name: contract.name, language, functions }) } From 14662ef726926ff08eb8f687363fa90feebcb157 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 13 Sep 2023 10:18:09 +0100 Subject: [PATCH 16/17] chore(ci): switch to using `Swatinem/rust-cache` action (#2671) --- .github/workflows/formatting.yml | 27 ++++------------- .github/workflows/publish.yml | 50 ++++++++------------------------ .github/workflows/test.yml | 27 ++++------------- .github/workflows/wasm.yml | 26 ++++------------- README.md | 1 - 5 files changed, 30 insertions(+), 101 deletions(-) diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 69bfe8d7ecf..8d29886e40c 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -17,13 +17,6 @@ jobs: name: cargo clippy runs-on: ${{ matrix.runner }} timeout-minutes: 30 - env: - CACHED_PATHS: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - target/ strategy: fail-fast: false @@ -36,13 +29,6 @@ jobs: - name: Checkout uses: actions/checkout@v4 - - name: Restore cargo cache - uses: actions/cache/restore@v3 - id: cache - with: - path: ${{ env.CACHED_PATHS }} - key: ${{ matrix.target }}-cargo-test-${{ hashFiles('**/Cargo.lock') }} - - name: Setup toolchain uses: dtolnay/rust-toolchain@master with: @@ -50,15 +36,14 @@ jobs: targets: ${{ matrix.target }} components: clippy, rustfmt + - uses: Swatinem/rust-cache@v2 + with: + key: ${{ matrix.target }} + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + - name: Run `cargo clippy` run: cargo clippy --workspace --locked --release - name: Run `cargo fmt` run: 
cargo fmt --all --check - - - uses: actions/cache/save@v3 - # Write a cache entry even if the tests fail but don't create any for the merge queue. - if: ${{ always() && steps.cache.outputs.cache-hit != 'true' && github.event_name != 'merge_group' }} - with: - path: ${{ env.CACHED_PATHS }} - key: ${{ steps.cache.outputs.cache-primary-key }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 34f2db7e639..b69970048bd 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -29,12 +29,6 @@ jobs: runs-on: macos-latest env: CROSS_CONFIG: ${{ github.workspace }}/.github/Cross.toml - CACHED_PATHS: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - target/ strategy: matrix: target: [x86_64-apple-darwin, aarch64-apple-darwin] @@ -52,28 +46,21 @@ jobs: echo "SDKROOT=$(xcrun -sdk macosx$(sw_vers -productVersion) --show-sdk-path)" >> $GITHUB_ENV echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx$(sw_vers -productVersion) --show-sdk-platform-version)" >> $GITHUB_ENV - - uses: actions/cache/restore@v3 - id: cache - with: - path: ${{ env.CACHED_PATHS }} - key: ${{ matrix.target }}-cargo-${{ hashFiles('**/Cargo.lock') }} - - name: Setup toolchain uses: dtolnay/rust-toolchain@1.66.0 with: targets: ${{ matrix.target }} + - uses: Swatinem/rust-cache@v2 + with: + key: ${{ matrix.target }} + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + - name: Build environment and Compile run: | cargo build --package nargo_cli --release --target ${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" - - uses: actions/cache/save@v3 - # Don't create cache entries for the merge queue. - if: ${{ steps.cache.outputs.cache-hit != 'true' && github.event_name != 'merge_group' }} - with: - path: ${{ env.CACHED_PATHS }} - key: ${{ steps.cache.outputs.cache-primary-key }} - - name: Package artifacts run: | mkdir dist @@ -111,12 +98,6 @@ jobs: runs-on: ubuntu-22.04 env: CROSS_CONFIG: ${{ github.workspace }}/.github/Cross.toml - CACHED_PATHS: | - ~/.cargo/bin/ - ~/.cargo/registry/index/ - ~/.cargo/registry/cache/ - ~/.cargo/git/db/ - target/ strategy: fail-fast: false matrix: @@ -128,17 +109,17 @@ jobs: with: ref: ${{ inputs.tag || env.GITHUB_REF }} - - uses: actions/cache/restore@v3 - id: cache - with: - path: ${{ env.CACHED_PATHS }} - key: ${{ matrix.target }}-cargo-${{ hashFiles('**/Cargo.lock') }} - - name: Setup toolchain uses: dtolnay/rust-toolchain@1.66.0 with: targets: ${{ matrix.target }} + - uses: Swatinem/rust-cache@v2 + with: + key: ${{ matrix.target }} + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + - name: Install Cross uses: taiki-e/install-action@v2 with: @@ -147,13 +128,6 @@ jobs: - name: Build Nargo run: cross build --package nargo_cli --release --target=${{ matrix.target }} --no-default-features --features "${{ inputs.features }}" - - uses: actions/cache/save@v3 - # Don't create cache entries for the merge queue. 
-        if: ${{ steps.cache.outputs.cache-hit != 'true' && github.event_name != 'merge_group' }}
-        with:
-          path: ${{ env.CACHED_PATHS }}
-          key: ${{ steps.cache.outputs.cache-primary-key }}
-
       - name: Package artifacts
         run: |
           mkdir dist
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e691c4ebd2e..e5a94aaac4b 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -17,13 +17,6 @@ jobs:
     name: Test on ${{ matrix.os }}
     runs-on: ${{ matrix.runner }}
     timeout-minutes: 30
-    env:
-      CACHED_PATHS: |
-        ~/.cargo/bin/
-        ~/.cargo/registry/index/
-        ~/.cargo/registry/cache/
-        ~/.cargo/git/db/
-        target/
 
     strategy:
       fail-fast: false
@@ -37,24 +30,16 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v4
 
-      - name: Restore nix store cache
-        uses: actions/cache/restore@v3
-        id: cache
-        with:
-          path: ${{ env.CACHED_PATHS }}
-          key: ${{ matrix.target }}-cargo-test-${{ hashFiles('**/Cargo.lock') }}
-
       - name: Setup toolchain
         uses: dtolnay/rust-toolchain@1.66.0
         with:
           targets: ${{ matrix.target }}
 
+      - uses: Swatinem/rust-cache@v2
+        with:
+          key: ${{ matrix.target }}
+          cache-on-failure: true
+          save-if: ${{ github.event_name != 'merge_group' }}
+
       - name: Run tests
         run: cargo test --workspace --locked --release
-
-      - uses: actions/cache/save@v3
-        # Write a cache entry even if the tests fail but don't create any for the merge queue.
-        if: ${{ always() && steps.cache.outputs.cache-hit != 'true' && github.event_name != 'merge_group' }}
-        with:
-          path: ${{ env.CACHED_PATHS }}
-          key: ${{ steps.cache.outputs.cache-primary-key }}
diff --git a/.github/workflows/wasm.yml b/.github/workflows/wasm.yml
index a9d7d4490c4..f02e71be4e6 100644
--- a/.github/workflows/wasm.yml
+++ b/.github/workflows/wasm.yml
@@ -14,13 +14,6 @@ concurrency:
 jobs:
   build-nargo:
     runs-on: ubuntu-22.04
-    env:
-      CACHED_PATHS: |
-        ~/.cargo/bin/
-        ~/.cargo/registry/index/
-        ~/.cargo/registry/cache/
-        ~/.cargo/git/db/
-        target/
     strategy:
       matrix:
         target: [x86_64-unknown-linux-gnu]
 
       - name: Checkout Noir repo
         uses: actions/checkout@v4
 
-      - uses: actions/cache/restore@v3
-        id: cache
-        with:
-          path: ${{ env.CACHED_PATHS }}
-          key: ${{ matrix.target }}-cargo-${{ hashFiles('**/Cargo.lock') }}
-
       - name: Setup toolchain
         uses: dtolnay/rust-toolchain@1.66.0
 
+      - uses: Swatinem/rust-cache@v2
+        with:
+          key: ${{ matrix.target }}
+          cache-on-failure: true
+          save-if: ${{ github.event_name != 'merge_group' }}
+
       - name: Build Nargo
         run: cargo build --package nargo_cli --release
 
-      - uses: actions/cache/save@v3
-        # Don't create cache entries for the merge queue.
-        if: ${{ steps.cache.outputs.cache-hit != 'true' && github.event_name != 'merge_group' }}
-        with:
-          path: ${{ env.CACHED_PATHS }}
-          key: ${{ steps.cache.outputs.cache-primary-key }}
-
       - name: Package artifacts
         run: |
           mkdir dist
diff --git a/README.md b/README.md
index 6958840fae8..22ace1fd3b4 100644
--- a/README.md
+++ b/README.md
@@ -34,7 +34,6 @@ ACIR Supported OPCODES:
 - Sha256
 - Blake2s
 - Schnorr signature verification
-- MerkleMembership
 - Pedersen
 - HashToField

From 8e93a5f4ccbcdb4fcd0f662d31ba4dd548aeae10 Mon Sep 17 00:00:00 2001
From: Jonathan Bursztyn
Date: Wed, 13 Sep 2023 10:20:45 +0100
Subject: [PATCH 17/17] chore: fix npm token for abi_wasm publishing (#2633)

Co-authored-by: Koby Hall <102518238+kobyhallx@users.noreply.github.com>
---
 .github/workflows/publish-abi_wasm.yml | 8 +++++++-
 tooling/noirc_abi_wasm/package.json    | 5 ++++-
 2 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/publish-abi_wasm.yml b/.github/workflows/publish-abi_wasm.yml
index e7167c681c5..ff0ea7f0cc7 100644
--- a/.github/workflows/publish-abi_wasm.yml
+++ b/.github/workflows/publish-abi_wasm.yml
@@ -16,7 +16,13 @@ jobs:
       - name: Checkout sources
         uses: actions/checkout@v3
 
-      - uses: cachix/install-nix-action@v20
+      - name: Setup Node.js
+        uses: actions/setup-node@v3
+        with:
+          registry-url: "https://registry.npmjs.org"
+          node-version: 18.15
+
+      - uses: cachix/install-nix-action@v22
         with:
           nix_path: nixpkgs=channel:nixos-23.05
           github_access_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json
index e32fb584e53..c315aa5819c 100644
--- a/tooling/noirc_abi_wasm/package.json
+++ b/tooling/noirc_abi_wasm/package.json
@@ -3,12 +3,15 @@
   "collaborators": [
     "The Noir Team "
   ],
-  "version": "0.8.0",
+  "version": "0.10.3",
   "files": [
     "nodejs",
     "web",
     "package.json"
   ],
+  "publishConfig": {
+    "access": "public"
+  },
   "main": "./nodejs/noirc_abi_wasm.js",
   "types": "./web/noirc_abi_wasm.d.ts",
   "module": "./web/noirc_abi_wasm.js",
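
How the two hunks in PATCH 17/17 fit together: `actions/setup-node` with a `registry-url` writes an `.npmrc` that authenticates to the npm registry through the `NODE_AUTH_TOKEN` environment variable, and `"publishConfig": { "access": "public" }` lets a scoped package be published publicly (scoped packages default to restricted access). The publish step itself is not part of this diff; the sketch below shows only the kind of step that would consume this setup, and the `NPM_TOKEN` secret name and working directory are assumptions, not taken from the workflow.

      - name: Publish to npm
        working-directory: ./tooling/noirc_abi_wasm   # assumed location of the built package
        run: npm publish                              # reads the .npmrc written by actions/setup-node
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}   # assumed secret holding the npm token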