From 4964bc4846ebfc768b80208bde012b438e3f59ea Mon Sep 17 00:00:00 2001
From: AztecBot
Date: Mon, 22 Apr 2024 09:24:23 +0000
Subject: [PATCH] chore: Improve `compute_note_hash_and_nullifier` autogeneration and `NoteProcessor` warnings (https://github.com/AztecProtocol/aztec-packages/pull/5838)

Closes https://github.com/AztecProtocol/aztec-packages/issues/5669, https://github.com/AztecProtocol/aztec-packages/issues/5670, https://github.com/AztecProtocol/aztec-packages/issues/4649

Correctly determines the signature of the autogenerated `compute_note_hash_and_nullifier` by looking up the serialized content size of the notes used in the contract. Furthermore, that length is compared against MAX_NOTE_FIELDS_LENGTH, and a compile-time error is emitted if the user attempts to use a note that is bigger than currently supported. Finally, changed the `NoteProcessor` warnings to be errors (even though this particular one shouldn't get that far anymore!)
---
 .aztec-sync-commit | 2 +-
 Cargo.lock | 28 +---
 Cargo.toml | 8 --
 acvm-repo/acir/Cargo.toml | 6 -
 acvm-repo/acir/benches/serialization.rs | 123 ------------------
 acvm-repo/acvm_js/build.sh | 5 +-
 aztec_macros/src/lib.rs | 13 +-
 .../compute_note_hash_and_nullifier.rs | 94 +++++++++++--
 aztec_macros/src/transforms/functions.rs | 19 +--
 aztec_macros/src/transforms/storage.rs | 60 +++------
 aztec_macros/src/utils/hir_utils.rs | 84 +++++++++++-
 compiler/noirc_frontend/src/tests.rs | 2 +-
 noir_stdlib/src/collections/bounded_vec.nr | 61 ---------
 scripts/benchmark_start.sh | 3 -
 scripts/benchmark_stop.sh | 3 -
 tooling/backend_interface/src/cli/info.rs | 2 +-
 tooling/bb_abstraction_leaks/build.rs | 2 +-
 tooling/debugger/ignored-tests.txt | 2 -
 tooling/nargo_cli/Cargo.toml | 8 +-
 .../noir_js_backend_barretenberg/package.json | 2 +-
 yarn.lock | 13 +-
 21 files changed, 219 insertions(+), 321 deletions(-)
 delete mode 100644 acvm-repo/acir/benches/serialization.rs
 delete mode 100755 scripts/benchmark_start.sh
 delete mode 100755 scripts/benchmark_stop.sh

diff --git a/.aztec-sync-commit b/.aztec-sync-commit
index 1cb130e7c58..24fa2c2c433 100644
--- a/.aztec-sync-commit
+++ b/.aztec-sync-commit
@@ -1 +1 @@
-beab8c93857536e07fa37994213fc664a5864013
+566f25c25744501ce1ae31243820ef549d9b1f30
diff --git a/Cargo.lock b/Cargo.lock
index e83c10a1932..ee83f7f8ddf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -10,10 +10,8 @@ dependencies = [
 "base64 0.21.2",
 "bincode",
 "brillig",
- "criterion",
 "flate2",
 "fxhash",
- "pprof 0.13.0",
 "serde",
 "serde-big-array",
 "serde-generate",
@@ -626,7 +624,7 @@ dependencies = [
 "lazy_static",
 "noir_grumpkin",
 "num-bigint",
- "pprof 0.12.1",
+ "pprof",
 "thiserror",
 "wasm-bindgen-futures",
 "wasmer",
@@ -2868,7 +2866,7 @@ dependencies = [
 "notify",
 "notify-debouncer-full",
 "paste",
- "pprof 0.13.0",
+ "pprof",
 "predicates 2.1.5",
 "prettytable-rs",
 "rayon",
@@ -3549,28 +3547,6 @@ dependencies = [
 "thiserror",
 ]
 
-[[package]]
-name = "pprof"
-version = "0.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef5c97c51bd34c7e742402e216abdeb44d415fbe6ae41d56b114723e953711cb"
-dependencies = [
- "backtrace",
- "cfg-if 1.0.0",
- "criterion",
- "findshlibs",
- "inferno",
- "libc",
- "log",
- "nix 0.26.4",
- "once_cell",
- "parking_lot 0.12.1",
- "smallvec",
- "symbolic-demangle",
- "tempfile",
- "thiserror",
-]
-
 [[package]]
 name = "ppv-lite86"
 version = "0.2.17"
diff --git a/Cargo.toml b/Cargo.toml
index 6a939878f9f..5dd453415aa 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -104,14 +104,6 @@ chumsky = { git = 
"https://github.com/jfecher/chumsky", rev = "ad9d312", default "ahash", "std", ] } - -# Benchmarking -criterion = "0.5.0" -# Note that using the "frame-pointer" feature breaks framegraphs on linux -# https://github.com/tikv/pprof-rs/pull/172 -pprof = { version = "0.13", features = ["flamegraph","criterion"] } - - dirs = "4" serde = { version = "1.0.136", features = ["derive"] } serde_json = "1.0" diff --git a/acvm-repo/acir/Cargo.toml b/acvm-repo/acir/Cargo.toml index d6990f83281..4fae9ea20ff 100644 --- a/acvm-repo/acir/Cargo.toml +++ b/acvm-repo/acir/Cargo.toml @@ -29,14 +29,8 @@ strum_macros = "0.24" serde-reflection = "0.3.6" serde-generate = "0.25.1" fxhash.workspace = true -criterion.workspace = true -pprof.workspace = true [features] default = ["bn254"] bn254 = ["acir_field/bn254", "brillig/bn254"] bls12_381 = ["acir_field/bls12_381", "brillig/bls12_381"] - -[[bench]] -name = "serialization" -harness = false diff --git a/acvm-repo/acir/benches/serialization.rs b/acvm-repo/acir/benches/serialization.rs deleted file mode 100644 index 73e3916a73b..00000000000 --- a/acvm-repo/acir/benches/serialization.rs +++ /dev/null @@ -1,123 +0,0 @@ -use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; -use std::{collections::BTreeSet, time::Duration}; - -use acir::{ - circuit::{Circuit, ExpressionWidth, Opcode, Program, PublicInputs}, - native_types::{Expression, Witness}, - FieldElement, -}; - -use pprof::criterion::{Output, PProfProfiler}; - -const SIZES: [usize; 9] = [10, 50, 100, 500, 1000, 5000, 10000, 50000, 100000]; - -fn sample_program(num_opcodes: usize) -> Program { - let assert_zero_opcodes: Vec = (0..num_opcodes) - .map(|i| { - Opcode::AssertZero(Expression { - mul_terms: vec![( - FieldElement::from(2 * i), - Witness(i as u32), - Witness(i as u32 + 10), - )], - linear_combinations: vec![ - (FieldElement::from(2 * i), Witness(i as u32)), - (FieldElement::from(3 * i), Witness(i as u32 + 1)), - ], - q_c: FieldElement::from(i), - }) - }) - .collect(); - - Program { - functions: vec![Circuit { - current_witness_index: 4000, - opcodes: assert_zero_opcodes.to_vec(), - expression_width: ExpressionWidth::Bounded { width: 3 }, - private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]), - public_parameters: PublicInputs(BTreeSet::from([Witness(5)])), - return_values: PublicInputs(BTreeSet::from([Witness(6)])), - assert_messages: Vec::new(), - recursive: false, - }], - } -} - -fn bench_serialization(c: &mut Criterion) { - let mut group = c.benchmark_group("serialize_program"); - for size in SIZES.iter() { - let program = sample_program(*size); - - group.throughput(Throughput::Elements(*size as u64)); - group.bench_with_input(BenchmarkId::from_parameter(size), &program, |b, program| { - b.iter(|| Program::serialize_program(program)); - }); - } - group.finish(); - - let mut group = c.benchmark_group("serialize_program_json"); - for size in SIZES.iter() { - let program = sample_program(*size); - - group.throughput(Throughput::Elements(*size as u64)); - group.bench_with_input(BenchmarkId::from_parameter(size), &program, |b, program| { - b.iter(|| { - let mut bytes = Vec::new(); - let mut serializer = serde_json::Serializer::new(&mut bytes); - Program::serialize_program_base64(program, &mut serializer) - }); - }); - } - group.finish(); -} - -fn bench_deserialization(c: &mut Criterion) { - let mut group = c.benchmark_group("deserialize_program"); - for size in SIZES.iter() { - let program = sample_program(*size); - let serialized_program = 
Program::serialize_program(&program); - - group.throughput(Throughput::Elements(*size as u64)); - group.bench_with_input( - BenchmarkId::from_parameter(size), - &serialized_program, - |b, program| { - b.iter(|| Program::deserialize_program(program)); - }, - ); - } - group.finish(); - - let mut group = c.benchmark_group("deserialize_program_json"); - for size in SIZES.iter() { - let program = sample_program(*size); - - let serialized_program = { - let mut bytes = Vec::new(); - let mut serializer = serde_json::Serializer::new(&mut bytes); - Program::serialize_program_base64(&program, &mut serializer).expect("should succeed"); - bytes - }; - - group.throughput(Throughput::Elements(*size as u64)); - group.bench_with_input( - BenchmarkId::from_parameter(size), - &serialized_program, - |b, program| { - b.iter(|| { - let mut deserializer = serde_json::Deserializer::from_slice(program); - Program::deserialize_program_base64(&mut deserializer) - }); - }, - ); - } - group.finish(); -} - -criterion_group!( - name = benches; - config = Criterion::default().sample_size(40).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); - targets = bench_serialization, bench_deserialization -); - -criterion_main!(benches); diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh index 4486a214c9c..58724dee02c 100755 --- a/acvm-repo/acvm_js/build.sh +++ b/acvm-repo/acvm_js/build.sh @@ -25,7 +25,6 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') @@ -49,5 +48,5 @@ BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm run_or_fail cargo build --lib --release --target $TARGET --package ${pname} run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_or_fail wasm-opt $NODE_WASM -o $NODE_WASM -O -run_or_fail wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O +run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O +run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index dff3193a327..17ae999fb8f 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -93,17 +93,18 @@ fn transform_module( // Check for a user defined storage struct let maybe_storage_struct_name = check_for_storage_definition(module)?; + let storage_defined = maybe_storage_struct_name.is_some(); - if let Some(storage_struct_name) = maybe_storage_struct_name { - if !check_for_storage_implementation(module, &storage_struct_name) { - generate_storage_implementation(module, &storage_struct_name)?; + if let Some(ref storage_struct_name) = maybe_storage_struct_name { + if !check_for_storage_implementation(module, storage_struct_name) { + generate_storage_implementation(module, storage_struct_name)?; } // Make sure we're only generating the storage layout for the root crate // In case we got a contract importing other contracts for their interface, we // don't want to generate the storage layout for them if crate_id == context.root_crate_id() { - generate_storage_layout(module, storage_struct_name)?; + generate_storage_layout(module, storage_struct_name.clone())?; } } @@ -164,14 +165,14 @@ fn transform_module( transform_function( fn_type, func, - storage_defined, + maybe_storage_struct_name.clone(), is_initializer, insert_init_check, is_internal, )?; 
has_transformed_module = true;
             } else if storage_defined && func.def.is_unconstrained {
-                transform_unconstrained(func);
+                transform_unconstrained(func, maybe_storage_struct_name.clone().unwrap());
                 has_transformed_module = true;
             }
         }
diff --git a/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs b/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs
index 4ff97a5dcae..c8e7e807d87 100644
--- a/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs
+++ b/aztec_macros/src/transforms/compute_note_hash_and_nullifier.rs
@@ -7,7 +7,10 @@ use noirc_frontend::{
 
 use crate::utils::{
     errors::AztecMacroError,
-    hir_utils::{collect_crate_functions, fetch_notes, get_contract_module_data, inject_fn},
+    hir_utils::{
+        collect_crate_functions, collect_traits, fetch_notes, get_contract_module_data,
+        get_global_numberic_const, get_serialized_length, inject_fn,
+    },
 };
 
 // Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,Field,[Field; N]) -> [Field; 4]" is defined
@@ -59,13 +62,68 @@ pub fn inject_compute_note_hash_and_nullifier(
         return Ok(());
     }
 
+    let traits: Vec<_> = collect_traits(context);
+
+    // Get MAX_NOTE_FIELDS_LENGTH global to check if the notes in our contract are too long.
+    let max_note_length_const = get_global_numberic_const(context, "MAX_NOTE_FIELDS_LENGTH")
+        .map_err(|err| {
+            (
+                AztecMacroError::CouldNotImplementComputeNoteHashAndNullifier {
+                    secondary_message: Some(err.primary_message),
+                },
+                file_id,
+            )
+        })?;
+
     // In order to implement compute_note_hash_and_nullifier, we need to know all of the different note types the
-    // contract might use. These are the types that are marked as #[aztec(note)].
+    // contract might use and their serialized lengths. These are the types that are marked as #[aztec(note)].
+    let mut notes_and_lengths = vec![];
+
+    for (path, typ) in fetch_notes(context) {
+        let serialized_len: u128 = get_serialized_length(
+            &traits,
+            "NoteInterface",
+            &Type::Struct(typ.clone(), vec![]),
+            &context.def_interner,
+        )
+        .map_err(|_err| {
+            (
+                AztecMacroError::CouldNotImplementComputeNoteHashAndNullifier {
+                    secondary_message: Some(format!(
+                        "Failed to get serialized length for note type {}",
+                        path
+                    )),
+                },
+                file_id,
+            )
+        })?
+        .into();
+
+        if serialized_len > max_note_length_const {
+            return Err((
+                AztecMacroError::CouldNotImplementComputeNoteHashAndNullifier {
+                    secondary_message: Some(format!(
+                        "Note type {} has {} fields, which is more than the maximum allowed length of {}.",
+                        path,
+                        serialized_len,
+                        max_note_length_const
+                    )),
+                },
+                file_id,
+            ));
+        }
+
+        notes_and_lengths.push((path.to_string(), serialized_len));
+    }
+
+    let max_note_length: u128 =
+        *notes_and_lengths.iter().map(|(_, serialized_len)| serialized_len).max().unwrap_or(&0);
+
     let note_types =
-        fetch_notes(context).iter().map(|(path, _)| path.to_string()).collect::<Vec<_>>();
+        notes_and_lengths.iter().map(|(note_type, _)| note_type.clone()).collect::<Vec<_>>();
 
     // We can now generate a version of compute_note_hash_and_nullifier tailored for the contract in this crate.
-    let func = generate_compute_note_hash_and_nullifier(&note_types);
+    let func = generate_compute_note_hash_and_nullifier(&note_types, max_note_length);
 
     // And inject the newly created function into the contract.
@@ -85,8 +143,12 @@ pub fn inject_compute_note_hash_and_nullifier( Ok(()) } -fn generate_compute_note_hash_and_nullifier(note_types: &[String]) -> NoirFunction { - let function_source = generate_compute_note_hash_and_nullifier_source(note_types); +fn generate_compute_note_hash_and_nullifier( + note_types: &[String], + max_note_length: u128, +) -> NoirFunction { + let function_source = + generate_compute_note_hash_and_nullifier_source(note_types, max_note_length); let (function_ast, errors) = parse_program(&function_source); if !errors.is_empty() { @@ -98,25 +160,30 @@ fn generate_compute_note_hash_and_nullifier(note_types: &[String]) -> NoirFuncti function_ast.functions.remove(0) } -fn generate_compute_note_hash_and_nullifier_source(note_types: &[String]) -> String { +fn generate_compute_note_hash_and_nullifier_source( + note_types: &[String], + max_note_length: u128, +) -> String { // TODO(#4649): The serialized_note parameter is a fixed-size array, but we don't know what length it should have. // For now we hardcode it to 20, which is the same as MAX_NOTE_FIELDS_LENGTH. if note_types.is_empty() { // Even if the contract does not include any notes, other parts of the stack expect for this function to exist, // so we include a dummy version. - " + format!( + " unconstrained fn compute_note_hash_and_nullifier( contract_address: dep::aztec::protocol_types::address::AztecAddress, nonce: Field, storage_slot: Field, note_type_id: Field, - serialized_note: [Field; 20] - ) -> pub [Field; 4] { + serialized_note: [Field; {}] + ) -> pub [Field; 4] {{ assert(false, \"This contract does not use private notes\"); [0, 0, 0, 0] - }" - .to_string() + }}", + max_note_length + ) } else { // For contracts that include notes we do a simple if-else chain comparing note_type_id with the different // get_note_type_id of each of the note types. 
@@ -141,12 +208,13 @@ fn generate_compute_note_hash_and_nullifier_source(note_types: &[String]) -> Str nonce: Field, storage_slot: Field, note_type_id: Field, - serialized_note: [Field; 20] + serialized_note: [Field; {}] ) -> pub [Field; 4] {{ let note_header = dep::aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); {} }}", + max_note_length, full_if_statement ) } diff --git a/aztec_macros/src/transforms/functions.rs b/aztec_macros/src/transforms/functions.rs index 534d24289b7..8e90d40aaec 100644 --- a/aztec_macros/src/transforms/functions.rs +++ b/aztec_macros/src/transforms/functions.rs @@ -26,7 +26,7 @@ use crate::{ pub fn transform_function( ty: &str, func: &mut NoirFunction, - storage_defined: bool, + storage_struct_name: Option, is_initializer: bool, insert_init_check: bool, is_internal: bool, @@ -54,8 +54,8 @@ pub fn transform_function( } // Add access to the storage struct - if storage_defined { - let storage_def = abstract_storage(&ty.to_lowercase(), false); + if let Some(storage_struct_name) = storage_struct_name { + let storage_def = abstract_storage(storage_struct_name, &ty.to_lowercase(), false); func.def.body.statements.insert(0, storage_def); } @@ -206,8 +206,11 @@ pub fn export_fn_abi( /// ``` /// /// This will allow developers to access their contract' storage struct in unconstrained functions -pub fn transform_unconstrained(func: &mut NoirFunction) { - func.def.body.statements.insert(0, abstract_storage("Unconstrained", true)); +pub fn transform_unconstrained(func: &mut NoirFunction, storage_struct_name: String) { + func.def + .body + .statements + .insert(0, abstract_storage(storage_struct_name, "Unconstrained", true)); } /// Helper function that returns what the private context would look like in the ast @@ -572,7 +575,7 @@ fn abstract_return_values(func: &NoirFunction) -> Result>, /// unconstrained fn lol() { /// let storage = Storage::init(Context::none()); /// } -fn abstract_storage(typ: &str, unconstrained: bool) -> Statement { +fn abstract_storage(storage_struct_name: String, typ: &str, unconstrained: bool) -> Statement { let init_context_call = if unconstrained { call( variable_path(chained_dep!("aztec", "context", "Context", "none")), // Path @@ -588,8 +591,8 @@ fn abstract_storage(typ: &str, unconstrained: bool) -> Statement { assignment( "storage", // Assigned to call( - variable_path(chained_path!("Storage", "init")), // Path - vec![init_context_call], // args + variable_path(chained_path!(storage_struct_name.as_str(), "init")), // Path + vec![init_context_call], // args ), ) } diff --git a/aztec_macros/src/transforms/storage.rs b/aztec_macros/src/transforms/storage.rs index 9135be32443..66057108517 100644 --- a/aztec_macros/src/transforms/storage.rs +++ b/aztec_macros/src/transforms/storage.rs @@ -1,12 +1,10 @@ -use std::borrow::Borrow; - use noirc_errors::Span; use noirc_frontend::{ graph::CrateId, macros_api::{ FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, }, - node_interner::{TraitId, TraitImplKind}, + node_interner::TraitId, parse_program, parser::SortedModule, token::SecondaryAttribute, @@ -23,7 +21,9 @@ use crate::{ make_type, pattern, return_type, variable, variable_path, }, errors::AztecMacroError, - hir_utils::{collect_crate_structs, collect_traits, get_contract_module_data}, + hir_utils::{ + collect_crate_structs, collect_traits, get_contract_module_data, get_serialized_length, + }, }, }; @@ -196,7 +196,7 @@ pub fn generate_storage_implementation( } /// Obtains the serialized length of 
a type that implements the Serialize trait. -fn get_serialized_length( +pub fn get_storage_serialized_length( traits: &[TraitId], typ: &Type, interner: &NodeInterner, @@ -214,48 +214,22 @@ fn get_serialized_length( secondary_message: Some("State storage variable must be generic".to_string()), })?; - let is_note = traits.iter().any(|&trait_id| { - let r#trait = interner.get_trait(trait_id); - r#trait.name.0.contents == "NoteInterface" - && !interner.lookup_all_trait_implementations(stored_in_state, trait_id).is_empty() - }); + let is_note = match stored_in_state { + Type::Struct(typ, _) => interner + .struct_attributes(&typ.borrow().id) + .iter() + .any(|attr| is_custom_attribute(attr, "aztec(note)")), + _ => false, + }; // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicMutable for whatever reason though. if struct_name == "Map" || (is_note && struct_name != "PublicMutable") { return Ok(1); } - let serialized_trait_impl_kind = traits - .iter() - .find_map(|&trait_id| { - let r#trait = interner.get_trait(trait_id); - if r#trait.borrow().name.0.contents == "Serialize" - && r#trait.borrow().generics.len() == 1 - { - interner - .lookup_all_trait_implementations(stored_in_state, trait_id) - .into_iter() - .next() - } else { - None - } - }) - .ok_or(AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some("Stored data must implement Serialize trait".to_string()), - })?; - - let serialized_trait_impl_id = match serialized_trait_impl_kind { - TraitImplKind::Normal(trait_impl_id) => Ok(trait_impl_id), - _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), - }?; - - let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); - let serialized_trait_impl = serialized_trait_impl_shared.borrow(); - - match serialized_trait_impl.trait_generics.first().unwrap() { - Type::Constant(value) => Ok(*value), - _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), - } + get_serialized_length(traits, "Serialize", stored_in_state, interner).map_err(|err| { + AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some(err.primary_message) } + }) } /// Assigns storage slots to the storage struct fields based on the serialized length of the types. This automatic assignment @@ -436,7 +410,7 @@ pub fn assign_storage_slots( }; let type_serialized_len = - get_serialized_length(&traits, field_type, &context.def_interner) + get_storage_serialized_length(&traits, field_type, &context.def_interner) .map_err(|err| (err, file_id))?; context.def_interner.update_expression(new_call_expression.arguments[1], |expr| { @@ -504,7 +478,7 @@ pub fn generate_storage_layout( let (struct_ast, errors) = parse_program(&storage_fields_source); if !errors.is_empty() { dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { + return Err(AztecMacroError::CouldNotExportStorageLayout { secondary_message: Some("Failed to parse Noir macro code (struct StorageLayout). 
This is either a bug in the compiler or the Noir macro code".to_string()), span: None }); diff --git a/aztec_macros/src/utils/hir_utils.rs b/aztec_macros/src/utils/hir_utils.rs index ae895d2075c..3b2f14fd87e 100644 --- a/aztec_macros/src/utils/hir_utils.rs +++ b/aztec_macros/src/utils/hir_utils.rs @@ -7,8 +7,11 @@ use noirc_frontend::{ resolution::{path_resolver::StandardPathResolver, resolver::Resolver}, type_check::type_check_func, }, - macros_api::{FileId, HirContext, MacroError, ModuleDefId, StructId}, - node_interner::{FuncId, TraitId}, + macros_api::{ + FileId, HirContext, HirExpression, HirLiteral, MacroError, ModuleDefId, NodeInterner, + StructId, + }, + node_interner::{FuncId, TraitId, TraitImplKind}, ItemVisibility, LetStatement, NoirFunction, Shared, Signedness, StructType, Type, }; @@ -309,3 +312,80 @@ fn find_non_contract_dependencies_bfs( }) }) } + +pub fn get_serialized_length( + traits: &[TraitId], + trait_name: &str, + typ: &Type, + interner: &NodeInterner, +) -> Result { + let serialized_trait_impl_kind = traits + .iter() + .find_map(|&trait_id| { + let r#trait = interner.get_trait(trait_id); + if r#trait.name.0.contents == trait_name && r#trait.generics.len() == 1 { + interner.lookup_all_trait_implementations(typ, trait_id).into_iter().next() + } else { + None + } + }) + .ok_or(MacroError { + primary_message: format!("Type {} must implement {} trait", typ, trait_name), + secondary_message: None, + span: None, + })?; + + let serialized_trait_impl_id = match serialized_trait_impl_kind { + TraitImplKind::Normal(trait_impl_id) => Ok(trait_impl_id), + _ => Err(MacroError { + primary_message: format!("{} trait impl for {} must not be assumed", trait_name, typ), + secondary_message: None, + span: None, + }), + }?; + + let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); + let serialized_trait_impl = serialized_trait_impl_shared.borrow(); + + match serialized_trait_impl.trait_generics.first().unwrap() { + Type::Constant(value) => Ok(*value), + _ => Err(MacroError { + primary_message: format!("{} length for {} must be a constant", trait_name, typ), + secondary_message: None, + span: None, + }), + } +} + +pub fn get_global_numberic_const( + context: &HirContext, + const_name: &str, +) -> Result { + context + .def_interner + .get_all_globals() + .iter() + .find_map(|global_info| { + if global_info.ident.0.contents == const_name { + let stmt = context.def_interner.get_global_let_statement(global_info.id); + if let Some(let_stmt) = stmt { + let expression = context.def_interner.expression(&let_stmt.expression); + match expression { + HirExpression::Literal(HirLiteral::Integer(value, _)) => { + Some(value.to_u128()) + } + _ => None, + } + } else { + None + } + } else { + None + } + }) + .ok_or(MacroError { + primary_message: format!("Could not find {} global constant", const_name), + secondary_message: None, + span: None, + }) +} diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index c4f0a8d67ba..e4d308fbb6b 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -66,7 +66,7 @@ mod test { // Allocate a default Module for the root, giving it a ModuleId let mut modules: Arena = Arena::default(); let location = Location::new(Default::default(), root_file_id); - let root = modules.insert(ModuleData::new(None, location, false)); + let root = modules.insert(ModuleData::new(None, None, location, false)); let def_map = CrateDefMap { root: LocalModuleId(root), diff --git 
a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index b18a9e9ec5c..c789bc386ef 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -1,5 +1,3 @@ -use crate::cmp::Eq; - struct BoundedVec { storage: [T; MaxLen], len: u64, @@ -95,62 +93,3 @@ impl BoundedVec { ret } } - -impl Eq for BoundedVec where T: Eq { - fn eq(self, other: BoundedVec) -> bool { - let mut ret = self.len == other.len; - let mut exceeded_len = false; - for i in 0..MaxLen { - exceeded_len |= i == self.len; - if !exceeded_len { - ret &= self.storage[i] == other.storage[i]; - } - } - ret - } -} - -mod bounded_vec_tests { - // TODO: Allow imports from "super" - use crate::collections::bounded_vec::BoundedVec; - - #[test] - fn empty_equality() { - let mut bounded_vec1: BoundedVec = BoundedVec::new(); - let mut bounded_vec2: BoundedVec = BoundedVec::new(); - - assert_eq(bounded_vec1, bounded_vec2); - } - - #[test] - fn inequality() { - let mut bounded_vec1: BoundedVec = BoundedVec::new(); - let mut bounded_vec2: BoundedVec = BoundedVec::new(); - bounded_vec1.push(1); - bounded_vec2.push(2); - - assert(bounded_vec1 != bounded_vec2); - } - - #[test] - fn equality_respects_specified_length() { - let mut bounded_vec1: BoundedVec = BoundedVec::new(); - bounded_vec1.push(1); - - // This BoundedVec has an extra value past the end of its specified length, - // this should be ignored when checking equality so they are considered equal. - let mut bounded_vec2: BoundedVec = BoundedVec { storage: [1, 2, 0], len: 1 }; - - assert_eq(bounded_vec1, bounded_vec2); - - // Pushing another entry onto `bounded_vec1` to make the underlying arrays equal should - // result in the `BoundedVec`s being unequal as their lengths are different. 
- bounded_vec1.push(2); - - assert(bounded_vec1 != bounded_vec2); - - bounded_vec2.push(2); - - assert_eq(bounded_vec1, bounded_vec2); - } -} diff --git a/scripts/benchmark_start.sh b/scripts/benchmark_start.sh deleted file mode 100755 index 3e69b3d2c65..00000000000 --- a/scripts/benchmark_start.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -echo -1 | sudo tee /proc/sys/kernel/perf_event_paranoid diff --git a/scripts/benchmark_stop.sh b/scripts/benchmark_stop.sh deleted file mode 100755 index 964e5291817..00000000000 --- a/scripts/benchmark_stop.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -echo 4 | sudo tee /proc/sys/kernel/perf_event_paranoid diff --git a/tooling/backend_interface/src/cli/info.rs b/tooling/backend_interface/src/cli/info.rs index 6e6603ce53e..8ca3d4dd0a3 100644 --- a/tooling/backend_interface/src/cli/info.rs +++ b/tooling/backend_interface/src/cli/info.rs @@ -56,7 +56,7 @@ fn info_command() -> Result<(), BackendError> { let expression_width = InfoCommand { crs_path }.run(backend.binary_path())?; - assert!(matches!(expression_width, ExpressionWidth::Bounded { width: 4 })); + assert!(matches!(expression_width, ExpressionWidth::Bounded { width: 3 })); Ok(()) } diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs index b3dfff9e94c..0f9770c805d 100644 --- a/tooling/bb_abstraction_leaks/build.rs +++ b/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.35.1"; +const VERSION: &str = "0.34.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/tooling/debugger/ignored-tests.txt b/tooling/debugger/ignored-tests.txt index 40d32072843..4507aeb8545 100644 --- a/tooling/debugger/ignored-tests.txt +++ b/tooling/debugger/ignored-tests.txt @@ -15,5 +15,3 @@ to_bytes_integration fold_basic fold_basic_nested_call fold_call_witness_condition -fold_after_inlined_calls - diff --git a/tooling/nargo_cli/Cargo.toml b/tooling/nargo_cli/Cargo.toml index 111caaa9c92..1629ae86edb 100644 --- a/tooling/nargo_cli/Cargo.toml +++ b/tooling/nargo_cli/Cargo.toml @@ -72,9 +72,13 @@ assert_cmd = "2.0.8" assert_fs = "1.0.10" predicates = "2.1.5" fm.workspace = true -criterion.workspace = true -pprof.workspace = true +criterion = "0.5.0" paste = "1.0.14" +pprof = { version = "0.12", features = [ + "flamegraph", + "frame-pointer", + "criterion", +] } iai = "0.1.1" test-binary = "3.0.2" diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index af9e47a8e63..438e91ff302 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.35.1", + "@aztec/bb.js": "portal:../../../../barretenberg/ts", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/yarn.lock b/yarn.lock index e9915882fac..b45678f5d8b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,19 +221,18 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.35.1": - version: 0.35.1 - resolution: "@aztec/bb.js@npm:0.35.1" +"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": + version: 0.0.0-use.local + resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: dest/node/main.js - checksum: 8e3551f059523d9494af4721a9219e2c6e63c8ed1df447a2d0daa9f8526a794758ae708bd1d9c9b1fbfb89c56dc867d9f0b87250dbabfcde23ec02dabbb5a32a + bb.js: ./dest/node/main.js languageName: node - linkType: hard + linkType: soft "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4396,7 +4395,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.35.1 + "@aztec/bb.js": "portal:../../../../barretenberg/ts" "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3