diff --git a/.noir-sync-commit b/.noir-sync-commit index c59540a2d12..e9d72241d77 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -e3cdebe515e4dc4ee6e16e01bd8af25135939798 +164d29e4d1960d16fdeafe2cc8ea8144a769f7b2 diff --git a/noir-projects/noir-protocol-circuits/Nargo.template.toml b/noir-projects/noir-protocol-circuits/Nargo.template.toml index e575b7ea742..6e29c88ecc0 100644 --- a/noir-projects/noir-protocol-circuits/Nargo.template.toml +++ b/noir-projects/noir-protocol-circuits/Nargo.template.toml @@ -1,7 +1,7 @@ [workspace] members = [ "crates/types", - "crates/blob", +# "crates/blob", "crates/parity-base", "crates/parity-lib", "crates/parity-root", diff --git a/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr b/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr index 1a5ca5e8fdc..d4b4b06b957 100644 --- a/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/blob/src/main.nr @@ -58,15 +58,15 @@ unconstrained fn __batch_invert_impl(mut x: [F; N]) -> [F; N] { unconstrained fn __compute_fracs(z: F, ys: [F; FIELDS_PER_BLOB]) -> [F; FIELDS_PER_BLOB] { let mut denoms: [F; FIELDS_PER_BLOB] = [BigNum::new(); FIELDS_PER_BLOB]; for i in 0..FIELDS_PER_BLOB { - denoms[i] = z.__add(NEGATIVE_ROOTS[i]); // (z - ω^i) + denoms[i] = z.__add(NEGATIVE_ROOTS[i]); // (z - omega^i) } - let inv_denoms = __batch_invert_impl(denoms); // 1 / (z - ω^i), for all i + let inv_denoms = __batch_invert_impl(denoms); // 1 / (z - omega^i), for all i let mut fracs: [F; FIELDS_PER_BLOB] = [BigNum::new(); FIELDS_PER_BLOB]; for i in 0..FIELDS_PER_BLOB { - let inv_denom = inv_denoms[i]; // 1 / (z - ω^i) + let inv_denom = inv_denoms[i]; // 1 / (z - omega^i) - fracs[i] = ys[i].__mul(inv_denom); // y_i / (z - ω^i) + fracs[i] = ys[i].__mul(inv_denom); // y_i / (z - omega^i) } fracs } @@ -260,9 +260,9 @@ fn main(blob: [F; FIELDS_PER_BLOB], kzg_commitment: [Field; 2]) -> pub (Field, F /** * ___d-1 - * z^d - 1 \ ω^i + * z^d - 1 \ omega^i * p(z) = --------- . / y_i . --------- - * d /____ z - ω^i + * d /____ z - omega^i * i=0 * * p(z) = factor . sum( y_i . num / denom ) @@ -271,7 +271,7 @@ fn main(blob: [F; FIELDS_PER_BLOB], kzg_commitment: [Field; 2]) -> pub (Field, F * where d = 4096 * * Precompute: - * - The d roots of unity ω^i (plus maybe their negatives for z - ω^i computations). + * - The d roots of unity omega^i (plus maybe their negatives for z - omega^i computations). * - (1 / d) * * @param z @@ -342,13 +342,13 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { let mut sum: F = BigNum::new(); // Making a call to this function causes a "stack too deep" error, so I've put the body of that function here, instead: - // let fracs = __compute_fracs(z, ys); // { y_i / (z - ω^i) } + // let fracs = __compute_fracs(z, ys); // { y_i / (z - omega^i) } // Note: it's more efficient (saving 30k constraints) to compute: // ___d-1 // \ / y_i \ - // / | --------- | . ω^i - // /____ \ z - ω^i / + // / | --------- | . omega^i + // /____ \ z - omega^i / // i=0 // ^^^^^^^^^ // frac @@ -356,34 +356,34 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { // ... than to compute: // // ___d-1 - // \ / ω^i \ + // \ / omega^i \ // / y_i . | --------- | - // /____ \ z - ω^i / + // /____ \ z - omega^i / // i=0 // - // perhaps because all the ω^i terms are constant witnesses? + // perhaps because all the omega^i terms are constant witnesses? 
//***************************************************************** // This section is only needed because `__compute_fracs` isn't working (stack too deep error). - let mut fracs: [F; FIELDS_PER_BLOB] = [BigNum::new(); FIELDS_PER_BLOB]; // y_i / (z - ω^i), for all i + let mut fracs: [F; FIELDS_PER_BLOB] = [BigNum::new(); FIELDS_PER_BLOB]; // y_i / (z - omega^i), for all i let mut denoms = [BigNum::new(); FIELDS_PER_BLOB]; for i in 0..FIELDS_PER_BLOB { - denoms[i] = z.__add(NEGATIVE_ROOTS[i]); // (z - ω^i) + denoms[i] = z.__add(NEGATIVE_ROOTS[i]); // (z - omega^i) } // If you're seeing a `bug` warning for this line, I think it's fine. // Ideally, we'd be using `__compute_fracs`, anyway, but we're getting a "stack too deep" error. - let inv_denoms = __batch_invert_impl(denoms); // 1 / (z - ω^i), for all i + let inv_denoms = __batch_invert_impl(denoms); // 1 / (z - omega^i), for all i for i in 0..FIELDS_PER_BLOB { let num = ys[i]; - let inv_denom = inv_denoms[i]; // 1 / (z - ω^i) + let inv_denom = inv_denoms[i]; // 1 / (z - omega^i) - let frac = num.__mul(inv_denom); // y_i * (1 / (z - ω^i)) + let frac = num.__mul(inv_denom); // y_i * (1 / (z - omega^i)) - fracs[i] = frac; // y_i / (z - ω^i) + fracs[i] = frac; // y_i / (z - omega^i) std::as_witness(fracs[i].limbs[0]); std::as_witness(fracs[i].limbs[1]); std::as_witness(fracs[i].limbs[2]); @@ -414,9 +414,9 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { // Seeking: // ___d-1 - // \ ω^i + // \ omega^i // sum = / y_i . --------- - // /____ z - ω^i + // /____ z - omega^i // i=0 let NUM_PARTIAL_SUMS = FIELDS_PER_BLOB / 8; for i in 0..NUM_PARTIAL_SUMS { @@ -426,15 +426,15 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { // Seeking: // ___i*8 + 7 - // \ ω^k + // \ omega^k // partial_sum = / y_k . --------- - // /____ z - ω^k + // /____ z - omega^k // k=i*8 + 0 for j in 0..8 { let k = i * 8 + j; - lhs[j] = ROOTS[k]; // ω^k - rhs[j] = fracs[k]; // y_k / (z - ω^k) + lhs[j] = ROOTS[k]; // omega^k + rhs[j] = fracs[k]; // y_k / (z - omega^k) std::as_witness(lhs[j].limbs[0]); std::as_witness(lhs[j].limbs[1]); std::as_witness(lhs[j].limbs[2]); @@ -442,10 +442,10 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { std::as_witness(rhs[j].limbs[1]); std::as_witness(rhs[j].limbs[2]); - // y_k * ( ω^k / (z - ω^k) ) + // y_k * ( omega^k / (z - omega^k) ) let summand = ROOTS[k].__mul(fracs[k]); - // partial_sum + ( y_k * ( ω^k / (z - ω^k) ) -> partial_sum + // partial_sum + ( y_k * ( omega^k / (z - omega^k) ) -> partial_sum partial_sum = partial_sum.__add(summand); std::as_witness(partial_sum.limbs[0]); std::as_witness(partial_sum.limbs[1]); @@ -454,9 +454,9 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { // Seeking: // ___i*8 - 1 ___i*8 + 7 - // \ ω^i \ / y_k \ - // sum_out = / y_i . --------- + / ω^k . | --------- | - // /____ z - ω^i /____ \ z - ω^k / + // \ omega^i \ / y_k \ + // sum_out = / y_i . --------- + / omega^k . | --------- | + // /____ z - omega^i /____ \ z - omega^k / // 0 k = i*8 // ^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ // sum partial_sum @@ -464,9 +464,9 @@ fn barycentric_evaluate_blob_at_z(z: F, ys: [F; FIELDS_PER_BLOB]) -> F { // ... that is: // // ___i*8 - 1 ___ 7 - // \ ω^i \ + // \ omega^i \ // sum_out = / y_i . --------- + / lhs[j] . 
rhs[j] - // /____ z - ω^i /____ + // /____ z - omega^i /____ // 0 j = 0 // ^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^ // sum partial_sum diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 47ccd7c840f..f3ad4dcaf3d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -15,11 +15,11 @@ global ARGS_LENGTH: u32 = 16; * * In the kernel circuits, we accumulate elements such as note hashes and the nullifiers from all functions calls in a * transaction. Therefore, we always must have: - * MAX_XXX_PER_TX ≥ MAX_XXX_PER_CALL + * MAX_XXX_PER_TX >= MAX_XXX_PER_CALL * * For instance: - * MAX_NOTE_HASHES_PER_TX ≥ MAX_NOTE_HASHES_PER_CALL - * MAX_NULLIFIERS_PER_TX ≥ MAX_NULLIFIERS_PER_CALL + * MAX_NOTE_HASHES_PER_TX >= MAX_NOTE_HASHES_PER_CALL + * MAX_NULLIFIERS_PER_TX >= MAX_NULLIFIERS_PER_CALL * */ @@ -137,7 +137,7 @@ global AZTEC_TARGET_COMMITTEE_SIZE: u32 = 48; // The following is taken from building a block and looking at the `lastArchive` value in it. // You can run the `integration_l1_publisher.test.ts` and look at the first blocks in the fixtures. global GENESIS_ARCHIVE_ROOT: Field = 0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e; -// The following and the value in `deploy_l1_contracts´ must match. We should not have the code both places, but +// The following and the value in `deploy_l1_contracts` must match. We should not have the code both places, but // we are running into circular dependency issues. #3342 global FEE_JUICE_INITIAL_MINT: Field = 20000000000; @@ -333,12 +333,12 @@ global AVM_PUBLIC_COLUMN_MAX_SIZE_LOG2 = 8; * +-----------+-------------------------------+----------------------+ * | Hash size | Number of elements hashed (n) | Condition to use | * |-----------+-------------------------------+----------------------| - * | LOW | n ≤ 8 | 0 < hash_index ≤ 32 | - * | MID | 8 < n ≤ 16 | 32 < hash_index ≤ 40 | - * | HIGH | 16 < n ≤ 48 | 40 < hash_index ≤ 48 | + * | LOW | n <= 8 | 0 < hash_index <= 32 | + * | MID | 8 < n <= 16 | 32 < hash_index <= 40 | + * | HIGH | 16 < n <= 48 | 40 < hash_index <= 48 | * +-----------+-------------------------------+----------------------+ */ -// Indices with size ≤ 8 +// Indices with size <= 8 global GENERATOR_INDEX__NOTE_HASH: u32 = 1; global GENERATOR_INDEX__NOTE_HASH_NONCE: u32 = 2; global GENERATOR_INDEX__UNIQUE_NOTE_HASH: u32 = 3; @@ -371,10 +371,10 @@ global GENERATOR_INDEX__SIDE_EFFECT: u32 = 29; global GENERATOR_INDEX__FEE_PAYLOAD: u32 = 30; global GENERATOR_INDEX__COMBINED_PAYLOAD: u32 = 31; global GENERATOR_INDEX__TX_NULLIFIER: u32 = 32; -// Indices with size ≤ 16 +// Indices with size <= 16 global GENERATOR_INDEX__TX_REQUEST: u32 = 33; global GENERATOR_INDEX__SIGNATURE_PAYLOAD: u32 = 34; -// Indices with size ≤ 44 +// Indices with size <= 44 global GENERATOR_INDEX__VK: u32 = 41; global GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS: u32 = 42; global GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS: u32 = 43; diff --git a/noir/.rebuild_patterns_native b/noir/.rebuild_patterns_native index 3d1eeeb33be..89493b066f7 100644 --- a/noir/.rebuild_patterns_native +++ b/noir/.rebuild_patterns_native @@ -3,7 +3,6 @@ ^noir/scripts/test_native.sh ^noir/noir-repo/acvm-repo ^noir/noir-repo/compiler -^noir/noir-repo/aztec_macros ^noir/noir-repo/noir_stdlib ^noir/noir-repo/tooling/backend_interface ^noir/noir-repo/tooling/bb_abstraction_leaks 
diff --git a/noir/.rebuild_patterns_packages b/noir/.rebuild_patterns_packages index 0541ebaa879..df6e13000bb 100644 --- a/noir/.rebuild_patterns_packages +++ b/noir/.rebuild_patterns_packages @@ -7,7 +7,6 @@ ^noir/noir-repo/yarn.lock ^noir/noir-repo/acvm-repo ^noir/noir-repo/compiler -^noir/noir-repo/aztec_macros ^noir/noir-repo/noir_stdlib ^noir/noir-repo/tooling/noir_codegen ^noir/noir-repo/tooling/noir_js diff --git a/noir/Earthfile b/noir/Earthfile index f5252a92d52..2ad31a5ba23 100644 --- a/noir/Earthfile +++ b/noir/Earthfile @@ -7,7 +7,6 @@ nargo-src: # Relevant source (TODO finer-grained 'tooling') COPY --dir \ noir-repo/acvm-repo \ - noir-repo/aztec_macros \ noir-repo/compiler \ noir-repo/noir_stdlib \ noir-repo/tooling \ @@ -43,7 +42,7 @@ nargo: SAVE IMAGE aztecprotocol/nargo test: - FROM +nargo + FROM +nargo-src COPY ./scripts/test_native.sh ./scripts/test_native.sh COPY noir-repo/.rustfmt.toml noir-repo/.rustfmt.toml @@ -116,7 +115,6 @@ packages-deps: # Relevant source (TODO finer-grained) COPY --dir \ noir-repo/acvm-repo \ - noir-repo/aztec_macros \ noir-repo/compiler \ noir-repo/docs \ noir-repo/noir_stdlib \ diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index 9f46e6f98e8..e45a482cc59 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -183,7 +183,7 @@ jobs: with: name: acvm-js path: ./acvm-repo/acvm_js - + - name: Set up test environment uses: ./.github/actions/setup @@ -230,13 +230,13 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - + - name: Download nargo binary uses: actions/download-artifact@v4 with: name: nargo path: ./nargo - + - name: Download artifact uses: actions/download-artifact@v4 with: @@ -248,7 +248,7 @@ jobs: with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm - + - name: Set nargo on PATH run: | nargo_binary="${{ github.workspace }}/nargo/nargo" @@ -336,13 +336,13 @@ jobs: with: name: acvm-js path: ./acvm-repo/acvm_js - + - name: Download noirc_abi package artifact uses: actions/download-artifact@v4 with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm - + - name: Set nargo on PATH run: | nargo_binary="${{ github.workspace }}/nargo/nargo" @@ -468,7 +468,7 @@ jobs: working-directory: ./compiler/integration-tests run: | yarn test:browser - + test-examples: name: Example scripts runs-on: ubuntu-latest @@ -509,6 +509,59 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh + external-repo-checks: + needs: [build-nargo] + runs-on: ubuntu-latest + # Only run when 'run-external-checks' label is present + if: contains(github.event.pull_request.labels.*.name, 'run-external-checks') + timeout-minutes: 30 + strategy: + fail-fast: false + matrix: + project: + # Disabled as these are currently failing with many visibility errors + # - { repo: AztecProtocol/aztec-nr, path: ./ } + # - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } + # Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml` + #- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits } + - { repo: zac-williamson/noir-edwards, path: ./, ref: 037e44b2ee8557c51f6aef9bb9d63ea9e32722d1 } + # TODO: Enable these once they're passing against master again. 
+ # - { repo: zac-williamson/noir-bignum, path: ./, ref: 030c2acce1e6b97c44a3bbbf3429ed96f20d72d3 } + # - { repo: vlayer-xyz/monorepo, path: ./, ref: ee46af88c025863872234eb05d890e1e447907cb } + # - { repo: hashcloak/noir-bigint, path: ./, ref: 940ddba3a5201b508e7b37a2ef643551afcf5ed8 } + name: Check external repo - ${{ matrix.project.repo }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + repository: ${{ matrix.project.repo }} + path: test-repo + ref: ${{ matrix.project.ref }} + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Remove requirements on compiler version + working-directory: ./test-repo + run: | + # Github actions seems to not expand "**" in globs by default. + shopt -s globstar + sed -i '/^compiler_version/d' ./**/Nargo.toml + - name: Run nargo check + working-directory: ./test-repo/${{ matrix.project.path }} + run: nargo check + # This is a job which depends on all test jobs and reports the overall status. # This allows us to add/remove test jobs without having to update the required workflows. tests-end: @@ -526,7 +579,7 @@ jobs: - test-integration-node - test-integration-browser - test-examples - + steps: - name: Report overall success run: | diff --git a/noir/noir-repo/.tokeignore b/noir/noir-repo/.tokeignore index 55f24e41dbd..58aa2c9706a 100644 --- a/noir/noir-repo/.tokeignore +++ b/noir/noir-repo/.tokeignore @@ -1,9 +1,6 @@ docs scripts -# aztec_macros is explicitly considered OOS for Noir audit -aztec_macros - # config files *.toml *.md diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index e8f06054cf7..6a469bd67f4 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -436,22 +436,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "aztec_macros" -version = "0.34.0" -dependencies = [ - "acvm", - "bn254_blackbox_solver", - "convert_case 0.6.0", - "hex", - "im", - "iter-extended", - "itertools", - "noirc_errors", - "noirc_frontend", - "regex", -] - [[package]] name = "backtrace" version = "0.3.68" @@ -2898,7 +2882,6 @@ name = "noirc_driver" version = "0.34.0" dependencies = [ "acvm", - "aztec_macros", "build-data", "clap", "fm", diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 6ef8829252a..a6cfa7de07f 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -1,8 +1,6 @@ [workspace] members = [ - # Aztec Macro crate for metaprogramming - "aztec_macros", # Compiler crates "compiler/noirc_arena", "compiler/noirc_evaluator", diff --git a/noir/noir-repo/aztec_macros/Cargo.toml b/noir/noir-repo/aztec_macros/Cargo.toml deleted file mode 100644 index e7bee99b74e..00000000000 --- a/noir/noir-repo/aztec_macros/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "aztec_macros" -version.workspace = true -authors.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -repository.workspace = true - -[lints] -workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acvm.workspace = true -noirc_frontend.workspace = true 
-noirc_errors.workspace = true -iter-extended.workspace = true -bn254_blackbox_solver.workspace = true -convert_case = "0.6.0" -im.workspace = true -regex = "1.10" -itertools = "^0.10" -hex.workspace = true diff --git a/noir/noir-repo/aztec_macros/src/lib.rs b/noir/noir-repo/aztec_macros/src/lib.rs deleted file mode 100644 index 4ba8951c2f9..00000000000 --- a/noir/noir-repo/aztec_macros/src/lib.rs +++ /dev/null @@ -1,241 +0,0 @@ -mod transforms; -mod utils; - -use noirc_errors::Location; -use transforms::{ - compute_note_hash_and_optionally_a_nullifier::inject_compute_note_hash_and_optionally_a_nullifier, - contract_interface::{ - generate_contract_interface, stub_function, update_fn_signatures_in_contract_interface, - }, - events::{generate_event_impls, transform_event_abi}, - functions::{ - check_for_public_args, export_fn_abi, transform_function, transform_unconstrained, - }, - note_interface::{generate_note_interface_impl, inject_note_exports}, - storage::{ - assign_storage_slots, check_for_storage_definition, check_for_storage_implementation, - generate_storage_implementation, generate_storage_layout, inject_context_in_storage, - }, -}; - -use noirc_frontend::macros_api::{ - CrateId, FileId, HirContext, MacroError, MacroProcessor, SortedModule, Span, -}; - -use utils::{ - ast_utils::is_custom_attribute, - checks::{check_for_aztec_dependency, has_aztec_dependency}, - constants::MAX_CONTRACT_PRIVATE_FUNCTIONS, - errors::AztecMacroError, -}; -pub struct AztecMacro; - -impl MacroProcessor for AztecMacro { - fn process_untyped_ast( - &self, - ast: SortedModule, - crate_id: &CrateId, - file_id: FileId, - context: &HirContext, - ) -> Result { - transform(ast, crate_id, file_id, context) - } - - fn process_typed_ast( - &self, - crate_id: &CrateId, - context: &mut HirContext, - ) -> Result<(), (MacroError, FileId)> { - transform_hir(crate_id, context).map_err(|(err, file_id)| (err.into(), file_id)) - } -} - -// -// Create AST Nodes for Aztec -// - -/// Traverses every function in the ast, calling `transform_function` which -/// determines if further processing is required -fn transform( - mut ast: SortedModule, - crate_id: &CrateId, - file_id: FileId, - context: &HirContext, -) -> Result { - let empty_spans = context.def_interner.is_in_lsp_mode(); - - // Usage -> mut ast -> aztec_library::transform(&mut ast) - // Covers all functions in the ast - for submodule in - ast.submodules.iter_mut().map(|m| &mut m.item).filter(|submodule| submodule.is_contract) - { - if transform_module( - &file_id, - &mut submodule.contents, - submodule.name.0.contents.as_str(), - empty_spans, - ) - .map_err(|err| (err.into(), file_id))? - { - check_for_aztec_dependency(crate_id, context)?; - } - } - - generate_event_impls(&mut ast, empty_spans).map_err(|err| (err.into(), file_id))?; - generate_note_interface_impl(&mut ast, empty_spans).map_err(|err| (err.into(), file_id))?; - - Ok(ast) -} - -/// Determines if ast nodes are annotated with aztec attributes. -/// For annotated functions it calls the `transform` function which will perform the required transformations. 
-/// Returns true if an annotated node is found, false otherwise -fn transform_module( - file_id: &FileId, - module: &mut SortedModule, - module_name: &str, - empty_spans: bool, -) -> Result { - let mut has_transformed_module = false; - - // Check for a user defined storage struct - - let maybe_storage_struct_name = check_for_storage_definition(module)?; - - let storage_defined = maybe_storage_struct_name.is_some(); - - if let Some(ref storage_struct_name) = maybe_storage_struct_name { - inject_context_in_storage(module)?; - if !check_for_storage_implementation(module, storage_struct_name) { - generate_storage_implementation(module, storage_struct_name)?; - } - generate_storage_layout(module, storage_struct_name.clone(), module_name, empty_spans)?; - } - - let has_initializer = module.functions.iter().any(|func| { - func.item - .def - .attributes - .secondary - .iter() - .any(|attr| is_custom_attribute(attr, "aztec(initializer)")) - }); - - let mut stubs: Vec<_> = vec![]; - - for func in module.functions.iter_mut() { - let func = &mut func.item; - let mut is_private = false; - let mut is_public = false; - let mut is_initializer = false; - let mut is_internal = false; - let mut insert_init_check = has_initializer; - let mut is_static = false; - - for secondary_attribute in func.def.attributes.secondary.clone() { - if is_custom_attribute(&secondary_attribute, "aztec(private)") { - is_private = true; - } else if is_custom_attribute(&secondary_attribute, "aztec(initializer)") { - is_initializer = true; - insert_init_check = false; - } else if is_custom_attribute(&secondary_attribute, "aztec(noinitcheck)") { - insert_init_check = false; - } else if is_custom_attribute(&secondary_attribute, "aztec(internal)") { - is_internal = true; - } else if is_custom_attribute(&secondary_attribute, "aztec(public)") { - is_public = true; - } - if is_custom_attribute(&secondary_attribute, "aztec(view)") { - is_static = true; - } - } - - // Apply transformations to the function based on collected attributes - if is_private || is_public { - let fn_type = if is_private { "Private" } else { "Public" }; - let stub_src = stub_function(fn_type, func, is_static); - stubs.push((stub_src, Location { file: *file_id, span: func.name_ident().span() })); - - export_fn_abi(&mut module.types, func, empty_spans)?; - transform_function( - fn_type, - func, - maybe_storage_struct_name.clone(), - is_initializer, - insert_init_check, - is_internal, - is_static, - )?; - has_transformed_module = true; - } else if storage_defined && func.def.is_unconstrained { - transform_unconstrained(func, maybe_storage_struct_name.clone().unwrap()); - has_transformed_module = true; - } - } - - if has_transformed_module { - // We only want to run these checks if the macro processor has found the module to be an Aztec contract. 
- - let private_functions: Vec<_> = module - .functions - .iter() - .map(|t| &t.item) - .filter(|func| { - func.def - .attributes - .secondary - .iter() - .any(|attr| is_custom_attribute(attr, "aztec(private)")) - }) - .collect(); - - let public_functions: Vec<_> = module - .functions - .iter() - .map(|func| &func.item) - .filter(|func| { - func.def - .attributes - .secondary - .iter() - .any(|attr| is_custom_attribute(attr, "aztec(public)")) - }) - .collect(); - - let private_function_count = private_functions.len(); - - check_for_public_args(&private_functions)?; - - check_for_public_args(&public_functions)?; - - if private_function_count > MAX_CONTRACT_PRIVATE_FUNCTIONS { - return Err(AztecMacroError::ContractHasTooManyPrivateFunctions { - span: Span::default(), - }); - } - - generate_contract_interface(module, module_name, &stubs, storage_defined, empty_spans)?; - } - - Ok(has_transformed_module) -} - -// -// Transform Hir Nodes for Aztec -// - -/// Completes the Hir with data gathered from type resolution -fn transform_hir( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - if has_aztec_dependency(crate_id, context) { - transform_event_abi(crate_id, context)?; - inject_compute_note_hash_and_optionally_a_nullifier(crate_id, context)?; - assign_storage_slots(crate_id, context)?; - inject_note_exports(crate_id, context)?; - update_fn_signatures_in_contract_interface(crate_id, context) - } else { - Ok(()) - } -} diff --git a/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs b/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs deleted file mode 100644 index 4d5dcc6f1af..00000000000 --- a/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs +++ /dev/null @@ -1,232 +0,0 @@ -use noirc_errors::{Location, Span}; -use noirc_frontend::ast::{FunctionReturnType, NoirFunction, UnresolvedTypeData}; -use noirc_frontend::{ - graph::CrateId, - macros_api::{FileId, HirContext}, - Type, -}; - -use crate::utils::parse_utils::parse_program; -use crate::utils::{ - errors::AztecMacroError, - hir_utils::{ - collect_crate_functions, collect_traits, fetch_notes, get_contract_module_data, - get_global_numberic_const, get_serialized_length, inject_fn, - }, -}; - -// Check if "compute_note_hash_and_optionally_a_nullifier(AztecAddress,Field,Field,Field,bool,[Field; N]) -> [Field; 4]" is defined -fn check_for_compute_note_hash_and_optionally_a_nullifier_definition( - crate_id: &CrateId, - context: &HirContext, -) -> bool { - collect_crate_functions(crate_id, context).iter().any(|funct_id| { - let func_data = context.def_interner.function_meta(funct_id); - let func_name = context.def_interner.function_name(funct_id); - func_name == "compute_note_hash_and_optionally_a_nullifier" - && func_data.parameters.len() == 6 - && func_data.parameters.0.first().is_some_and(| (_, typ, _) | match typ { - Type::Struct(struct_typ, _) => struct_typ.borrow().name.0.contents == "AztecAddress", - _ => false - }) - && func_data.parameters.0.get(1).is_some_and(|(_, typ, _)| typ.is_field()) - && func_data.parameters.0.get(2).is_some_and(|(_, typ, _)| typ.is_field()) - && func_data.parameters.0.get(3).is_some_and(|(_, typ, _)| typ.is_field()) - && func_data.parameters.0.get(4).is_some_and(|(_, typ, _)| typ.is_bool()) - // checks if the 6th parameter is an array and contains only fields - && func_data.parameters.0.get(5).is_some_and(|(_, typ, _)| match typ { - Type::Array(_, 
inner_type) => inner_type.to_owned().is_field(), - _ => false - }) - // We check the return type the same way as we did the 5th parameter - && match &func_data.return_type { - FunctionReturnType::Default(_) => false, - FunctionReturnType::Ty(unresolved_type) => { - match &unresolved_type.typ { - UnresolvedTypeData::Array(_, inner_type) => matches!(inner_type.typ, UnresolvedTypeData::FieldElement), - _ => false, - } - } - } - }) -} - -pub fn inject_compute_note_hash_and_optionally_a_nullifier( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - if let Some((_, module_id, file_id)) = get_contract_module_data(context, crate_id) { - // If compute_note_hash_and_optionally_a_nullifier is already defined by the user, we skip auto-generation in order to provide an - // escape hatch for this mechanism. - // TODO(#4647): improve this diagnosis and error messaging. - if context.crate_graph.root_crate_id() != crate_id - || check_for_compute_note_hash_and_optionally_a_nullifier_definition(crate_id, context) - { - return Ok(()); - } - - let traits: Vec<_> = collect_traits(context); - - // Get MAX_NOTE_FIELDS_LENGTH global to check if the notes in our contract are too long. - let max_note_length_const = get_global_numberic_const(context, "MAX_NOTE_FIELDS_LENGTH") - .map_err(|err| { - ( - AztecMacroError::CouldNotImplementComputeNoteHashAndOptionallyANullifier { - secondary_message: Some(err.primary_message), - }, - file_id, - ) - })?; - - // In order to implement compute_note_hash_and_optionally_a_nullifier, we need to know all of the different note types the - // contract might use and their serialized lengths. These are the types that are marked as #[aztec(note)]. - let mut notes_and_lengths = vec![]; - - for (path, typ) in fetch_notes(context) { - let serialized_len: u128 = get_serialized_length( - &traits, - "NoteInterface", - &Type::Struct(typ.clone(), vec![]), - &context.def_interner, - ) - .map_err(|_err| { - ( - AztecMacroError::CouldNotImplementComputeNoteHashAndOptionallyANullifier { - secondary_message: Some(format!( - "Failed to get serialized length for note type {}", - path - )), - }, - file_id, - ) - })? - .into(); - - if serialized_len > max_note_length_const { - return Err(( - AztecMacroError::CouldNotImplementComputeNoteHashAndOptionallyANullifier { - secondary_message: Some(format!( - "Note type {} as {} fields, which is more than the maximum allowed length of {}.", - path, - serialized_len, - max_note_length_const - )), - }, - file_id, - )); - } - - notes_and_lengths.push((path.to_string(), serialized_len)); - } - - let max_note_length: u128 = - *notes_and_lengths.iter().map(|(_, serialized_len)| serialized_len).max().unwrap_or(&0); - - let note_types: Vec = - notes_and_lengths.iter().map(|(note_type, _)| note_type.clone()).collect::>(); - - // We can now generate a version of compute_note_hash_and_optionally_a_nullifier tailored for the contract in this crate. - let empty_spans = context.def_interner.is_in_lsp_mode(); - let func = generate_compute_note_hash_and_optionally_a_nullifier( - ¬e_types, - max_note_length, - empty_spans, - ); - - // And inject the newly created function into the contract. - - // TODO(#4373): We don't have a reasonable location for the source code of this autogenerated function, so we simply - // pass an empty span. This function should not produce errors anyway so this should not matter. 
- let location = Location::new(Span::empty(0), file_id); - - inject_fn(crate_id, context, func, location, module_id, file_id).map_err(|err| { - ( - AztecMacroError::CouldNotImplementComputeNoteHashAndOptionallyANullifier { - secondary_message: err.secondary_message, - }, - file_id, - ) - })?; - } - Ok(()) -} - -fn generate_compute_note_hash_and_optionally_a_nullifier( - note_types: &[String], - max_note_length: u128, - empty_spans: bool, -) -> NoirFunction { - let function_source = - generate_compute_note_hash_and_optionally_a_nullifier_source(note_types, max_note_length); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors.clone()); - } - assert_eq!(errors.len(), 0, "Failed to parse Noir macro code. This is either a bug in the compiler or the Noir macro code"); - - let mut function_ast = function_ast.into_sorted(); - function_ast.functions.remove(0).item -} - -fn generate_compute_note_hash_and_optionally_a_nullifier_source( - note_types: &[String], - max_note_length: u128, -) -> String { - // TODO(#4649): The serialized_note parameter is a fixed-size array, but we don't know what length it should have. - // For now we hardcode it to 20, which is the same as MAX_NOTE_FIELDS_LENGTH. - - if note_types.is_empty() { - // Even if the contract does not include any notes, other parts of the stack expect for this function to exist, - // so we include a dummy version. - format!( - " - unconstrained fn compute_note_hash_and_optionally_a_nullifier( - contract_address: aztec::protocol_types::address::AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - compute_nullifier: bool, - serialized_note: [Field; {}], - ) -> pub [Field; 4] {{ - assert(false, \"This contract does not use private notes\"); - [0, 0, 0, 0] - }}", - max_note_length - ) - } else { - // For contracts that include notes we do a simple if-else chain comparing note_type_id with the different - // get_note_type_id of each of the note types. 
- - let if_statements: Vec = note_types.iter().map(|note_type| format!( - "if (note_type_id == {0}::get_note_type_id()) {{ - aztec::note::utils::compute_note_hash_and_optionally_a_nullifier({0}::deserialize_content, note_header, compute_nullifier, serialized_note) - }}" - , note_type)).collect(); - - let full_if_statement = if_statements.join(" else ") - + " - else { - assert(false, \"Unknown note type ID\"); - [0, 0, 0, 0] - }"; - - format!( - " - unconstrained fn compute_note_hash_and_optionally_a_nullifier( - contract_address: aztec::protocol_types::address::AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - compute_nullifier: bool, - serialized_note: [Field; {}], - ) -> pub [Field; 4] {{ - let note_header = aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); - - {} - }}", - max_note_length, - full_if_statement - ) - } -} diff --git a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs deleted file mode 100644 index 522489daa1b..00000000000 --- a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs +++ /dev/null @@ -1,468 +0,0 @@ -use acvm::acir::AcirField; - -use bn254_blackbox_solver::poseidon_hash; -use noirc_errors::Location; -use noirc_frontend::ast::{Documented, Ident, NoirFunction, UnresolvedTypeData}; -use noirc_frontend::{ - graph::CrateId, - macros_api::{FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement}, - parser::SortedModule, - Type, -}; - -use itertools::Itertools; - -use crate::utils::parse_utils::parse_program; -use crate::utils::{ - errors::AztecMacroError, - hir_utils::{collect_crate_structs, get_contract_module_data, signature_of_type}, -}; - -// Generates the stubs for contract functions as low level calls using CallInterface, turning -// #[aztec(public)] // also private -// fn a_function(first_arg: Field, second_arg: Struct, third_arg: [Field; 4]) -> Field { -// ... 
-// } -// -// into -// -// pub fn a_function(self, first_arg: Field, second_arg: Struct, third_arg: [Field; 4]) -> PublicCallInterface { -// let mut args_acc: [Field] = &[]; -// args_acc = args_acc.append(first_arg.serialize().as_slice()); -// args_acc = args_acc.append(second_arg.serialize().as_slice()); -// let hash_third_arg = third_arg.map(|x: Field| x.serialize()); -// for i in 0..third_arg.len() { -// args_acc = args_acc.append(third_arg[i].serialize().as_slice()); -// } -// let args_hash = aztec::hash::hash_args(args_acc); -// assert(args_hash == aztec::oracle::arguments::pack_arguments(args_acc)); -// PublicCallInterface { -// target_contract: self.target_contract, -// selector: FunctionSelector::from_signature("SELECTOR_PLACEHOLDER"), -// args_hash, -// name: "a_function", -// args_hash, -// args: args_acc, -// original: | inputs: dep::aztec::context::inputs::PublicContextInputs | -> Field { -// a_function(inputs, first_arg, second_arg, third_arg) -// }, -// is_static: false, -// gas_opts: dep::aztec::context::gas::GasOpts::default() -// } -// } -// -// The selector placeholder has to be replaced with the actual function signature after type checking in the next macro pass -pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call: bool) -> String { - let fn_name = func.name().to_string(); - let fn_parameters = func - .parameters() - .iter() - .map(|param| { - format!( - "{}: {}", - param.pattern.name_ident().0.contents, - param.typ.to_string().replace("plain::", "") - ) - }) - .collect::>() - .join(", "); - let fn_return_type: noirc_frontend::ast::UnresolvedType = func.return_type(); - - let parameters = func.parameters(); - let is_void = if matches!(fn_return_type.typ, UnresolvedTypeData::Unit) { "Void" } else { "" }; - let is_static = if is_static_call { "Static" } else { "" }; - let return_type_hint = fn_return_type.typ.to_string().replace("plain::", ""); - let call_args = parameters - .iter() - .map(|arg| { - let param_name = arg.pattern.name_ident().0.contents.clone(); - match &arg.typ.typ { - UnresolvedTypeData::Array(_, typ) => { - format!( - "let serialized_{0} = {0}.map(|x: {1}| x.serialize()); - for i in 0..{0}.len() {{ - args_acc = args_acc.append(serialized_{0}[i].as_slice()); - }}\n", - param_name, - typ.typ.to_string().replace("plain::", "") - ) - } - UnresolvedTypeData::Named(_, _, _) | UnresolvedTypeData::String(_) => { - format!("args_acc = args_acc.append({}.serialize().as_slice());\n", param_name) - } - _ => { - format!("args_acc = args_acc.append(&[{}.to_field()]);\n", param_name) - } - } - }) - .collect::>() - .join(""); - - let param_types = if !parameters.is_empty() { - parameters - .iter() - .map(|param| param.pattern.name_ident().0.contents.clone()) - .collect::>() - .join(", ") - } else { - "".to_string() - }; - - let original = format!( - "| inputs: dep::aztec::context::inputs::{}ContextInputs | -> {} {{ - {}(inputs{}) - }}", - aztec_visibility, - if aztec_visibility == "Private" { - "dep::aztec::protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs".to_string() - } else { - return_type_hint.clone() - }, - fn_name, - if param_types.is_empty() { "".to_string() } else { format!(" ,{} ", param_types) } - ); - let arg_types = format!( - "({}{})", - parameters - .iter() - .map(|param| param.typ.typ.to_string().replace("plain::", "")) - .collect::>() - .join(","), - // In order to distinguish between a single element Tuple (Type,) and a single type with unnecessary parenthesis around it (Type), - // The latter 
gets simplified to Type, that is NOT a valid env - if parameters.len() == 1 { "," } else { "" } - ); - - let generics = if is_void == "Void" { - format!("{}>", arg_types) - } else { - format!("{}, {}>", return_type_hint, arg_types) - }; - - let args = format!( - "let mut args_acc: [Field] = &[]; - {} - {}", - call_args, - if aztec_visibility == "Private" { - "let args_hash = aztec::hash::hash_args(args_acc);" - } else { - "" - } - ); - - let gas_opts = if aztec_visibility == "Public" { - "gas_opts: dep::aztec::context::gas::GasOpts::default()" - } else { - "" - }; - - let fn_body = format!( - "{} - let selector = dep::aztec::protocol_types::abis::function_selector::FunctionSelector::from_field(0); - dep::aztec::context::{}{}{}CallInterface {{ - target_contract: self.target_contract, - selector, - name: \"{}\", - {} - args: args_acc, - original: {}, - is_static: {}, - {} - }}", - args, - aztec_visibility, - is_static, - is_void, - fn_name, - if aztec_visibility == "Private" { "args_hash," } else { "" }, - original, - is_static_call, - gas_opts - ); - - format!( - "pub fn {}(self, {}) -> dep::aztec::context::{}{}{}CallInterface<{},{} {{ - {} - }}", - fn_name, - fn_parameters, - aztec_visibility, - is_static, - is_void, - fn_name.len(), - generics, - fn_body - ) -} - -// Generates the contract interface as a struct with an `at` function that holds the stubbed functions and provides -// them with a target contract address. The struct has the same name as the contract (which is technically a module) -// so imports look nice. The `at` function is also exposed as a contract library method for external use. -pub fn generate_contract_interface( - module: &mut SortedModule, - module_name: &str, - stubs: &[(String, Location)], - has_storage_layout: bool, - empty_spans: bool, -) -> Result<(), AztecMacroError> { - let storage_layout_getter = format!( - "#[contract_library_method] - pub fn storage() -> StorageLayout {{ - {}_STORAGE_LAYOUT - }}", - module_name, - ); - let contract_interface = format!( - " - struct {0} {{ - target_contract: aztec::protocol_types::address::AztecAddress - }} - - impl {0} {{ - {1} - - pub fn at( - target_contract: aztec::protocol_types::address::AztecAddress - ) -> Self {{ - Self {{ target_contract }} - }} - - pub fn interface() -> Self {{ - Self {{ target_contract: dep::aztec::protocol_types::address::AztecAddress::zero() }} - }} - - {2} - }} - - #[contract_library_method] - pub fn at( - target_contract: aztec::protocol_types::address::AztecAddress - ) -> {0} {{ - {0} {{ target_contract }} - }} - - #[contract_library_method] - pub fn interface() -> {0} {{ - {0} {{ target_contract: dep::aztec::protocol_types::address::AztecAddress::zero() }} - }} - - {3} - ", - module_name, - stubs.iter().map(|(src, _)| src.to_owned()).collect::>().join("\n"), - if has_storage_layout { storage_layout_getter.clone() } else { "".to_string() }, - if has_storage_layout { format!("#[contract_library_method]\n{}", storage_layout_getter) } else { "".to_string() } - ); - - let (contract_interface_ast, errors) = parse_program(&contract_interface, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotGenerateContractInterface { secondary_message: Some("Failed to parse Noir macro code during contract interface generation. 
This is either a bug in the compiler or the Noir macro code".to_string()), }); - } - - let mut contract_interface_ast = contract_interface_ast.into_sorted(); - let mut impl_with_locations = contract_interface_ast.impls.pop().unwrap(); - - impl_with_locations.methods = impl_with_locations - .methods - .iter() - .enumerate() - .map(|(i, (documented_method, orig_span))| { - let method = &documented_method.item; - if method.name() == "at" || method.name() == "interface" || method.name() == "storage" { - (documented_method.clone(), *orig_span) - } else { - let (_, new_location) = stubs[i]; - let mut modified_method = method.clone(); - modified_method.def.name = - Ident::new(modified_method.name().to_string(), new_location.span); - (Documented::not_documented(modified_method), *orig_span) - } - }) - .collect(); - - module.types.push(contract_interface_ast.types.pop().unwrap()); - module.impls.push(impl_with_locations); - for function in contract_interface_ast.functions { - module.functions.push(function); - } - - Ok(()) -} - -fn compute_fn_signature_hash(fn_name: &str, parameters: &[Type]) -> u32 { - let signature = format!( - "{}({})", - fn_name, - parameters.iter().map(signature_of_type).collect::>().join(",") - ); - - hash_to_selector(&signature) -} - -// Updates the function signatures in the contract interface with the actual ones, replacing the placeholder. -// This is done by locating the contract interface struct, its functions (stubs) and assuming the second to last statement of each -// is a let statement initializing the selector with a FunctionSelector::from_field call. -pub fn update_fn_signatures_in_contract_interface( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - if let Some((struct_name, _, file_id)) = get_contract_module_data(context, crate_id) { - let maybe_interface_struct = - collect_crate_structs(crate_id, context).iter().find_map(|struct_id| { - let r#struct = context.def_interner.get_struct(*struct_id); - if r#struct.borrow().name.0.contents == struct_name { - Some(r#struct) - } else { - None - } - }); - - if let Some(interface_struct) = maybe_interface_struct { - if let Some(methods) = - context.def_interner.get_struct_methods(interface_struct.borrow().id).cloned() - { - for func_id in methods.iter().flat_map(|(_name, methods)| methods.direct.iter()) { - let name = context.def_interner.function_name(func_id); - let fn_parameters = - &context.def_interner.function_meta(func_id).parameters.clone(); - - if name == "at" || name == "interface" || name == "storage" { - continue; - } - - let fn_signature_hash = compute_fn_signature_hash( - name, - &fn_parameters - .iter() - .skip(1) - .map(|(_, typ, _)| typ.clone()) - .collect::>(), - ); - let hir_func = - context.def_interner.function(func_id).block(&context.def_interner); - - let function_selector_statement = context.def_interner.statement( - hir_func.statements().get(hir_func.statements().len() - 2).ok_or(( - AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some( - "Function signature statement not found, invalid body length" - .to_string(), - ), - }, - file_id, - ))?, - ); - let function_selector_expression_id = match function_selector_statement { - HirStatement::Let(let_statement) => Ok(let_statement.expression), - _ => Err(( - AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some( - "Function selector statement must be an expression".to_string(), - ), - }, - file_id, - )), - }?; - let function_selector_expression = - 
context.def_interner.expression(&function_selector_expression_id); - - let current_fn_signature_expression_id = match function_selector_expression { - HirExpression::Call(call_expr) => Ok(call_expr.arguments[0]), - _ => Err(( - AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some( - "Function selector argument expression must be call expression" - .to_string(), - ), - }, - file_id, - )), - }?; - - let current_fn_signature_expression = - context.def_interner.expression(¤t_fn_signature_expression_id); - - match current_fn_signature_expression { - HirExpression::Literal(HirLiteral::Integer(value, _)) => { - if !value.is_zero() { - Err(( - AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some( - "Function signature argument must be a placeholder with value 0".to_string()), - }, - file_id, - )) - } else { - Ok(()) - } - } - _ => Err(( - AztecMacroError::CouldNotGenerateContractInterface { - secondary_message: Some( - "Function signature argument must be a literal field element" - .to_string(), - ), - }, - file_id, - )), - }?; - - context.def_interner.update_expression( - current_fn_signature_expression_id, - |expr| { - *expr = HirExpression::Literal(HirLiteral::Integer( - FieldElement::from(fn_signature_hash as u128), - false, - )) - }, - ); - } - } - } - } - Ok(()) -} - -fn poseidon2_hash_bytes(inputs: Vec) -> FieldElement { - let fields: Vec<_> = inputs - .into_iter() - .chunks(31) - .into_iter() - .map(|bytes_chunk| { - let mut chunk_as_vec: Vec = bytes_chunk.collect(); - chunk_as_vec.extend(std::iter::repeat(0).take(32 - chunk_as_vec.len())); - // Build a little endian field element - chunk_as_vec.reverse(); - FieldElement::from_be_bytes_reduce(&chunk_as_vec) - }) - .collect(); - - poseidon_hash(&fields).expect("Poseidon hash failed") -} - -pub(crate) fn hash_to_selector(inputs: &str) -> u32 { - let hash = poseidon2_hash_bytes(inputs.as_bytes().to_vec()).to_be_bytes(); - // Take the last 4 bytes of the hash and convert them to an integer - // If you change the following value you have to change NUM_BYTES_PER_NOTE_TYPE_ID in l1_note_payload.ts as well - let num_bytes_per_note_type_id = 4; - u32::from_be_bytes(hash[(32 - num_bytes_per_note_type_id)..32].try_into().unwrap()) -} - -#[cfg(test)] -mod test { - use crate::transforms::contract_interface::hash_to_selector; - - #[test] - fn test_selector_is_valid() { - let selector = hash_to_selector("IS_VALID()"); - assert_eq!(hex::encode(&selector.to_be_bytes()), "73cdda47"); - } - - #[test] - fn test_long_selector() { - let selector = hash_to_selector("foo_and_bar_and_baz_and_foo_bar_baz_and_bar_foo"); - assert_eq!(hex::encode(&selector.to_be_bytes()), "7590a997"); - } -} diff --git a/noir/noir-repo/aztec_macros/src/transforms/events.rs b/noir/noir-repo/aztec_macros/src/transforms/events.rs deleted file mode 100644 index 878bc37393a..00000000000 --- a/noir/noir-repo/aztec_macros/src/transforms/events.rs +++ /dev/null @@ -1,417 +0,0 @@ -use noirc_errors::Span; -use noirc_frontend::ast::{ - Documented, ItemVisibility, NoirFunction, NoirTraitImpl, TraitImplItem, TraitImplItemKind, -}; -use noirc_frontend::macros_api::{NodeInterner, StructId}; -use noirc_frontend::token::SecondaryAttribute; -use noirc_frontend::{ - graph::CrateId, - macros_api::{FileId, HirContext}, - parser::SortedModule, -}; - -use crate::utils::hir_utils::collect_crate_structs; -use crate::utils::parse_utils::parse_program; -use crate::utils::{ast_utils::is_custom_attribute, errors::AztecMacroError}; - -// Automatic implementation 
of most of the methods in the EventInterface trait, guiding the user with meaningful error messages in case some -// methods must be implemented manually. -pub fn generate_event_impls( - module: &mut SortedModule, - empty_spans: bool, -) -> Result<(), AztecMacroError> { - // Find structs annotated with #[aztec(event)] - // Why doesn't this work ? Events are not tagged and do not appear, it seems only going through the submodule works - // let annotated_event_structs = module - // .types - // .iter_mut() - // .filter(|typ| typ.attributes.iter().any(|attr: &SecondaryAttribute| is_custom_attribute(attr, "aztec(event)"))); - // This did not work because I needed the submodule itself to add the trait impl back in to, but it would be nice if it was tagged on the module level - // let mut annotated_event_structs = module.submodules.iter_mut() - // .flat_map(|submodule| submodule.contents.types.iter_mut()) - // .filter(|typ| typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)"))); - - // To diagnose - // let test = module.types.iter_mut(); - // for event_struct in test { - // print!("\ngenerate_event_interface_impl COUNT: {}\n", event_struct.name.0.contents); - // } - - for submodule in module.submodules.iter_mut().map(|m| &mut m.item) { - let annotated_event_structs = - submodule.contents.types.iter_mut().map(|typ| &mut typ.item).filter(|typ| { - typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) - }); - - for event_struct in annotated_event_structs { - // event_struct.attributes.push(SecondaryAttribute::Abi("events".to_string())); - // If one impl is pushed, this doesn't throw the "#[abi(tag)] attributes can only be used in contracts" error - // But if more than one impl is pushed, we get an increasing amount of "#[abi(tag)] attributes can only be used in contracts" errors - // We work around this by doing this addition in the HIR pass via transform_event_abi below. 
- - let event_type = event_struct.name.0.contents.to_string(); - let event_len = event_struct.fields.len() as u32; - // event_byte_len = event fields * 32 + randomness (32) + event_type_id (32) - let event_byte_len = event_len * 32 + 64; - - let mut event_fields = vec![]; - - for field in event_struct.fields.iter() { - let field_ident = &field.item.name; - let field_type = &field.item.typ; - event_fields.push(( - field_ident.0.contents.to_string(), - field_type.typ.to_string().replace("plain::", ""), - )); - } - - let mut event_interface_trait_impl = generate_trait_impl_stub_event_interface( - event_type.as_str(), - event_byte_len, - empty_spans, - )?; - event_interface_trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(generate_fn_get_event_type_id( - event_type.as_str(), - event_len, - empty_spans, - )?), - span: Span::default(), - })); - event_interface_trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(generate_fn_private_to_be_bytes( - event_type.as_str(), - event_byte_len, - empty_spans, - )?), - span: Span::default(), - })); - event_interface_trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(generate_fn_to_be_bytes( - event_type.as_str(), - event_byte_len, - empty_spans, - )?), - span: Span::default(), - })); - event_interface_trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(generate_fn_emit( - event_type.as_str(), - empty_spans, - )?), - span: Span::default(), - })); - submodule.contents.trait_impls.push(event_interface_trait_impl); - - let serialize_trait_impl = generate_trait_impl_serialize( - event_type.as_str(), - event_len, - &event_fields, - empty_spans, - )?; - submodule.contents.trait_impls.push(serialize_trait_impl); - - let deserialize_trait_impl = generate_trait_impl_deserialize( - event_type.as_str(), - event_len, - &event_fields, - empty_spans, - )?; - submodule.contents.trait_impls.push(deserialize_trait_impl); - } - } - - Ok(()) -} - -fn generate_trait_impl_stub_event_interface( - event_type: &str, - byte_length: u32, - empty_spans: bool, -) -> Result { - let byte_length_without_randomness = byte_length - 32; - let trait_impl_source = format!( - " -impl dep::aztec::event::event_interface::EventInterface<{byte_length}, {byte_length_without_randomness}> for {event_type} {{ - }} - " - ) - .to_string(); - - let (parsed_ast, errors) = parse_program(&trait_impl_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementEventInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of {event_type} for EventInterface). 
This is either a bug in the compiler or the Noir macro code")), - }); - } - - let mut sorted_ast = parsed_ast.into_sorted(); - let event_interface_impl = sorted_ast.trait_impls.remove(0); - - Ok(event_interface_impl) -} - -fn generate_trait_impl_serialize( - event_type: &str, - event_len: u32, - event_fields: &[(String, String)], - empty_spans: bool, -) -> Result { - let field_names = event_fields - .iter() - .map(|field| { - let field_type = field.1.as_str(); - match field_type { - "Field" => format!("self.{}", field.0), - "bool" | "u8" | "u32" | "u64" | "i8" | "i32" | "i64" => { - format!("self.{} as Field", field.0) - } - _ => format!("self.{}.to_field()", field.0), - } - }) - .collect::>(); - let field_input = field_names.join(","); - - let trait_impl_source = format!( - " - impl dep::aztec::protocol_types::traits::Serialize<{event_len}> for {event_type} {{ - fn serialize(self: {event_type}) -> [Field; {event_len}] {{ - [{field_input}] - }} - }} - " - ) - .to_string(); - - let (parsed_ast, errors) = parse_program(&trait_impl_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementEventInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of Serialize for {event_type}). This is either a bug in the compiler or the Noir macro code")), - }); - } - - let mut sorted_ast = parsed_ast.into_sorted(); - let serialize_impl = sorted_ast.trait_impls.remove(0); - - Ok(serialize_impl) -} - -fn generate_trait_impl_deserialize( - event_type: &str, - event_len: u32, - event_fields: &[(String, String)], - empty_spans: bool, -) -> Result { - let field_names: Vec = event_fields - .iter() - .enumerate() - .map(|(index, field)| { - let field_type = field.1.as_str(); - match field_type { - "Field" => format!("{}: fields[{}]", field.0, index), - "bool" | "u8" | "u32" | "u64" | "i8" | "i32" | "i64" => { - format!("{}: fields[{}] as {}", field.0, index, field_type) - } - _ => format!("{}: {}::from_field(fields[{}])", field.0, field.1, index), - } - }) - .collect::>(); - let field_input = field_names.join(","); - - let trait_impl_source = format!( - " - impl dep::aztec::protocol_types::traits::Deserialize<{event_len}> for {event_type} {{ - fn deserialize(fields: [Field; {event_len}]) -> {event_type} {{ - {event_type} {{ {field_input} }} - }} - }} - " - ) - .to_string(); - - let (parsed_ast, errors) = parse_program(&trait_impl_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementEventInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (trait impl of Deserialize for {event_type}). 
This is either a bug in the compiler or the Noir macro code")), - }); - } - - let mut sorted_ast = parsed_ast.into_sorted(); - let deserialize_impl = sorted_ast.trait_impls.remove(0); - - Ok(deserialize_impl) -} - -fn generate_fn_get_event_type_id( - event_type: &str, - field_length: u32, - empty_spans: bool, -) -> Result { - let from_signature_input = - std::iter::repeat("Field").take(field_length as usize).collect::>().join(","); - let function_source = format!( - " - fn get_event_type_id() -> dep::aztec::protocol_types::abis::event_selector::EventSelector {{ - comptime {{ dep::aztec::protocol_types::abis::event_selector::EventSelector::from_signature(\"{event_type}({from_signature_input})\") }} - }} - ", - ) - .to_string(); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementEventInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (fn get_event_type_id, implemented for EventInterface of {event_type}). This is either a bug in the compiler or the Noir macro code")), - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -fn generate_fn_private_to_be_bytes( - event_type: &str, - byte_length: u32, - empty_spans: bool, -) -> Result { - let function_source = format!( - " - fn private_to_be_bytes(self: {event_type}, randomness: Field) -> [u8; {byte_length}] {{ - let mut buffer: [u8; {byte_length}] = [0; {byte_length}]; - - let randomness_bytes: [u8; 32] = randomness.to_be_bytes(); - let event_type_id_bytes: [u8; 32] = {event_type}::get_event_type_id().to_field().to_be_bytes(); - - for i in 0..32 {{ - buffer[i] = randomness_bytes[i]; - buffer[32 + i] = event_type_id_bytes[i]; - }} - - let serialized_event = self.serialize(); - - for i in 0..serialized_event.len() {{ - let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); - for j in 0..32 {{ - buffer[64 + i * 32 + j] = bytes[j]; - }} - }} - - buffer - }} - " - ) - .to_string(); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementEventInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (fn private_to_be_bytes, implemented for EventInterface of {event_type}). 
This is either a bug in the compiler or the Noir macro code")), - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -fn generate_fn_to_be_bytes( - event_type: &str, - byte_length: u32, - empty_spans: bool, -) -> Result { - let byte_length_without_randomness = byte_length - 32; - let function_source = format!( - " - fn to_be_bytes(self: {event_type}) -> [u8; {byte_length_without_randomness}] {{ - let mut buffer: [u8; {byte_length_without_randomness}] = [0; {byte_length_without_randomness}]; - - let event_type_id_bytes: [u8; 32] = {event_type}::get_event_type_id().to_field().to_be_bytes(); - - for i in 0..32 {{ - buffer[i] = event_type_id_bytes[i]; - }} - - let serialized_event = self.serialize(); - - for i in 0..serialized_event.len() {{ - let bytes: [u8; 32] = serialized_event[i].to_be_bytes(); - for j in 0..32 {{ - buffer[32 + i * 32 + j] = bytes[j]; - }} - }} - - buffer - }} - ") - .to_string(); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementEventInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (fn to_be_bytes, implemented for EventInterface of {event_type}). This is either a bug in the compiler or the Noir macro code")), - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -fn generate_fn_emit(event_type: &str, empty_spans: bool) -> Result { - let function_source = format!( - " - fn emit(self: {event_type}, _emit: fn[Env](Self) -> ()) {{ - _emit(self); - }} - " - ) - .to_string(); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementEventInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (fn emit, implemented for EventInterface of {event_type}). 
This is either a bug in the compiler or the Noir macro code")), - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -// We do this pass in the HIR to work around the "#[abi(tag)] attributes can only be used in contracts" error -pub fn transform_event_abi( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - for struct_id in collect_crate_structs(crate_id, context) { - let attributes = context.def_interner.struct_attributes(&struct_id); - if attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(event)")) { - transform_event(struct_id, &mut context.def_interner)?; - } - } - Ok(()) -} - -fn transform_event( - struct_id: StructId, - interner: &mut NodeInterner, -) -> Result<(), (AztecMacroError, FileId)> { - interner.update_struct_attributes(struct_id, |struct_attributes| { - struct_attributes.push(SecondaryAttribute::Abi("events".to_string())); - }); - - Ok(()) -} diff --git a/noir/noir-repo/aztec_macros/src/transforms/functions.rs b/noir/noir-repo/aztec_macros/src/transforms/functions.rs deleted file mode 100644 index d2091f6f98d..00000000000 --- a/noir/noir-repo/aztec_macros/src/transforms/functions.rs +++ /dev/null @@ -1,858 +0,0 @@ -use convert_case::{Case, Casing}; -use noirc_errors::Span; -use noirc_frontend::ast::{self, Documented, FunctionKind}; -use noirc_frontend::ast::{ - BlockExpression, ConstrainKind, ConstrainStatement, Expression, ExpressionKind, - ForLoopStatement, ForRange, FunctionReturnType, Ident, Literal, NoirFunction, NoirStruct, - Param, PathKind, Pattern, Signedness, Statement, StatementKind, UnresolvedType, - UnresolvedTypeData, Visibility, -}; - -use noirc_frontend::macros_api::FieldElement; - -use crate::utils::ast_utils::member_access; -use crate::utils::parse_utils::parse_program; -use crate::{ - chained_dep, chained_path, - utils::{ - ast_utils::{ - assignment, assignment_with_type, call, cast, expression, ident, ident_path, - index_array, make_eq, make_statement, make_type, method_call, mutable_assignment, - mutable_reference, path, path_segment, return_type, variable, variable_ident, - variable_path, - }, - errors::AztecMacroError, - }, -}; - -// If it does, it will insert the following things: -/// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs -/// - Hashes all of the function input variables -/// - This instantiates a helper function -pub fn transform_function( - ty: &str, - func: &mut NoirFunction, - storage_struct_name: Option, - is_initializer: bool, - insert_init_check: bool, - is_internal: bool, - is_static: bool, -) -> Result<(), AztecMacroError> { - assert!(matches!(ty, "Private" | "Public")); - let context_name = format!("{}Context", ty); - let inputs_name = format!("{}ContextInputs", ty); - let return_type_name = format!("{}CircuitPublicInputs", ty); - let is_private = ty == "Private"; - - // Force a static context if the function is static - if is_static { - let is_static_check = create_static_check(func.name(), is_private); - func.def.body.statements.insert(0, is_static_check); - } - - // Add check that msg sender equals this address and flag function as internal - if is_internal { - let is_internal_check = create_internal_check(func.name()); - func.def.body.statements.insert(0, is_internal_check); - } - - // Add initialization check - if insert_init_check { - let init_check = create_init_check(ty); - 
func.def.body.statements.insert(0, init_check); - } - - // Add assertion for initialization arguments and sender - if is_initializer { - func.def.body.statements.insert(0, create_assert_initializer(ty)); - } - - // Add access to the storage struct - if let Some(storage_struct_name) = storage_struct_name { - let storage_def = abstract_storage(storage_struct_name, false); - func.def.body.statements.insert(0, storage_def); - } - - // Insert the context creation as the first action - let create_context = if is_private { - create_context_private(&context_name, &func.def.parameters)? - } else { - create_context_public()? - }; - func.def.body.statements.splice(0..0, (create_context).iter().cloned()); - - // Add the inputs to the params - let input = create_inputs(&inputs_name); - func.def.parameters.insert(0, input); - - // Abstract return types such that they get added to the kernel's return_values - if is_private { - if let Some(return_values_statements) = abstract_return_values(func)? { - // In case we are pushing return values to the context, we remove the statement that originated it - // This avoids running duplicate code, since blocks like if/else can be value returning statements - func.def.body.statements.pop(); - // Add the new return statement - func.def.body.statements.extend(return_values_statements); - } - } - - // Before returning mark the contract as initialized - if is_initializer { - let mark_initialized = create_mark_as_initialized(ty); - func.def.body.statements.push(mark_initialized); - } - - // Push the finish method call to the end of the function - if is_private { - let finish_def = create_context_finish(); - func.def.body.statements.push(finish_def); - } - - // The AVM doesn't need a return type yet. - if is_private { - let return_type = create_return_type(&return_type_name); - func.def.return_type = return_type; - func.def.return_visibility = Visibility::Public; - } else { - func.def.return_visibility = Visibility::Public; - } - - // Public functions should have unconstrained auto-inferred - func.def.is_unconstrained = !is_private; - - // Private functions need to be recursive - if is_private { - func.kind = FunctionKind::Recursive; - } - - Ok(()) -} - -// Generates a global struct containing the original (before transform_function gets executed) function abi that gets exported -// in the contract artifact after compilation. The abi will be later used to decode the function return values in the simulator. 
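A minimal sketch of the source string this ABI export assembles, assuming a hypothetical function `fn transfer(amount: Field) -> Field` and the `{name}_parameters` / `{name}_abi` naming used by `export_fn_abi` below; only the string-building step is mirrored here, not the parse back into the AST:

```rust
// Sketch only: a hypothetical `fn transfer(amount: Field) -> Field` would be exported
// roughly like this (the naming mirrors the generator below; this helper is illustrative).
fn example_export_fn_abi_source() -> String {
    let name = "transfer"; // hypothetical function name
    // One struct describing the parameters by name and type...
    let parameters_struct = format!("struct {name}_parameters {{ amount: Field }}");
    // ...and one #[abi(functions)] struct tying parameters and return type together,
    // which ends up in the contract artifact so the simulator can decode return values.
    let abi_struct = format!(
        "#[abi(functions)] struct {name}_abi {{ parameters: {name}_parameters, return_type: Field }}"
    );
    format!("{parameters_struct}\n{abi_struct}")
}
```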
-pub fn export_fn_abi( - types: &mut Vec>, - func: &NoirFunction, - empty_spans: bool, -) -> Result<(), AztecMacroError> { - let mut parameters_struct_source: Option<&str> = None; - - let struct_source = format!( - " - struct {}_parameters {{ - {} - }} - ", - func.name(), - func.parameters() - .iter() - .map(|param| { - let param_name = match param.pattern.clone() { - Pattern::Identifier(ident) => Ok(ident.0.contents), - _ => Err(AztecMacroError::CouldNotExportFunctionAbi { - span: Some(param.span), - secondary_message: Some( - "Only identifier patterns are supported".to_owned(), - ), - }), - }; - - format!( - "{}: {}", - param_name.unwrap(), - param.typ.typ.to_string().replace("plain::", "") - ) - }) - .collect::>() - .join(",\n"), - ); - - if !func.parameters().is_empty() { - parameters_struct_source = Some(&struct_source); - } - - let mut program = String::new(); - - let parameters = if let Some(parameters_struct_source) = parameters_struct_source { - program.push_str(parameters_struct_source); - format!("parameters: {}_parameters,\n", func.name()) - } else { - "".to_string() - }; - - let return_type_str = func.return_type().typ.to_string().replace("plain::", ""); - let return_type = if return_type_str != "()" { - format!("return_type: {},\n", return_type_str) - } else { - "".to_string() - }; - - let export_struct_source = format!( - " - #[abi(functions)] - struct {}_abi {{ - {}{} - }}", - func.name(), - parameters, - return_type - ); - - program.push_str(&export_struct_source); - - let (ast, errors) = parse_program(&program, empty_spans); - if !errors.is_empty() { - return Err(AztecMacroError::CouldNotExportFunctionAbi { - span: None, - secondary_message: Some( - format!("Failed to parse Noir macro code (struct {}_abi). This is either a bug in the compiler or the Noir macro code", func.name()) - ) - }); - } - - let sorted_ast = ast.into_sorted(); - types.extend(sorted_ast.types); - Ok(()) -} - -/// Transform Unconstrained -/// -/// Inserts the following code at the beginning of an unconstrained function -/// ```noir -/// let context = UnconstrainedContext::new(); -/// let storage = Storage::init(context); -/// ``` -/// -/// This will allow developers to access their contract' storage struct in unconstrained functions -pub fn transform_unconstrained(func: &mut NoirFunction, storage_struct_name: String) { - // let context = UnconstrainedContext::new(); - let let_context = assignment( - "context", // Assigned to - call( - variable_path(chained_dep!( - "aztec", - "context", - "unconstrained_context", - "UnconstrainedContext", - "new" - )), - vec![], - ), - ); - - // We inject the statements at the beginning, in reverse order. - func.def.body.statements.insert(0, abstract_storage(storage_struct_name, true)); - func.def.body.statements.insert(0, let_context); -} - -/// Helper function that returns what the private context would look like in the ast -/// This should make it available to be consumed within aztec private annotated functions. -/// -/// The replaced code: -/// ```noir -/// /// Before -/// fn foo(inputs: PrivateContextInputs) { -/// // ... -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -fn create_inputs(ty: &str) -> Param { - let context_ident = ident("inputs"); - let context_pattern = Pattern::Identifier(context_ident); - - let path_snippet = ty.to_case(Case::Snake); // e.g. 
private_context_inputs - let type_path = chained_dep!("aztec", "context", "inputs", &path_snippet, ty); - - let context_type = make_type(UnresolvedTypeData::Named(type_path, Default::default(), true)); - let visibility = Visibility::Private; - - Param { pattern: context_pattern, typ: context_type, visibility, span: Span::default() } -} - -/// Creates an initialization check to ensure that the contract has been initialized, meant to -/// be injected as the first statement of any function after the context has been created. -/// -/// ```noir -/// assert_is_initialized(&mut context); -/// ``` -fn create_init_check(ty: &str) -> Statement { - let fname = format!("assert_is_initialized_{}", ty.to_case(Case::Snake)); - make_statement(StatementKind::Expression(call( - variable_path(chained_dep!("aztec", "initializer", &fname)), - vec![mutable_reference("context")], - ))) -} - -/// Creates a call to mark_as_initialized which emits the initialization nullifier, meant to -/// be injected as the last statement before returning in a constructor. -/// -/// ```noir -/// mark_as_initialized(&mut context); -/// ``` -fn create_mark_as_initialized(ty: &str) -> Statement { - let fname = format!("mark_as_initialized_{}", ty.to_case(Case::Snake)); - make_statement(StatementKind::Expression(call( - variable_path(chained_dep!("aztec", "initializer", &fname)), - vec![mutable_reference("context")], - ))) -} - -/// Forces a static context for a function, ensuring that no state modifications are allowed -/// -/// ```noir -/// assert(context.inputs.call_context.is_static_call == true, "Function can only be called statically") -/// ``` -fn create_static_check(fname: &str, is_private: bool) -> Statement { - let is_static_call_expr = if is_private { - ["inputs", "call_context", "is_static_call"] - .iter() - .fold(variable("context"), |acc, member| member_access(acc, member)) - } else { - method_call(variable("context"), "is_static_call", vec![]) - }; - make_statement(StatementKind::Constrain(ConstrainStatement { - kind: ConstrainKind::Assert, - arguments: vec![ - make_eq(is_static_call_expr, expression(ExpressionKind::Literal(Literal::Bool(true)))), - expression(ExpressionKind::Literal(Literal::Str(format!( - "Function {} can only be called statically", - fname - )))), - ], - span: Default::default(), - })) -} - -/// Creates a check for internal functions ensuring that the caller is self. -/// -/// ```noir -/// assert(context.msg_sender() == context.this_address(), "Function can only be called internally"); -/// ``` -fn create_internal_check(fname: &str) -> Statement { - make_statement(StatementKind::Constrain(ConstrainStatement { - kind: ConstrainKind::Assert, - arguments: vec![ - make_eq( - method_call(variable("context"), "msg_sender", vec![]), - method_call(variable("context"), "this_address", vec![]), - ), - expression(ExpressionKind::Literal(Literal::Str(format!( - "Function {} can only be called internally", - fname - )))), - ], - span: Default::default(), - })) -} - -/// Creates a call to assert_initialization_matches_address_preimage to be inserted -/// in the initializer. Checks that the args and sender to the initializer match the -/// commitments from the address preimage. 
-/// -/// ```noir -/// assert_initialization_matches_address_preimage(context); -/// ``` -fn create_assert_initializer(ty: &str) -> Statement { - let fname = - format!("assert_initialization_matches_address_preimage_{}", ty.to_case(Case::Snake)); - make_statement(StatementKind::Expression(call( - variable_path(chained_dep!("aztec", "initializer", &fname)), - vec![variable("context")], - ))) -} - -fn serialize_to_hasher( - identifier: &Ident, - typ: &UnresolvedTypeData, - hasher_name: &str, -) -> Option> { - let mut statements = Vec::new(); - - // Match the type to determine the padding to do - match typ { - // `{hasher_name}.extend_from_array({ident}.serialize())` - UnresolvedTypeData::Named(..) => { - statements.push(add_struct_to_hasher(identifier, hasher_name)); - } - UnresolvedTypeData::Array(_, arr_type) => { - statements.push(add_array_to_hasher(identifier, arr_type, hasher_name)); - } - // `{hasher_name}.push({ident})` - UnresolvedTypeData::FieldElement => { - statements.push(add_field_to_hasher(identifier, hasher_name)); - } - // Add the integer to the bounded vec, casted to a field - // `{hasher_name}.push({ident} as Field)` - UnresolvedTypeData::Integer(..) | UnresolvedTypeData::Bool => { - statements.push(add_cast_to_hasher(identifier, hasher_name)); - } - UnresolvedTypeData::String(..) => { - let (var_bytes, id) = str_to_bytes(identifier); - statements.push(var_bytes); - statements.push(add_array_to_hasher( - &id, - &UnresolvedType { - typ: UnresolvedTypeData::Integer( - Signedness::Unsigned, - ast::IntegerBitSize::ThirtyTwo, - ), - span: Span::default(), - }, - hasher_name, - )) - } - _ => return None, - }; - Some(statements) -} - -/// Creates the private context object to be accessed within the function, the parameters need to be extracted to be -/// appended into the args hash object. -/// -/// The replaced code: -/// ```noir -/// #[aztec(private)] -/// fn foo(structInput: SomeStruct, arrayInput: [u8; 10], fieldInput: Field) -> Field { -/// // Create the hasher object -/// let mut hasher = Hasher::new(); -/// -/// // struct inputs call serialize on them to add an array of fields -/// hasher.add_multiple(structInput.serialize()); -/// -/// // Array inputs are iterated over and each element is added to the hasher (as a field) -/// for i in 0..arrayInput.len() { -/// hasher.add(arrayInput[i] as Field); -/// } -/// // Field inputs are added to the hasher -/// hasher.add({ident}); -/// -/// // Create the context -/// // The inputs (injected by this `create_inputs`) and completed hash object are passed to the context -/// let mut context = PrivateContext::new(inputs, hasher.hash()); -/// } -/// ``` -fn create_context_private(ty: &str, params: &[Param]) -> Result, AztecMacroError> { - let mut injected_statements: Vec = vec![]; - - let hasher_name = "args_hasher"; - - // `let mut args_hasher = Hasher::new();` - let let_hasher = mutable_assignment( - hasher_name, // Assigned to - call( - variable_path(chained_dep!("aztec", "hash", "ArgsHasher", "new")), // Path - vec![], // args - ), - ); - - // Completes: `let mut args_hasher = Hasher::new();` - injected_statements.push(let_hasher); - - // Iterate over each of the function parameters, adding to them to the hasher - for Param { pattern, typ, span, .. 
} in params { - match pattern { - Pattern::Identifier(identifier) => { - // Match the type to determine the padding to do - let unresolved_type = &typ.typ; - injected_statements.extend( - serialize_to_hasher(identifier, unresolved_type, hasher_name).ok_or_else( - || AztecMacroError::UnsupportedFunctionArgumentType { - typ: unresolved_type.clone(), - span: *span, - }, - )?, - ); - } - _ => todo!(), // Maybe unreachable? - } - } - - // Create the inputs to the context - let inputs_expression = variable("inputs"); - // `args_hasher.hash()` - let hash_call = method_call( - variable(hasher_name), // variable - "hash", // method name - vec![], // args - ); - - let path_snippet = ty.to_case(Case::Snake); // e.g. private_context - - // let mut context = {ty}::new(inputs, hash); - let let_context = mutable_assignment( - "context", // Assigned to - call( - variable_path(chained_dep!("aztec", "context", &path_snippet, ty, "new")), // Path - vec![inputs_expression, hash_call], // args - ), - ); - injected_statements.push(let_context); - - // Return all expressions that will be injected by the hasher - Ok(injected_statements) -} - -/// Creates the public context object to be accessed within the function. -/// -/// The replaced code: -/// ```noir -/// #[aztec(public)] -/// fn foo(inputs: PublicContextInputs, ...) -> Field { -/// let mut context = PublicContext::new(inputs); -/// } -/// ``` -fn create_context_public() -> Result, AztecMacroError> { - let mut injected_expressions: Vec = vec![]; - - // Create the inputs to the context - let inputs_expression = variable("inputs"); - - // let mut context = {ty}::new(inputs, hash); - let let_context = mutable_assignment( - "context", // Assigned to - call( - variable_path(chained_dep!( - "aztec", - "context", - "public_context", - "PublicContext", - "new" - )), // Path - vec![inputs_expression], // args - ), - ); - injected_expressions.push(let_context); - - // Return all expressions that will be injected by the hasher - Ok(injected_expressions) -} - -/// Abstract Return Type -/// -/// This function intercepts the function's current return type and replaces it with pushes to a hasher -/// that will be used to generate the returns hash for the kernel. -/// -/// The replaced code: -/// ```noir -/// /// Before -/// #[aztec(private)] -/// fn foo() -> Field { -/// // ... -/// let my_return_value: Field = 10; -/// my_return_value -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { -/// // ... -/// let my_return_value: Field = 10; -/// let macro__returned__values = my_return_value; -/// let mut returns_hasher = ArgsHasher::new(); -/// returns_hasher.add(macro__returned__values); -/// context.set_return_hash(returns_hasher); -/// } -/// ``` -/// Similarly; Structs will be pushed to the hasher, after serialize() is called on them. -/// Arrays will be iterated over and each element will be pushed to the hasher. -/// Any primitive type that can be cast will be casted to a field and pushed to the hasher. 
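A minimal sketch of the return rewrite described above, with the injected Noir rendered as plain strings rather than AST nodes; the helper and its `hasher_push` argument are hypothetical, while `macro__returned__values` and `returns_hasher` are the identifiers the macro actually uses:

```rust
// Sketch only: the statement sequence that replaces a trailing return expression.
// `hasher_push` stands in for whichever push the serialize_to_hasher dispatch picks
// (add_multiple of serialize(), a plain add, an `as Field` cast, or an array loop).
fn example_return_rewrite(return_expr: &str, hasher_push: &str) -> Vec<String> {
    vec![
        format!("let macro__returned__values = {return_expr};"),
        "let mut returns_hasher = dep::aztec::hash::ArgsHasher::new();".to_string(),
        hasher_push.to_string(),
        "context.set_return_hash(returns_hasher);".to_string(),
    ]
}
```

For a plain `Field` return this would be invoked as `example_return_rewrite("my_return_value", "returns_hasher.add(macro__returned__values);")`.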
-fn abstract_return_values(func: &NoirFunction) -> Result>, AztecMacroError> { - let current_return_type = func.return_type().typ; - - // Short circuit if the function doesn't return anything - match current_return_type { - UnresolvedTypeData::Unit | UnresolvedTypeData::Unspecified => return Ok(None), - _ => (), - } - - let Some(last_statement) = func.def.body.statements.last() else { - return Ok(None); - }; - - // TODO: support tuples here and in inputs -> convert into an issue - // Check if the return type is an expression, if it is, we can handle it - match last_statement { - Statement { kind: StatementKind::Expression(expression), .. } => { - let return_value_name = "macro__returned__values"; - let hasher_name = "returns_hasher"; - - let mut replacement_statements = vec![ - assignment_with_type( - return_value_name, // Assigned to - current_return_type.clone(), - expression.clone(), - ), - mutable_assignment( - hasher_name, // Assigned to - call( - variable_path(chained_dep!("aztec", "hash", "ArgsHasher", "new")), // Path - vec![], // args - ), - ), - ]; - - let serialization_statements = - serialize_to_hasher(&ident(return_value_name), ¤t_return_type, hasher_name) - .ok_or_else(|| AztecMacroError::UnsupportedFunctionReturnType { - typ: current_return_type.clone(), - span: func.return_type().span, - })?; - - replacement_statements.extend(serialization_statements); - - replacement_statements.push(make_statement(StatementKind::Semi(method_call( - variable("context"), - "set_return_hash", - vec![variable(hasher_name)], - )))); - - Ok(Some(replacement_statements)) - } - _ => Ok(None), - } -} - -/// Abstract storage -/// -/// For private functions: -/// ```noir -/// #[aztec(private)] -/// fn lol() { -/// let storage = Storage::init(&mut context); -/// } -/// ``` -/// -/// For public functions: -/// ```noir -/// #[aztec(public)] -/// fn lol() { -/// let storage = Storage::init(&mut context); -/// } -/// ``` -/// -/// For unconstrained functions: -/// ```noir -/// unconstrained fn lol() { -/// let storage = Storage::init(context); -/// } -fn abstract_storage(storage_struct_name: String, unconstrained: bool) -> Statement { - let context_expr = - if unconstrained { variable("context") } else { mutable_reference("context") }; - - assignment( - "storage", // Assigned to - call( - variable_path(chained_path!(storage_struct_name.as_str(), "init")), // Path - vec![context_expr], // args - ), - ) -} - -/// Create Return Type -/// -/// Public functions return protocol_types::abis::public_circuit_public_inputs::PublicCircuitPublicInputs while -/// private functions return protocol_types::abis::private_circuit_public_inputs::::PrivateCircuitPublicInputs -/// -/// This call constructs an ast token referencing the above types -/// The name is set in the function above `transform`, hence the -/// whole token name is passed in -/// -/// The replaced code: -/// ```noir -/// -/// /// Before -/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { -/// // ... -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -fn create_return_type(ty: &str) -> FunctionReturnType { - let path_snippet = ty.to_case(Case::Snake); // e.g. 
private_circuit_public_inputs or public_circuit_public_inputs - let return_path = chained_dep!("aztec", "protocol_types", "abis", &path_snippet, ty); - return_type(return_path) -} - -/// Create Context Finish -/// -/// Each aztec function calls `context.finish()` at the end of a function -/// to return values required by the kernel. -/// -/// The replaced code: -/// ```noir -/// /// Before -/// fn foo() -> protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs { -/// // ... -/// context.finish() -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -fn create_context_finish() -> Statement { - let method_call = method_call( - variable("context"), // variable - "finish", // method name - vec![], // args - ); - make_statement(StatementKind::Expression(method_call)) -} - -// -// Methods to create hasher inputs -// - -fn add_struct_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { - // If this is a struct, we call serialize and add the array to the hasher - let serialized_call = method_call( - variable_path(path(identifier.clone())), // variable - "serialize", // method name - vec![], // args - ); - - make_statement(StatementKind::Semi(method_call( - variable(hasher_name), // variable - "add_multiple", // method name - vec![serialized_call], // args - ))) -} - -fn str_to_bytes(identifier: &Ident) -> (Statement, Ident) { - // let identifier_as_bytes = identifier.as_bytes(); - let var = variable_ident(identifier.clone()); - let contents = if let ExpressionKind::Variable(p) = &var.kind { - p.first_name() - } else { - panic!("Unexpected identifier type") - }; - let bytes_name = format!("{}_bytes", contents); - let var_bytes = assignment(&bytes_name, method_call(var, "as_bytes", vec![])); - let id = Ident::new(bytes_name, Span::default()); - - (var_bytes, id) -} - -fn create_loop_over(var: Expression, loop_body: Vec) -> Statement { - // If this is an array of primitive types (integers / fields) we can add them each to the hasher - // casted to a field - let span = var.span; - - // `array.len()` - let end_range_expression = method_call( - var, // variable - "len", // method name - vec![], // args - ); - - // What will be looped over - - // - `hasher.add({ident}[i] as Field)` - let for_loop_block = - expression(ExpressionKind::Block(BlockExpression { statements: loop_body })); - - // `for i in 0..{ident}.len()` - make_statement(StatementKind::For(ForLoopStatement { - range: ForRange::Range( - expression(ExpressionKind::Literal(Literal::Integer( - FieldElement::from(i128::from(0)), - false, - ))), - end_range_expression, - ), - identifier: ident("i"), - block: for_loop_block, - span, - })) -} - -fn add_array_to_hasher( - identifier: &Ident, - arr_type: &UnresolvedType, - hasher_name: &str, -) -> Statement { - // If this is an array of primitive types (integers / fields) we can add them each to the hasher - // casted to a field - - // Wrap in the semi thing - does that mean ended with semi colon? - // `hasher.add({ident}[i] as Field)` - - let arr_index = index_array(identifier.clone(), "i"); - let (add_expression, hasher_method_name) = match arr_type.typ { - UnresolvedTypeData::Named(..) 
=> { - let hasher_method_name = "add_multiple".to_owned(); - let call = method_call( - // All serialize on each element - arr_index, // variable - "serialize", // method name - vec![], // args - ); - (call, hasher_method_name) - } - _ => { - let hasher_method_name = "add".to_owned(); - let call = cast( - arr_index, // lhs - `ident[i]` - UnresolvedTypeData::FieldElement, // cast to - `as Field` - ); - (call, hasher_method_name) - } - }; - - let block_statement = make_statement(StatementKind::Semi(method_call( - variable(hasher_name), // variable - &hasher_method_name, // method name - vec![add_expression], - ))); - - create_loop_over(variable_ident(identifier.clone()), vec![block_statement]) -} - -fn add_field_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { - // `hasher.add({ident})` - let ident = variable_path(path(identifier.clone())); - make_statement(StatementKind::Semi(method_call( - variable(hasher_name), // variable - "add", // method name - vec![ident], // args - ))) -} - -fn add_cast_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { - // `hasher.add({ident} as Field)` - // `{ident} as Field` - let cast_operation = cast( - variable_path(path(identifier.clone())), // lhs - UnresolvedTypeData::FieldElement, // rhs - ); - - // `hasher.add({ident} as Field)` - make_statement(StatementKind::Semi(method_call( - variable(hasher_name), // variable - "add", // method name - vec![cast_operation], // args - ))) -} - -/** - * Takes a vector of functions and checks for the presence of arguments with Public visibility - * Returns AztecMAcroError::PublicArgsDisallowed if found - */ -pub fn check_for_public_args(functions: &[&NoirFunction]) -> Result<(), AztecMacroError> { - for func in functions { - for param in &func.def.parameters { - if param.visibility == Visibility::Public { - return Err(AztecMacroError::PublicArgsDisallowed { span: func.span() }); - } - } - } - Ok(()) -} diff --git a/noir/noir-repo/aztec_macros/src/transforms/mod.rs b/noir/noir-repo/aztec_macros/src/transforms/mod.rs deleted file mode 100644 index bd419bced6f..00000000000 --- a/noir/noir-repo/aztec_macros/src/transforms/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub mod compute_note_hash_and_optionally_a_nullifier; -pub mod contract_interface; -pub mod events; -pub mod functions; -pub mod note_interface; -pub mod storage; diff --git a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs deleted file mode 100644 index 9ca260d1b95..00000000000 --- a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs +++ /dev/null @@ -1,848 +0,0 @@ -use noirc_errors::Span; -use noirc_frontend::ast::{ - Documented, ItemVisibility, LetStatement, NoirFunction, NoirStruct, PathKind, StructField, - TraitImplItem, TraitImplItemKind, TypeImpl, UnresolvedTypeData, UnresolvedTypeExpression, -}; -use noirc_frontend::{ - graph::CrateId, - macros_api::{FileId, HirContext, HirExpression, HirLiteral, HirStatement}, - parser::SortedModule, - Type, -}; - -use acvm::AcirField; -use regex::Regex; - -use crate::utils::parse_utils::parse_program; -use crate::{ - chained_dep, - utils::{ - ast_utils::{ - check_trait_method_implemented, ident, ident_path, is_custom_attribute, make_type, - path_segment, - }, - errors::AztecMacroError, - hir_utils::{fetch_notes, get_contract_module_data, inject_global}, - }, -}; - -use super::contract_interface::hash_to_selector; - -// Automatic implementation of most of the methods in the NoteInterface trait, guiding the user with 
meaningful error messages in case some -// methods must be implemented manually. -pub fn generate_note_interface_impl( - module: &mut SortedModule, - empty_spans: bool, -) -> Result<(), AztecMacroError> { - // Find structs annotated with #[aztec(note)] - let annotated_note_structs = - module.types.iter_mut().map(|t| &mut t.item).filter(|typ| { - typ.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(note)")) - }); - - let mut structs_to_inject = vec![]; - - for note_struct in annotated_note_structs { - // Look for the NoteInterface trait implementation for the note - let trait_impl = module - .trait_impls - .iter_mut() - .find(|trait_impl| { - if let UnresolvedTypeData::Named(struct_path, _, _) = &trait_impl.object_type.typ { - struct_path.last_ident() == note_struct.name - && trait_impl.trait_name.last_name() == "NoteInterface" - } else { - false - } - }) - .ok_or(AztecMacroError::CouldNotImplementNoteInterface { - span: Some(note_struct.name.span()), - secondary_message: Some(format!( - "Could not find NoteInterface trait implementation for note: {}", - note_struct.name.0.contents - )), - })?; - let note_interface_impl_span = - if empty_spans { Span::default() } else { trait_impl.object_type.span }; - - // Look for the note struct implementation, generate a default one if it doesn't exist (in order to append methods to it) - let existing_impl = module.impls.iter_mut().find(|r#impl| match &r#impl.object_type.typ { - UnresolvedTypeData::Named(path, _, _) => path.last_ident().eq(¬e_struct.name), - _ => false, - }); - let note_impl = if let Some(note_impl) = existing_impl { - note_impl - } else { - let default_impl = TypeImpl { - object_type: trait_impl.object_type.clone(), - type_span: note_struct.name.span(), - generics: vec![], - methods: vec![], - where_clause: vec![], - }; - module.impls.push(default_impl.clone()); - module.impls.last_mut().unwrap() - }; - // Identify the note type (struct name), its fields and its serialized length (generic param of NoteInterface trait impl) - let note_type = note_struct.name.0.contents.to_string(); - let mut note_fields = vec![]; - let note_interface_generics = trait_impl - .trait_generics - .ordered_args - .iter() - .map(|gen| match gen.typ.clone() { - UnresolvedTypeData::Named(path, _, _) => Ok(path.last_name().to_string()), - UnresolvedTypeData::Expression(UnresolvedTypeExpression::Constant(val, _)) => { - Ok(val.to_string()) - } - _ => Err(AztecMacroError::CouldNotImplementNoteInterface { - span: Some(trait_impl.object_type.span), - secondary_message: Some(format!( - "NoteInterface must be generic over NOTE_LEN and NOTE_BYTES_LEN: {}", - note_type - )), - }), - }) - .collect::, _>>()?; - let [note_serialized_len, note_bytes_len]: [_; 2] = - note_interface_generics.try_into().expect( - "NoteInterface must be generic over 2 types, NOTE_FIELDS_LEN and NOTE_BYTES_LEN", - ); - - // Automatically inject the header field if it's not present - let header_field_name = if let Some(existing_header) = - note_struct.fields.iter().find(|field| match &field.item.typ.typ { - UnresolvedTypeData::Named(path, _, _) => path.last_name() == "NoteHeader", - _ => false, - }) { - existing_header.clone().item.name - } else { - let generated_header = StructField { - name: ident("header"), - typ: make_type(UnresolvedTypeData::Named( - chained_dep!("aztec", "note", "note_header", "NoteHeader"), - Default::default(), - false, - )), - }; - note_struct.fields.push(Documented::not_documented(generated_header.clone())); - generated_header.name - }; - - for field in 
note_struct.fields.iter() { - let field_ident = &field.item.name; - let field_type = &field.item.typ; - note_fields.push(( - field_ident.0.contents.to_string(), - field_type.typ.to_string().replace("plain::", ""), - )); - } - - if !check_trait_method_implemented(trait_impl, "serialize_content") - && !check_trait_method_implemented(trait_impl, "deserialize_content") - && !note_impl - .methods - .iter() - .any(|(func, _)| func.item.def.name.0.contents == "properties") - { - let note_serialize_content_fn = generate_note_serialize_content( - ¬e_type, - ¬e_fields, - ¬e_serialized_len, - &header_field_name.0.contents, - note_interface_impl_span, - empty_spans, - )?; - trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(note_serialize_content_fn), - span: note_interface_impl_span, - })); - - let note_deserialize_content_fn = generate_note_deserialize_content( - ¬e_type, - ¬e_fields, - ¬e_serialized_len, - &header_field_name.0.contents, - note_interface_impl_span, - empty_spans, - )?; - trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(note_deserialize_content_fn), - span: note_interface_impl_span, - })); - - let note_properties_struct = generate_note_properties_struct( - ¬e_type, - ¬e_fields, - &header_field_name.0.contents, - note_interface_impl_span, - empty_spans, - )?; - structs_to_inject.push(Documented::not_documented(note_properties_struct)); - let note_properties_fn = generate_note_properties_fn( - ¬e_type, - ¬e_fields, - &header_field_name.0.contents, - note_interface_impl_span, - empty_spans, - )?; - note_impl - .methods - .push((Documented::not_documented(note_properties_fn), note_impl.type_span)); - } - - if !check_trait_method_implemented(trait_impl, "get_header") { - let get_header_fn = generate_note_get_header( - ¬e_type, - &header_field_name.0.contents, - note_interface_impl_span, - empty_spans, - )?; - trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(get_header_fn), - span: note_interface_impl_span, - })); - } - if !check_trait_method_implemented(trait_impl, "set_header") { - let set_header_fn = generate_note_set_header( - ¬e_type, - &header_field_name.0.contents, - note_interface_impl_span, - empty_spans, - )?; - trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(set_header_fn), - span: note_interface_impl_span, - })); - } - - if !check_trait_method_implemented(trait_impl, "get_note_type_id") { - let note_type_id = compute_note_type_id(¬e_type); - let get_note_type_id_fn = - generate_get_note_type_id(note_type_id, note_interface_impl_span, empty_spans)?; - trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(get_note_type_id_fn), - span: note_interface_impl_span, - })); - } - - if !check_trait_method_implemented(trait_impl, "compute_note_hiding_point") { - let compute_note_hiding_point_fn = generate_compute_note_hiding_point( - ¬e_type, - note_interface_impl_span, - empty_spans, - )?; - trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(compute_note_hiding_point_fn), - span: note_interface_impl_span, - })); - } - - if !check_trait_method_implemented(trait_impl, "to_be_bytes") { - let to_be_bytes_fn = generate_note_to_be_bytes( - ¬e_type, - note_bytes_len.as_str(), - note_serialized_len.as_str(), - note_interface_impl_span, - empty_spans, - )?; - 
trait_impl.items.push(Documented::not_documented(TraitImplItem { - kind: TraitImplItemKind::Function(to_be_bytes_fn), - span: note_interface_impl_span, - })); - } - } - - module.types.extend(structs_to_inject); - Ok(()) -} - -fn generate_note_to_be_bytes( - note_type: &String, - byte_length: &str, - serialized_length: &str, - impl_span: Span, - empty_spans: bool, -) -> Result { - let function_source = format!( - " - fn to_be_bytes(self: {1}, storage_slot: Field) -> [u8; {0}] {{ - assert({0} == {2} * 32 + 64, \"Note byte length must be equal to (serialized_length * 32) + 64 bytes\"); - let serialized_note = self.serialize_content(); - - let mut buffer: [u8; {0}] = [0; {0}]; - - let storage_slot_bytes: [u8; 32] = storage_slot.to_be_bytes(); - let note_type_id_bytes: [u8; 32] = {1}::get_note_type_id().to_be_bytes(); - - for i in 0..32 {{ - buffer[i] = storage_slot_bytes[i]; - buffer[32 + i] = note_type_id_bytes[i]; - }} - - for i in 0..serialized_note.len() {{ - let bytes: [u8; 32] = serialized_note[i].to_be_bytes(); - for j in 0..32 {{ - buffer[64 + i * 32 + j] = bytes[j]; - }} - }} - buffer - }} - ", - byte_length, note_type, serialized_length - ) - .to_string(); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn to_be_bytes). This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -fn generate_note_get_header( - note_type: &String, - note_header_field_name: &String, - impl_span: Span, - empty_spans: bool, -) -> Result { - let function_source = format!( - " - fn get_header(note: {}) -> aztec::note::note_header::NoteHeader {{ - note.{} - }} - ", - note_type, note_header_field_name - ) - .to_string(); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn get_header). This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -fn generate_note_set_header( - note_type: &String, - note_header_field_name: &String, - impl_span: Span, - empty_spans: bool, -) -> Result { - let function_source = format!( - " - fn set_header(self: &mut {}, header: aztec::note::note_header::NoteHeader) {{ - self.{} = header; - }} - ", - note_type, note_header_field_name - ); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn set_header). 
This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -// Automatically generate the note type id getter method. The id itself its calculated as the concatenation -// of the conversion of the characters in the note's struct name to unsigned integers. -fn generate_get_note_type_id( - note_type_id: u32, - impl_span: Span, - empty_spans: bool, -) -> Result { - // TODO(#7165): replace {} with dep::aztec::protocol_types::abis::note_selector::compute_note_selector(\"{}\") in the function source below - let function_source = format!( - " - fn get_note_type_id() -> Field {{ - {} - }} - ", - note_type_id - ) - .to_string(); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn get_note_type_id). This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -// Automatically generate a struct that represents the note's serialization metadata, as -// -// NoteTypeFields { -// field1: PropertySelector { index: 0, offset: 0, length: 32 }, -// field2: PropertySelector { index: 1, offset: 0, length: 32 }, -// ... -// } -// -// It assumes each field occupies an entire field and its serialized in definition order -fn generate_note_properties_struct( - note_type: &str, - note_fields: &[(String, String)], - note_header_field_name: &String, - impl_span: Span, - empty_spans: bool, -) -> Result { - let struct_source = - generate_note_properties_struct_source(note_type, note_fields, note_header_field_name); - - let (struct_ast, errors) = parse_program(&struct_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (struct {}Properties). This is either a bug in the compiler or the Noir macro code", note_type)), - span: Some(impl_span) - }); - } - - let mut struct_ast = struct_ast.into_sorted(); - Ok(struct_ast.types.remove(0).item) -} - -// Generate the deserialize_content method as -// -// fn deserialize_content(serialized_note: [Field; NOTE_SERIALIZED_LEN]) -> Self { -// NoteType { -// note_field1: serialized_note[0] as Field, -// note_field2: NoteFieldType2::from_field(serialized_note[1])... 
-// } -// } -// It assumes every note field is stored in an individual serialized field, -// and can be converted to the original type via the from_field() trait (structs) or cast as Field (integers) -fn generate_note_deserialize_content( - note_type: &str, - note_fields: &[(String, String)], - note_serialize_len: &String, - note_header_field_name: &String, - impl_span: Span, - empty_spans: bool, -) -> Result { - let function_source = generate_note_deserialize_content_source( - note_type, - note_fields, - note_serialize_len, - note_header_field_name, - ); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn deserialize_content). This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -// Generate the serialize_content method as -// -// fn serialize_content(self: {}) -> [Field; NOTE_SERIALIZED_LEN] { -// [self.note_field1 as Field, self.note_field2.to_field()...] -// } -// -// It assumes every struct field can be converted either via the to_field() trait (structs) or cast as Field (integers) -fn generate_note_serialize_content( - note_type: &str, - note_fields: &[(String, String)], - note_serialize_len: &String, - note_header_field_name: &String, - impl_span: Span, - empty_spans: bool, -) -> Result { - let function_source = generate_note_serialize_content_source( - note_type, - note_fields, - note_serialize_len, - note_header_field_name, - ); - - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn serialize_content). This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -// Automatically generate a function in the Note's impl that returns the note's fields metadata -fn generate_note_properties_fn( - note_type: &str, - note_fields: &[(String, String)], - note_header_field_name: &String, - impl_span: Span, - empty_spans: bool, -) -> Result { - let function_source = - generate_note_properties_fn_source(note_type, note_fields, note_header_field_name); - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn properties). 
This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -// Automatically generate the method to compute the note's hiding point as: -// fn compute_note_hiding_point(self: NoteType) -> Point { -// aztec::hash::pedersen_commitment(self.serialize_content(), aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_HIDING_POINT) -// } -// -fn generate_compute_note_hiding_point( - note_type: &String, - impl_span: Span, - empty_spans: bool, -) -> Result { - // TODO(#7771): update this to do only 1 MSM call - let function_source = format!( - r#" - fn compute_note_hiding_point(self: {}) -> aztec::protocol_types::point::Point {{ - assert(self.header.storage_slot != 0, "Storage slot must be set before computing note hiding point"); - let slot_scalar = dep::std::hash::from_field_unsafe(self.header.storage_slot); - - let point_before_slotting = aztec::hash::pedersen_commitment(self.serialize_content(), aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_HIDING_POINT); - let slot_point = dep::std::embedded_curve_ops::multi_scalar_mul([dep::aztec::generators::G_slot], [slot_scalar]); - point_before_slotting + slot_point - }} - "#, - note_type - ); - let (function_ast, errors) = parse_program(&function_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some("Failed to parse Noir macro code (fn compute_note_hiding_point). This is either a bug in the compiler or the Noir macro code".to_string()), - span: Some(impl_span) - }); - } - let mut function_ast = function_ast.into_sorted(); - let mut noir_fn = function_ast.functions.remove(0).item; - noir_fn.def.span = impl_span; - noir_fn.def.visibility = ItemVisibility::Public; - Ok(noir_fn) -} - -fn generate_note_exports_global( - note_type: &str, - note_type_id: &str, - empty_spans: bool, -) -> Result { - let struct_source = format!( - " - #[abi(notes)] - global {0}_EXPORTS: (Field, str<{1}>) = (0x{2},\"{0}\"); - ", - note_type, - note_type.len(), - note_type_id - ) - .to_string(); - - let (global_ast, errors) = parse_program(&struct_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotImplementNoteInterface { - secondary_message: Some(format!("Failed to parse Noir macro code (struct {}Exports). This is either a bug in the compiler or the Noir macro code", note_type)), - span: None - }); - } - - let mut global_ast = global_ast.into_sorted(); - Ok(global_ast.globals.pop().unwrap().item) -} - -// Source code generator functions. These utility methods produce Noir code as strings, that are then parsed and added to the AST. 
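As a worked illustration of what these generators produce, assume a hypothetical note `struct ValueNote { value: Field, owner: AztecAddress, header: NoteHeader }`: the header field is skipped, `Field` members are used as-is, and other members go through `to_field()` / `from_field()`. A minimal sketch mirroring `generate_note_serialize_content_source`:

```rust
// Sketch only: the serialize_content source that would be generated for a hypothetical
// ValueNote with fields (value: Field, owner: AztecAddress, header: NoteHeader).
fn example_serialize_content_source() -> String {
    let note_type = "ValueNote";
    let note_serialized_len = 2; // the header field is not part of the serialized content
    // Field members are used directly; other members are converted with to_field().
    let members = ["self.value", "self.owner.to_field()"].join(", ");
    format!(
        "fn serialize_content(self: {note_type}) -> [Field; {note_serialized_len}] {{ [{members}] }}"
    )
}
```

The deserialization counterpart reverses this, emitting `serialized_note[i] as T` for `Field`, integer and `bool` members, `T::from_field(serialized_note[i])` for anything else, and `NoteHeader::empty()` for the header slot.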
- -fn generate_note_properties_struct_source( - note_type: &str, - note_fields: &[(String, String)], - note_header_field_name: &String, -) -> String { - let note_property_selectors = note_fields - .iter() - .filter_map(|(field_name, _)| { - if field_name != note_header_field_name { - Some(format!( - "{field_name}: dep::aztec::note::note_getter_options::PropertySelector" - )) - } else { - None - } - }) - .collect::>() - .join(",\n"); - format!( - " - struct {}Properties {{ - {} - }}", - note_type, note_property_selectors - ) - .to_string() -} - -fn generate_note_properties_fn_source( - note_type: &str, - note_fields: &[(String, String)], - note_header_field_name: &String, -) -> String { - let note_property_selectors = note_fields - .iter() - .enumerate() - .filter_map(|(index, (field_name, _))| { - if field_name != note_header_field_name { - Some(format!( - "{}: aztec::note::note_getter_options::PropertySelector {{ index: {}, offset: 0, length: 32 }}", - field_name, - index - )) - } else { - None - } - }) - .collect::>() - .join(", "); - format!( - " - pub fn properties() -> {0}Properties {{ - {0}Properties {{ - {1} - }} - }}", - note_type, note_property_selectors - ) - .to_string() -} - -fn generate_note_serialize_content_source( - note_type: &str, - note_fields: &[(String, String)], - note_serialize_len: &String, - note_header_field_name: &String, -) -> String { - let note_fields = note_fields - .iter() - .filter_map(|(field_name, field_type)| { - if field_name != note_header_field_name { - if field_type == "Field" { - Some(format!("self.{}", field_name)) - } else { - Some(format!("self.{}.to_field()", field_name)) - } - } else { - None - } - }) - .collect::>() - .join(", "); - format!( - " - fn serialize_content(self: {}) -> [Field; {}] {{ - [{}] - }}", - note_type, note_serialize_len, note_fields - ) - .to_string() -} - -fn generate_note_deserialize_content_source( - note_type: &str, - note_fields: &[(String, String)], - note_serialize_len: &String, - note_header_field_name: &String, -) -> String { - let note_fields = note_fields - .iter() - .enumerate() - .map(|(index, (field_name, field_type))| { - if field_name != note_header_field_name { - // TODO: Simplify this when https://github.com/noir-lang/noir/issues/4463 is fixed - if field_type.eq("Field") - || Regex::new(r"u[0-9]+").unwrap().is_match(field_type) - || field_type.eq("bool") - { - format!("{}: serialized_note[{}] as {},", field_name, index, field_type) - } else { - format!( - "{}: {}::from_field(serialized_note[{}]),", - field_name, field_type, index - ) - } - } else { - format!( - "{note_header_field_name}: dep::aztec::note::note_header::NoteHeader::empty()" - ) - } - }) - .collect::>() - .join("\n"); - format!( - " - fn deserialize_content(serialized_note: [Field; {}]) -> Self {{ - {} {{ - {} - }} - }}", - note_serialize_len, note_type, note_fields - ) - .to_string() -} - -// TODO(#7165): nuke this function -// Utility function to generate the note type id as a Field -fn compute_note_type_id(note_type: &str) -> u32 { - // TODO(#4519) Improve automatic note id generation and assignment - hash_to_selector(note_type) -} - -pub fn inject_note_exports( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - if let Some((_, module_id, file_id)) = get_contract_module_data(context, crate_id) { - let notes = fetch_notes(context); - - for (_, note) in notes { - let func_id = context - .def_interner - .lookup_method( - &Type::Struct(context.def_interner.get_struct(note.borrow().id), vec![]), - 
note.borrow().id, - "get_note_type_id", - false, - true, - ) - .ok_or(( - AztecMacroError::CouldNotExportStorageLayout { - span: None, - secondary_message: Some(format!( - "Could not retrieve get_note_type_id function for note {}", - note.borrow().name.0.contents - )), - }, - file_id, - ))?; - let get_note_type_id_function = - context.def_interner.function(&func_id).block(&context.def_interner); - let get_note_type_id_statement_id = - get_note_type_id_function.statements().first().ok_or(( - AztecMacroError::CouldNotExportStorageLayout { - span: None, - secondary_message: Some(format!( - "Could not retrieve note id statement from function for note {}", - note.borrow().name.0.contents - )), - }, - file_id, - ))?; - let note_type_id_statement = - context.def_interner.statement(get_note_type_id_statement_id); - - let note_type_id = match note_type_id_statement { - HirStatement::Expression(expression_id) => { - match context.def_interner.expression(&expression_id) { - HirExpression::Literal(HirLiteral::Integer(value, _)) => Ok(value), - HirExpression::Literal(_) => Err(( - AztecMacroError::CouldNotExportStorageLayout { - span: None, - secondary_message: Some( - "note_type_id statement must be a literal integer expression" - .to_string(), - ), - }, - file_id, - )), - _ => Err(( - AztecMacroError::CouldNotExportStorageLayout { - span: None, - secondary_message: Some( - "note_type_id statement must be a literal expression" - .to_string(), - ), - }, - file_id, - )), - } - } - _ => Err(( - AztecMacroError::CouldNotExportStorageLayout { - span: None, - secondary_message: Some( - "note_type_id statement must be an expression".to_string(), - ), - }, - file_id, - )), - }?; - let empty_spans = context.def_interner.is_in_lsp_mode(); - let global = generate_note_exports_global( - ¬e.borrow().name.0.contents, - ¬e_type_id.to_hex(), - empty_spans, - ) - .map_err(|err| (err, file_id))?; - - inject_global(crate_id, context, global, module_id, file_id); - } - } - Ok(()) -} diff --git a/noir/noir-repo/aztec_macros/src/transforms/storage.rs b/noir/noir-repo/aztec_macros/src/transforms/storage.rs deleted file mode 100644 index e8ee1675546..00000000000 --- a/noir/noir-repo/aztec_macros/src/transforms/storage.rs +++ /dev/null @@ -1,564 +0,0 @@ -use acvm::acir::AcirField; -use noirc_errors::Span; -use noirc_frontend::ast::{ - BlockExpression, Documented, Expression, ExpressionKind, FunctionDefinition, GenericTypeArgs, - Ident, Literal, NoirFunction, NoirStruct, Pattern, StatementKind, TypeImpl, UnresolvedType, - UnresolvedTypeData, -}; -use noirc_frontend::{ - graph::CrateId, - macros_api::{ - FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, - }, - node_interner::TraitId, - parser::SortedModule, - token::SecondaryAttribute, - Type, -}; - -use crate::utils::parse_utils::parse_program; -use crate::{ - chained_path, - utils::{ - ast_utils::{ - call, expression, ident, ident_path, is_custom_attribute, lambda, make_statement, - make_type, path_segment, pattern, return_type, variable, variable_path, - }, - errors::AztecMacroError, - hir_utils::{ - collect_crate_structs, collect_traits, get_contract_module_data, get_serialized_length, - }, - }, -}; - -// Check to see if the user has defined a storage struct -pub fn check_for_storage_definition( - module: &SortedModule, -) -> Result, AztecMacroError> { - let result: Vec<&NoirStruct> = module - .types - .iter() - .map(|t| &t.item) - .filter(|r#struct| { - r#struct.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)")) 
- }) - .collect(); - if result.len() > 1 { - return Err(AztecMacroError::MultipleStorageDefinitions { - span: result.first().map(|res| res.name.span()), - }); - } - Ok(result.iter().map(|&r#struct| r#struct.name.0.contents.clone()).next()) -} - -// Injects the Context generic in each of the Storage struct fields to avoid boilerplate, -// taking maps into account (including nested maps) -fn inject_context_in_storage_field(field: &mut UnresolvedType) -> Result<(), AztecMacroError> { - match &mut field.typ { - UnresolvedTypeData::Named(path, generics, _) => { - generics.ordered_args.push(make_type(UnresolvedTypeData::Named( - ident_path("Context"), - GenericTypeArgs::default(), - false, - ))); - match path.last_name() { - "Map" => inject_context_in_storage_field(&mut generics.ordered_args[1]), - _ => Ok(()), - } - } - _ => Err(AztecMacroError::CouldNotInjectContextGenericInStorage { - secondary_message: Some(format!("Unsupported type: {:?}", field.typ)), - }), - } -} - -// Injects the Context generic in the storage struct to avoid boilerplate -// Transforms this: -// struct Storage { -// a_var: SomeStoragePrimitive, -// a_map: Map>, -// } -// -// Into this: -// -// struct Storage { -// a_var: SomeStoragePrimitive, -// a_map: Map, Context>, -// } -pub fn inject_context_in_storage(module: &mut SortedModule) -> Result<(), AztecMacroError> { - let storage_struct = module - .types - .iter_mut() - .map(|t| &mut t.item) - .find(|r#struct| { - r#struct.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)")) - }) - .unwrap(); - storage_struct.generics.push(ident("Context").into()); - storage_struct - .fields - .iter_mut() - .map(|field| inject_context_in_storage_field(&mut field.item.typ)) - .collect::, _>>()?; - Ok(()) -} - -// Check to see if the user has defined an impl for the storage struct -pub fn check_for_storage_implementation( - module: &SortedModule, - storage_struct_name: &String, -) -> bool { - module.impls.iter().any(|r#impl| match &r#impl.object_type.typ { - UnresolvedTypeData::Named(path, _, _) => path.last_name() == *storage_struct_name, - _ => false, - }) -} - -/// Auxiliary function to generate the storage constructor for a given field, using -/// the Storage definition as a reference. Supports nesting. -pub fn generate_storage_field_constructor( - (type_ident, unresolved_type): &(Ident, UnresolvedType), - slot: Expression, -) -> Result { - let typ = &unresolved_type.typ; - match typ { - UnresolvedTypeData::Named(path, generics, _) => { - let mut new_path = path.clone().to_owned(); - new_path.segments.push(path_segment("new")); - match path.last_name() { - "Map" => Ok(call( - variable_path(new_path), - vec![ - variable("context"), - slot, - lambda( - // This lambda will be equivalent to the following - // | context, slot | { T::new(context, slot) } - // Since the `new` function has type bindings for its arguments, we don't specify the types - // of either context nor slot, and avoid that way having to deal with the generic context - // type. - vec![ - (pattern("context"), make_type(UnresolvedTypeData::Unspecified)), - ( - Pattern::Identifier(ident("slot")), - make_type(UnresolvedTypeData::Unspecified), - ), - ], - generate_storage_field_constructor( - // Map is expected to have three generic parameters: key, value and context (i.e. - // Map. Here `get(1)` fetches the value type. 
- &( - type_ident.clone(), - generics.ordered_args.get(1).unwrap().clone(), - ), - variable("slot"), - )?, - ), - ], - )), - _ => Ok(call(variable_path(new_path), vec![variable("context"), slot])), - } - } - _ => Err(AztecMacroError::UnsupportedStorageType { - typ: typ.clone(), - span: Some(type_ident.span()), - }), - } -} - -// Generates the Storage implementation block from the Storage struct definition if it does not exist -/// From: -/// -/// struct Storage { -/// a_map: Map, Context>, -/// a_nested_map: Map, Context>, Context>, -/// a_field: SomeStoragePrimitive, -/// } -/// -/// To: -/// -/// impl Storage { -/// fn init(context: Context) -> Self { -/// Storage { -/// a_map: Map::new(context, 0, |context, slot| { -/// SomeStoragePrimitive::new(context, slot) -/// }), -/// a_nested_map: Map::new(context, 0, |context, slot| { -/// Map::new(context, slot, |context, slot| { -/// SomeStoragePrimitive::new(context, slot) -/// }) -/// }), -/// a_field: SomeStoragePrimitive::new(context, 0), -/// } -/// } -/// } -/// -/// Storage slots are generated as 0 and will be populated using the information from the HIR -/// at a later stage. -pub fn generate_storage_implementation( - module: &mut SortedModule, - storage_struct_name: &String, -) -> Result<(), AztecMacroError> { - let definition = module - .types - .iter() - .map(|t| &t.item) - .find(|r#struct| r#struct.name.0.contents == *storage_struct_name) - .unwrap(); - - let slot_zero = expression(ExpressionKind::Literal(Literal::Integer( - FieldElement::from(i128::from(0)), - false, - ))); - - let field_constructors = definition - .fields - .iter() - .flat_map(|field| { - let ident = &field.item.name; - let typ = &field.item.typ; - generate_storage_field_constructor(&(ident.clone(), typ.clone()), slot_zero.clone()) - .map(|expression| (field.item.name.clone(), expression)) - }) - .collect(); - - let storage_constructor_statement = - make_statement(StatementKind::Expression(expression(ExpressionKind::constructor(( - UnresolvedType::from_path(chained_path!(storage_struct_name)), - field_constructors, - ))))); - - // This is the type over which the impl is generic. - let generic_context_ident = ident("Context"); - let generic_context_type = make_type(UnresolvedTypeData::Named( - ident_path("Context"), - GenericTypeArgs::default(), - true, - )); - - let init = NoirFunction::normal(FunctionDefinition::normal( - &ident("init"), - &vec![], - &[(ident("context"), generic_context_type.clone())], - &BlockExpression { statements: vec![storage_constructor_statement] }, - &[], - &return_type(chained_path!("Self")), - )); - - let ordered_args = vec![generic_context_type.clone()]; - let generics = GenericTypeArgs { ordered_args, named_args: Vec::new() }; - - let storage_impl = TypeImpl { - object_type: UnresolvedType { - typ: UnresolvedTypeData::Named(chained_path!(storage_struct_name), generics, true), - span: Span::default(), - }, - type_span: Span::default(), - generics: vec![generic_context_ident.into()], - - methods: vec![(Documented::not_documented(init), Span::default())], - - where_clause: vec![], - }; - module.impls.push(storage_impl); - - Ok(()) -} - -/// Obtains the serialized length of a type that implements the Serialize trait. 
-pub fn get_storage_serialized_length( - traits: &[TraitId], - typ: &Type, - interner: &NodeInterner, -) -> Result { - let (struct_name, maybe_stored_in_state) = match typ { - Type::Struct(struct_type, generics) => { - Ok((struct_type.borrow().name.0.contents.clone(), generics.first())) - } - _ => Err(AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some("State storage variable must be a struct".to_string()), - }), - }?; - let stored_in_state = - maybe_stored_in_state.ok_or(AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some("State storage variable must be generic".to_string()), - })?; - - let is_note = match stored_in_state { - Type::Struct(typ, _) => interner - .struct_attributes(&typ.borrow().id) - .iter() - .any(|attr| is_custom_attribute(attr, "aztec(note)")), - _ => false, - }; - - // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicMutable for whatever reason though. - if struct_name == "Map" || (is_note && struct_name != "PublicMutable") { - return Ok(1); - } - - get_serialized_length(traits, "Serialize", stored_in_state, interner).map_err(|err| { - AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some(err.primary_message) } - }) -} - -/// Assigns storage slots to the storage struct fields based on the serialized length of the types. This automatic assignment -/// will only trigger if the assigned storage slot is invalid (0 as generated by generate_storage_implementation) -pub fn assign_storage_slots( - crate_id: &CrateId, - context: &mut HirContext, -) -> Result<(), (AztecMacroError, FileId)> { - let traits: Vec<_> = collect_traits(context); - if let Some((_, _, file_id)) = get_contract_module_data(context, crate_id) { - let maybe_storage_struct = - collect_crate_structs(crate_id, context).iter().find_map(|struct_id| { - let r#struct = context.def_interner.get_struct(*struct_id); - let attributes = context.def_interner.struct_attributes(struct_id); - if attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)")) - && r#struct.borrow().id.krate() == *crate_id - { - Some(r#struct) - } else { - None - } - }); - - let maybe_storage_layout = - context.def_interner.get_all_globals().iter().find_map(|global_info| { - let statement = context.def_interner.get_global_let_statement(global_info.id); - if statement.clone().is_some_and(|stmt| { - stmt.attributes - .iter() - .any(|attr| *attr == SecondaryAttribute::Abi("storage".to_string())) - }) { - let expr = context.def_interner.expression(&statement.unwrap().expression); - match expr { - HirExpression::Constructor(hir_constructor_expression) => { - if hir_constructor_expression.r#type.borrow().id.krate() == *crate_id { - Some(hir_constructor_expression) - } else { - None - } - } - _ => None, - } - } else { - None - } - }); - - if let (Some(storage_struct), Some(storage_layout)) = - (maybe_storage_struct, maybe_storage_layout) - { - let init_id = context - .def_interner - .lookup_method( - &Type::Struct( - context.def_interner.get_struct(storage_struct.borrow().id), - vec![], - ), - storage_struct.borrow().id, - "init", - false, - true, - ) - .ok_or(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage struct must have an init function".to_string(), - ), - }, - file_id, - ))?; - let init_function = - context.def_interner.function(&init_id).block(&context.def_interner); - let init_function_statement_id = init_function.statements().first().ok_or(( - AztecMacroError::CouldNotAssignStorageSlots { - 
secondary_message: Some("Init storage statement not found".to_string()), - }, - file_id, - ))?; - let storage_constructor_statement = - context.def_interner.statement(init_function_statement_id); - - let storage_constructor_expression = match storage_constructor_statement { - HirStatement::Expression(expression_id) => { - match context.def_interner.expression(&expression_id) { - HirExpression::Constructor(hir_constructor_expression) => { - Ok(hir_constructor_expression) - } - _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage constructor statement must be a constructor expression" - .to_string(), - ), - }, - file_id, - )), - } - } - _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage constructor statement must be an expression".to_string(), - ), - }, - file_id, - )), - }?; - - let mut storage_slot: u32 = 1; - for (index, (_, expr_id)) in storage_constructor_expression.fields.iter().enumerate() { - let fields = storage_struct - .borrow() - .get_fields(&storage_constructor_expression.struct_generics); - let (field_name, field_type) = fields.get(index).unwrap(); - let new_call_expression = match context.def_interner.expression(expr_id) { - HirExpression::Call(hir_call_expression) => Ok(hir_call_expression), - _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage field initialization expression is not a call expression" - .to_string(), - ), - }, - file_id, - )), - }?; - - let slot_arg_expression = - context.def_interner.expression(&new_call_expression.arguments[1]); - - let current_storage_slot = match slot_arg_expression { - HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()), - _ => Err(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some( - "Storage slot argument expression must be a literal integer" - .to_string(), - ), - }, - file_id, - )), - }?; - - let storage_layout_field = - storage_layout.fields.iter().find(|field| field.0 .0.contents == *field_name); - - let storage_layout_slot_expr_id = - if let Some((_, expr_id)) = storage_layout_field { - let expr = context.def_interner.expression(expr_id); - if let HirExpression::Constructor(storage_layout_field_storable_expr) = expr - { - storage_layout_field_storable_expr.fields.iter().find_map( - |(field, expr_id)| { - if field.0.contents == "slot" { - Some(*expr_id) - } else { - None - } - }, - ) - } else { - None - } - } else { - None - } - .ok_or(( - AztecMacroError::CouldNotAssignStorageSlots { - secondary_message: Some(format!( - "Storage layout field ({}) not found or has an incorrect type", - field_name - )), - }, - file_id, - ))?; - - let new_storage_slot = if current_storage_slot == 0 { - u128::from(storage_slot) - } else { - current_storage_slot - }; - - let type_serialized_len = - get_storage_serialized_length(&traits, field_type, &context.def_interner) - .map_err(|err| (err, file_id))?; - - context.def_interner.update_expression(new_call_expression.arguments[1], |expr| { - *expr = HirExpression::Literal(HirLiteral::Integer( - FieldElement::from(new_storage_slot), - false, - )) - }); - - context.def_interner.update_expression(storage_layout_slot_expr_id, |expr| { - *expr = HirExpression::Literal(HirLiteral::Integer( - FieldElement::from(new_storage_slot), - false, - )) - }); - - storage_slot += type_serialized_len; - } - } - } - - Ok(()) -} - -pub fn generate_storage_layout( - module: &mut SortedModule, - storage_struct_name: String, - module_name: &str, 
- empty_spans: bool, -) -> Result<(), AztecMacroError> { - let definition = module - .types - .iter() - .map(|t| &t.item) - .find(|r#struct| r#struct.name.0.contents == *storage_struct_name) - .unwrap(); - - let mut storable_fields = vec![]; - let mut storable_fields_impl = vec![]; - - definition.fields.iter().for_each(|field| { - let field_ident = &field.item.name; - storable_fields.push(format!("{}: dep::aztec::prelude::Storable", field_ident)); - storable_fields_impl - .push(format!("{}: dep::aztec::prelude::Storable {{ slot: 0 }}", field_ident,)); - }); - - let storage_fields_source = format!( - " - struct StorageLayout {{ - {} - }} - - #[abi(storage)] - global {}_STORAGE_LAYOUT = StorageLayout {{ - {} - }}; - ", - storable_fields.join(",\n"), - module_name, - storable_fields_impl.join(",\n") - ); - - let (struct_ast, errors) = parse_program(&storage_fields_source, empty_spans); - if !errors.is_empty() { - dbg!(errors); - return Err(AztecMacroError::CouldNotExportStorageLayout { - secondary_message: Some("Failed to parse Noir macro code (struct StorageLayout). This is either a bug in the compiler or the Noir macro code".to_string()), - span: None - }); - } - - let mut struct_ast = struct_ast.into_sorted(); - module.types.push(struct_ast.types.pop().unwrap()); - module.globals.push(struct_ast.globals.pop().unwrap()); - - Ok(()) -} diff --git a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs b/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs deleted file mode 100644 index eeb8e1f7d78..00000000000 --- a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs +++ /dev/null @@ -1,195 +0,0 @@ -use noirc_errors::{Span, Spanned}; -use noirc_frontend::ast::{ - BinaryOpKind, CallExpression, CastExpression, Expression, ExpressionKind, FunctionReturnType, - Ident, IndexExpression, InfixExpression, Lambda, MemberAccessExpression, MethodCallExpression, - NoirTraitImpl, Path, PathSegment, Pattern, PrefixExpression, Statement, StatementKind, - TraitImplItemKind, UnaryOp, UnresolvedType, UnresolvedTypeData, -}; -use noirc_frontend::token::SecondaryAttribute; - -// -// Helper macros for creating noir ast nodes -// -pub fn ident(name: &str) -> Ident { - Ident::new(name.to_string(), Span::default()) -} - -pub fn ident_path(name: &str) -> Path { - Path::from_ident(ident(name)) -} - -pub fn path_segment(name: &str) -> PathSegment { - PathSegment::from(ident(name)) -} - -pub fn path(ident: Ident) -> Path { - Path::from_ident(ident) -} - -pub fn expression(kind: ExpressionKind) -> Expression { - Expression::new(kind, Span::default()) -} - -pub fn variable(name: &str) -> Expression { - expression(ExpressionKind::Variable(ident_path(name))) -} - -pub fn variable_ident(identifier: Ident) -> Expression { - expression(ExpressionKind::Variable(path(identifier))) -} - -pub fn variable_path(path: Path) -> Expression { - expression(ExpressionKind::Variable(path)) -} - -pub fn method_call( - object: Expression, - method_name: &str, - arguments: Vec, -) -> Expression { - expression(ExpressionKind::MethodCall(Box::new(MethodCallExpression { - object, - method_name: ident(method_name), - arguments, - is_macro_call: false, - generics: None, - }))) -} - -pub fn call(func: Expression, arguments: Vec) -> Expression { - expression(ExpressionKind::Call(Box::new(CallExpression { - func: Box::new(func), - is_macro_call: false, - arguments, - }))) -} - -pub fn pattern(name: &str) -> Pattern { - Pattern::Identifier(ident(name)) -} - -pub fn mutable(name: &str) -> Pattern { - Pattern::Mutable(Box::new(pattern(name)), 
Span::default(), true) -} - -pub fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { - make_statement(StatementKind::new_let( - mutable(name), - make_type(UnresolvedTypeData::Unspecified), - assigned_to, - )) -} - -pub fn mutable_reference(variable_name: &str) -> Expression { - expression(ExpressionKind::Prefix(Box::new(PrefixExpression { - operator: UnaryOp::MutableReference, - rhs: variable(variable_name), - }))) -} - -pub fn assignment(name: &str, assigned_to: Expression) -> Statement { - assignment_with_type(name, UnresolvedTypeData::Unspecified, assigned_to) -} - -pub fn assignment_with_type( - name: &str, - typ: UnresolvedTypeData, - assigned_to: Expression, -) -> Statement { - make_statement(StatementKind::new_let(pattern(name), make_type(typ), assigned_to)) -} - -pub fn return_type(path: Path) -> FunctionReturnType { - let ty = make_type(UnresolvedTypeData::Named(path, Default::default(), true)); - FunctionReturnType::Ty(ty) -} - -pub fn lambda(parameters: Vec<(Pattern, UnresolvedType)>, body: Expression) -> Expression { - expression(ExpressionKind::Lambda(Box::new(Lambda { - parameters, - return_type: UnresolvedType { typ: UnresolvedTypeData::Unspecified, span: Span::default() }, - body, - }))) -} - -pub fn make_eq(lhs: Expression, rhs: Expression) -> Expression { - expression(ExpressionKind::Infix(Box::new(InfixExpression { - lhs, - rhs, - operator: Spanned::from(Span::default(), BinaryOpKind::Equal), - }))) -} - -pub fn make_statement(kind: StatementKind) -> Statement { - Statement { span: Span::default(), kind } -} - -pub fn member_access(lhs: Expression, member: &str) -> Expression { - expression(ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { - lhs, - rhs: ident(member), - }))) -} - -#[macro_export] -macro_rules! chained_path { - ( $base:expr ) => { - { - ident_path($base) - } - }; - ( $base:expr $(, $tail:expr)* ) => { - { - let mut base_path = ident_path($base); - $( - base_path.segments.push(path_segment($tail)); - )* - base_path - } - } -} - -#[macro_export] -macro_rules! 
chained_dep {
-    ( $base:expr $(, $tail:expr)* ) => {
-        {
-            let mut base_path = ident_path($base);
-            base_path.kind = PathKind::Plain;
-            $(
-                base_path.segments.push(path_segment($tail));
-            )*
-            base_path
-        }
-    }
-}
-
-pub fn cast(lhs: Expression, ty: UnresolvedTypeData) -> Expression {
-    expression(ExpressionKind::Cast(Box::new(CastExpression { lhs, r#type: make_type(ty) })))
-}
-
-pub fn make_type(typ: UnresolvedTypeData) -> UnresolvedType {
-    UnresolvedType { typ, span: Span::default() }
-}
-
-pub fn index_array(array: Ident, index: &str) -> Expression {
-    expression(ExpressionKind::Index(Box::new(IndexExpression {
-        collection: variable_path(path(array)),
-        index: variable(index),
-    })))
-}
-
-pub fn check_trait_method_implemented(trait_impl: &NoirTraitImpl, method_name: &str) -> bool {
-    trait_impl.items.iter().any(|item| match &item.item.kind {
-        TraitImplItemKind::Function(func) => func.def.name.0.contents == method_name,
-        _ => false,
-    })
-}
-
-/// Checks if an attribute is a custom attribute with a specific name
-pub fn is_custom_attribute(attr: &SecondaryAttribute, attribute_name: &str) -> bool {
-    if let SecondaryAttribute::Custom(custom_attribute) = attr {
-        custom_attribute.contents.as_str() == attribute_name
-    } else {
-        false
-    }
-}
diff --git a/noir/noir-repo/aztec_macros/src/utils/checks.rs b/noir/noir-repo/aztec_macros/src/utils/checks.rs
deleted file mode 100644
index c067ec570c8..00000000000
--- a/noir/noir-repo/aztec_macros/src/utils/checks.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-use noirc_frontend::{
-    graph::CrateId,
-    macros_api::{FileId, HirContext, MacroError},
-};
-
-use super::errors::AztecMacroError;
-
-/// Creates an error alerting the user that they have not downloaded the Aztec-noir library
-pub fn check_for_aztec_dependency(
-    crate_id: &CrateId,
-    context: &HirContext,
-) -> Result<(), (MacroError, FileId)> {
-    if has_aztec_dependency(crate_id, context) {
-        Ok(())
-    } else {
-        Err((AztecMacroError::AztecDepNotFound.into(), context.crate_graph[crate_id].root_file_id))
-    }
-}
-
-pub fn has_aztec_dependency(crate_id: &CrateId, context: &HirContext) -> bool {
-    context.crate_graph[crate_id]
-        .dependencies
-        .iter()
-        .any(|dep| dep.as_name() == "aztec")
-}
diff --git a/noir/noir-repo/aztec_macros/src/utils/constants.rs b/noir/noir-repo/aztec_macros/src/utils/constants.rs
deleted file mode 100644
index 3e93b2aa545..00000000000
--- a/noir/noir-repo/aztec_macros/src/utils/constants.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-pub const FUNCTION_TREE_HEIGHT: u32 = 5;
-pub const MAX_CONTRACT_PRIVATE_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT);
diff --git a/noir/noir-repo/aztec_macros/src/utils/errors.rs b/noir/noir-repo/aztec_macros/src/utils/errors.rs
deleted file mode 100644
index c0b4310de96..00000000000
--- a/noir/noir-repo/aztec_macros/src/utils/errors.rs
+++ /dev/null
@@ -1,118 +0,0 @@
-use noirc_errors::Span;
-use noirc_frontend::ast;
-use noirc_frontend::macros_api::MacroError;
-
-use super::constants::MAX_CONTRACT_PRIVATE_FUNCTIONS;
-
-#[derive(Debug, Clone)]
-pub enum AztecMacroError {
-    AztecDepNotFound,
-    ContractHasTooManyPrivateFunctions { span: Span },
-    UnsupportedFunctionArgumentType { span: Span, typ: ast::UnresolvedTypeData },
-    UnsupportedFunctionReturnType { span: Span, typ: ast::UnresolvedTypeData },
-    UnsupportedStorageType { span: Option<Span>, typ: ast::UnresolvedTypeData },
-    CouldNotAssignStorageSlots { secondary_message: Option<String> },
-    CouldNotImplementComputeNoteHashAndOptionallyANullifier { secondary_message: Option<String> },
-
CouldNotImplementNoteInterface { span: Option<Span>, secondary_message: Option<String> },
-    CouldNotImplementEventInterface { secondary_message: Option<String> },
-    MultipleStorageDefinitions { span: Option<Span> },
-    CouldNotExportStorageLayout { span: Option<Span>, secondary_message: Option<String> },
-    CouldNotInjectContextGenericInStorage { secondary_message: Option<String> },
-    CouldNotExportFunctionAbi { span: Option<Span>, secondary_message: Option<String> },
-    CouldNotGenerateContractInterface { secondary_message: Option<String> },
-    EventError { span: Span, message: String },
-    UnsupportedAttributes { span: Span, secondary_message: Option<String> },
-    PublicArgsDisallowed { span: Span },
-}
-
-impl From<AztecMacroError> for MacroError {
-    fn from(err: AztecMacroError) -> Self {
-        match err {
-            AztecMacroError::AztecDepNotFound {} => MacroError {
-                primary_message: "Aztec dependency not found. Please add aztec as a dependency in your Nargo.toml. For more information go to https://docs.aztec.network/reference/developer_references/common_errors/aztecnr-errors#aztec-dependency-not-found-please-add-aztec-as-a-dependency-in-your-nargotoml".to_owned(),
-                secondary_message: None,
-                span: None,
-            },
-            AztecMacroError::ContractHasTooManyPrivateFunctions { span } => MacroError {
-                primary_message: format!("Contract can only have a maximum of {} private functions", MAX_CONTRACT_PRIVATE_FUNCTIONS),
-                secondary_message: None,
-                span: Some(span),
-            },
-            AztecMacroError::UnsupportedFunctionArgumentType { span, typ } => MacroError {
-                primary_message: format!("Provided parameter type `{typ:?}` is not supported in Aztec contract interface"),
-                secondary_message: None,
-                span: Some(span),
-            },
-            AztecMacroError::UnsupportedFunctionReturnType { span, typ } => MacroError {
-                primary_message: format!("Provided return type `{typ:?}` is not supported in Aztec contract interface"),
-                secondary_message: None,
-                span: Some(span),
-            },
-            AztecMacroError::UnsupportedStorageType { span, typ } => MacroError {
-                primary_message: format!("Provided storage type `{typ:?}` is not directly supported in Aztec. 
Please provide a custom storage implementation"), - secondary_message: None, - span, - }, - AztecMacroError::CouldNotAssignStorageSlots { secondary_message } => MacroError { - primary_message: "Could not assign storage slots, please provide a custom storage implementation".to_string(), - secondary_message, - span: None, - }, - AztecMacroError::CouldNotImplementComputeNoteHashAndOptionallyANullifier { secondary_message } => MacroError { - primary_message: "Could not implement compute_note_hash_and_optionally_a_nullifier automatically, please provide an implementation".to_string(), - secondary_message, - span: None, - }, - AztecMacroError::CouldNotImplementNoteInterface { span, secondary_message } => MacroError { - primary_message: "Could not implement automatic methods for note, please provide an implementation of the NoteInterface trait".to_string(), - secondary_message, - span - }, - AztecMacroError::CouldNotImplementEventInterface { secondary_message } => MacroError { - primary_message: "Could not implement automatic methods for event, please provide an implementation of the EventInterface trait".to_string(), - secondary_message, - span: None, - }, - AztecMacroError::MultipleStorageDefinitions { span } => MacroError { - primary_message: "Only one struct can be tagged as #[aztec(storage)]".to_string(), - secondary_message: None, - span, - }, - AztecMacroError::CouldNotExportStorageLayout { secondary_message, span } => MacroError { - primary_message: "Could not generate and export storage layout".to_string(), - secondary_message, - span, - }, - AztecMacroError::CouldNotInjectContextGenericInStorage { secondary_message } => MacroError { - primary_message: "Could not inject context generic in storage".to_string(), - secondary_message, - span: None - }, - AztecMacroError::CouldNotExportFunctionAbi { secondary_message, span } => MacroError { - primary_message: "Could not generate and export function abi".to_string(), - secondary_message, - span, - }, - AztecMacroError::CouldNotGenerateContractInterface { secondary_message } => MacroError { - primary_message: "Could not generate contract interface".to_string(), - secondary_message, - span: None - }, - AztecMacroError::EventError { span, message } => MacroError { - primary_message: message, - secondary_message: None, - span: Some(span), - }, - AztecMacroError::UnsupportedAttributes { span, secondary_message } => MacroError { - primary_message: "Unsupported attributes in contract function".to_string(), - secondary_message, - span: Some(span), - }, - AztecMacroError::PublicArgsDisallowed { span } => MacroError { - primary_message: "Aztec functions can't have public arguments".to_string(), - secondary_message: None, - span: Some(span), - }, - } - } -} diff --git a/noir/noir-repo/aztec_macros/src/utils/hir_utils.rs b/noir/noir-repo/aztec_macros/src/utils/hir_utils.rs deleted file mode 100644 index 4f1ef78b474..00000000000 --- a/noir/noir-repo/aztec_macros/src/utils/hir_utils.rs +++ /dev/null @@ -1,369 +0,0 @@ -use acvm::acir::AcirField; -use iter_extended::vecmap; -use noirc_errors::{CustomDiagnostic, Location}; -use noirc_frontend::ast; -use noirc_frontend::elaborator::Elaborator; -use noirc_frontend::hir::def_collector::dc_crate::{ - CollectedItems, UnresolvedFunctions, UnresolvedGlobal, -}; -use noirc_frontend::macros_api::{HirExpression, HirLiteral}; -use noirc_frontend::node_interner::{NodeInterner, TraitImplKind}; -use noirc_frontend::{ - graph::CrateId, - hir::def_map::{LocalModuleId, ModuleId}, - macros_api::{FileId, HirContext, MacroError, 
ModuleDefId, StructId}, - node_interner::{FuncId, TraitId}, - Shared, StructType, Type, -}; - -use super::ast_utils::is_custom_attribute; - -pub fn collect_crate_structs(crate_id: &CrateId, context: &HirContext) -> Vec { - context - .def_map(crate_id) - .map(|def_map| { - def_map - .modules() - .iter() - .flat_map(|(_, module)| { - module.type_definitions().filter_map(move |typ| { - if let ModuleDefId::TypeId(struct_id) = typ { - Some(struct_id) - } else { - None - } - }) - }) - .collect() - }) - .unwrap_or_default() -} - -pub fn collect_crate_functions(crate_id: &CrateId, context: &HirContext) -> Vec { - context - .def_map(crate_id) - .expect("ICE: Missing crate in def_map") - .modules() - .iter() - .flat_map(|(_, module)| module.value_definitions().filter_map(|id| id.as_function())) - .collect() -} - -pub fn collect_traits(context: &HirContext) -> Vec { - let crates = context.crates(); - crates - .flat_map(|crate_id| context.def_map(&crate_id).map(|def_map| def_map.modules())) - .flatten() - .flat_map(|module| { - module.type_definitions().filter_map(|typ| { - if let ModuleDefId::TraitId(trait_id) = typ { - Some(trait_id) - } else { - None - } - }) - }) - .collect() -} - -/// Computes the aztec signature for a resolved type. -pub fn signature_of_type(typ: &Type) -> String { - match typ { - Type::Integer(ast::Signedness::Signed, bit_size) => format!("i{}", bit_size), - Type::Integer(ast::Signedness::Unsigned, bit_size) => format!("u{}", bit_size), - Type::FieldElement => "Field".to_owned(), - Type::Bool => "bool".to_owned(), - Type::Array(len, typ) => { - if let Type::Constant(len, _) = **len { - format!("[{};{len}]", signature_of_type(typ)) - } else { - unimplemented!("Cannot generate signature for array with length type {:?}", typ) - } - } - Type::Struct(def, args) => { - let fields = def.borrow().get_fields(args); - let fields = vecmap(fields, |(_, typ)| signature_of_type(&typ)); - format!("({})", fields.join(",")) - } - Type::Tuple(types) => { - let fields = vecmap(types, signature_of_type); - format!("({})", fields.join(",")) - } - Type::String(len_typ) => { - if let Type::Constant(len, _) = **len_typ { - format!("str<{len}>") - } else { - unimplemented!( - "Cannot generate signature for string with length type {:?}", - len_typ - ) - } - } - Type::MutableReference(typ) => signature_of_type(typ), - _ => unimplemented!("Cannot generate signature for type {:?}", typ), - } -} - -// Fetches the name of all structs tagged as #[aztec(note)] in a given crate, avoiding -// contract dependencies that are just there for their interfaces. -pub fn fetch_crate_notes( - context: &HirContext, - crate_id: &CrateId, -) -> Vec<(String, Shared)> { - collect_crate_structs(crate_id, context) - .iter() - .filter_map(|struct_id| { - let r#struct = context.def_interner.get_struct(*struct_id); - let attributes = context.def_interner.struct_attributes(struct_id); - if attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(note)")) { - let module_id = struct_id.module_id(); - - fully_qualified_note_path(context, *struct_id).map(|path| { - let path = if path.contains("::") { - let prefix = if &module_id.krate == context.root_crate_id() { - "crate" - } else { - "dep" - }; - format!("{}::{}", prefix, path) - } else { - path - }; - (path.clone(), r#struct) - }) - } else { - None - } - }) - .collect() -} - -// Fetches the name of all structs tagged as #[aztec(note)], both in the current crate and all of its dependencies. 
-pub fn fetch_notes(context: &HirContext) -> Vec<(String, Shared)> { - context.crates().flat_map(|crate_id| fetch_crate_notes(context, &crate_id)).collect() -} - -pub fn get_contract_module_data( - context: &mut HirContext, - crate_id: &CrateId, -) -> Option<(String, LocalModuleId, FileId)> { - let def_map = context.def_map(crate_id).expect("ICE: Missing crate in def_map"); - // We first fetch modules in this crate which correspond to contracts, along with their file id. - let contract_module_file_ids: Vec<(String, LocalModuleId, FileId)> = def_map - .modules() - .iter() - .filter(|(_, module)| module.is_contract) - .map(|(idx, module)| { - (def_map.get_module_path(idx, module.parent), LocalModuleId(idx), module.location.file) - }) - .collect(); - - // If the current crate does not contain a contract module we simply skip it. - if contract_module_file_ids.is_empty() { - return None; - } - - Some(contract_module_file_ids[0].clone()) -} - -pub fn inject_fn( - crate_id: &CrateId, - context: &mut HirContext, - func: ast::NoirFunction, - location: Location, - module_id: LocalModuleId, - file_id: FileId, -) -> Result<(), MacroError> { - let func_id = context.def_interner.push_empty_fn(); - context.def_interner.push_function( - func_id, - &func.def, - ModuleId { krate: *crate_id, local_id: module_id }, - location, - ); - - context.def_map_mut(crate_id).unwrap().modules_mut()[module_id.0] - .declare_function(func.name_ident().clone(), ast::ItemVisibility::Public, func_id) - .map_err(|err| MacroError { - primary_message: format!("Failed to declare autogenerated {} function", func.name()), - secondary_message: Some(format!("Duplicate definition found {}", err.0)), - span: None, - })?; - - let mut items = CollectedItems::default(); - let functions = vec![(module_id, func_id, func)]; - let trait_id = None; - items.functions.push(UnresolvedFunctions { file_id, functions, trait_id, self_type: None }); - - let mut errors = Elaborator::elaborate(context, *crate_id, items, None); - errors.retain(|(error, _)| !CustomDiagnostic::from(error).is_warning()); - - if !errors.is_empty() { - return Err(MacroError { - primary_message: "Failed to type check autogenerated function".to_owned(), - secondary_message: Some(errors.iter().map(|err| err.0.to_string()).collect::()), - span: None, - }); - } - - Ok(()) -} - -pub fn inject_global( - crate_id: &CrateId, - context: &mut HirContext, - global: ast::LetStatement, - module_id: LocalModuleId, - file_id: FileId, -) { - let name = global.pattern.name_ident().clone(); - - let global_id = context.def_interner.push_empty_global( - name.clone(), - module_id, - *crate_id, - file_id, - global.attributes.clone(), - false, - false, - ); - - // Add the statement to the scope so its path can be looked up later - context.def_map_mut(crate_id).unwrap().modules_mut()[module_id.0] - .declare_global(name, global_id) - .unwrap_or_else(|(name, _)| { - panic!( - "Failed to declare autogenerated {} global, likely due to a duplicate definition", - name - ) - }); - - let mut items = CollectedItems::default(); - items.globals.push(UnresolvedGlobal { file_id, module_id, global_id, stmt_def: global }); - - let _errors = Elaborator::elaborate(context, *crate_id, items, None); -} - -pub fn fully_qualified_note_path(context: &HirContext, note_id: StructId) -> Option { - let module_id = note_id.module_id(); - let child_id = module_id.local_id.0; - let def_map = - context.def_map(&module_id.krate).expect("The local crate should be analyzed already"); - - let module = context.module(module_id); - - 
let module_path = def_map.get_module_path_with_separator(child_id, module.parent, "::"); - - if &module_id.krate == context.root_crate_id() { - Some(module_path) - } else { - find_dependencies_bfs(context, context.root_crate_id(), &module_id.krate) - .map(|crates| crates.join("::") + "::" + &module_path) - } -} - -fn find_dependencies_bfs( - context: &HirContext, - crate_id: &CrateId, - target_crate_id: &CrateId, -) -> Option> { - context.crate_graph[crate_id] - .dependencies - .iter() - .find_map(|dep| { - if &dep.crate_id == target_crate_id { - Some(vec![dep.name.to_string()]) - } else { - None - } - }) - .or_else(|| { - context.crate_graph[crate_id].dependencies.iter().find_map(|dep| { - if let Some(mut path) = - find_dependencies_bfs(context, &dep.crate_id, target_crate_id) - { - path.insert(0, dep.name.to_string()); - Some(path) - } else { - None - } - }) - }) -} - -pub fn get_serialized_length( - traits: &[TraitId], - trait_name: &str, - typ: &Type, - interner: &NodeInterner, -) -> Result { - let serialized_trait_impl_kind = traits - .iter() - .find_map(|&trait_id| { - let r#trait = interner.get_trait(trait_id); - if r#trait.name.0.contents == trait_name { - interner.lookup_all_trait_implementations(typ, trait_id).into_iter().next() - } else { - None - } - }) - .ok_or(MacroError { - primary_message: format!("Type {} must implement {} trait", typ, trait_name), - secondary_message: None, - span: None, - })?; - - let serialized_trait_impl_id = match serialized_trait_impl_kind { - TraitImplKind::Normal(trait_impl_id) => Ok(trait_impl_id), - _ => Err(MacroError { - primary_message: format!("{} trait impl for {} must not be assumed", trait_name, typ), - secondary_message: None, - span: None, - }), - }?; - - let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); - let serialized_trait_impl = serialized_trait_impl_shared.borrow(); - - match serialized_trait_impl.trait_generics.first().unwrap() { - Type::Constant(value, _) => Ok(*value), - _ => Err(MacroError { - primary_message: format!("{} length for {} must be a constant", trait_name, typ), - secondary_message: None, - span: None, - }), - } -} - -pub fn get_global_numberic_const( - context: &HirContext, - const_name: &str, -) -> Result { - context - .def_interner - .get_all_globals() - .iter() - .find_map(|global_info| { - if global_info.ident.0.contents == const_name { - let stmt = context.def_interner.get_global_let_statement(global_info.id); - if let Some(let_stmt) = stmt { - let expression = context.def_interner.expression(&let_stmt.expression); - match expression { - HirExpression::Literal(HirLiteral::Integer(value, _)) => { - Some(value.to_u128()) - } - _ => None, - } - } else { - None - } - } else { - None - } - }) - .ok_or(MacroError { - primary_message: format!("Could not find {} global constant", const_name), - secondary_message: None, - span: None, - }) -} diff --git a/noir/noir-repo/aztec_macros/src/utils/mod.rs b/noir/noir-repo/aztec_macros/src/utils/mod.rs deleted file mode 100644 index 6809fe9f154..00000000000 --- a/noir/noir-repo/aztec_macros/src/utils/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub mod ast_utils; -pub mod checks; -pub mod constants; -pub mod errors; -pub mod hir_utils; -pub mod parse_utils; diff --git a/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs b/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs deleted file mode 100644 index 61f54377284..00000000000 --- a/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs +++ /dev/null @@ -1,591 +0,0 @@ -use 
noirc_frontend::{ - ast::{ - ArrayLiteral, AssignStatement, BlockExpression, CallExpression, CastExpression, - ConstrainStatement, ConstructorExpression, Expression, ExpressionKind, ForLoopStatement, - ForRange, FunctionReturnType, GenericTypeArgs, Ident, IfExpression, IndexExpression, - InfixExpression, LValue, Lambda, LetStatement, Literal, MemberAccessExpression, - MethodCallExpression, ModuleDeclaration, NoirFunction, NoirStruct, NoirTrait, - NoirTraitImpl, NoirTypeAlias, Path, PathSegment, Pattern, PrefixExpression, Statement, - StatementKind, TraitImplItem, TraitImplItemKind, TraitItem, TypeImpl, UnresolvedGeneric, - UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, - UnresolvedTypeExpression, UseTree, UseTreeKind, - }, - parser::{Item, ItemKind, ParsedSubModule, ParserError}, - ParsedModule, -}; - -/// Parses a program and will clear out (set them to a default) any spans in it if `empty_spans` is true. -/// We want to do this in code generated by macros when running in LSP mode so that the generated -/// code doesn't end up overlapping real code, messing with how inlay hints, hover, etc., work. -pub fn parse_program(source_program: &str, empty_spans: bool) -> (ParsedModule, Vec) { - let (mut parsed_program, errors) = noirc_frontend::parse_program(source_program); - if empty_spans { - empty_parsed_module(&mut parsed_program); - } - (parsed_program, errors) -} - -fn empty_parsed_module(parsed_module: &mut ParsedModule) { - for item in parsed_module.items.iter_mut() { - empty_item(item); - } -} - -fn empty_item(item: &mut Item) { - item.span = Default::default(); - - match &mut item.kind { - ItemKind::Function(noir_function) => empty_noir_function(noir_function), - ItemKind::Trait(noir_trait) => { - empty_noir_trait(noir_trait); - } - ItemKind::TraitImpl(noir_trait_impl) => { - empty_noir_trait_impl(noir_trait_impl); - } - ItemKind::Impl(type_impl) => { - empty_type_impl(type_impl); - } - ItemKind::Global(let_statement) => empty_let_statement(let_statement), - ItemKind::Submodules(parsed_submodule) => { - empty_parsed_submodule(parsed_submodule); - } - ItemKind::ModuleDecl(module_declaration) => empty_module_declaration(module_declaration), - ItemKind::Import(use_tree, _) => empty_use_tree(use_tree), - ItemKind::Struct(noir_struct) => empty_noir_struct(noir_struct), - ItemKind::TypeAlias(noir_type_alias) => empty_noir_type_alias(noir_type_alias), - ItemKind::InnerAttribute(_) => (), - } -} - -fn empty_noir_trait(noir_trait: &mut NoirTrait) { - noir_trait.span = Default::default(); - - empty_ident(&mut noir_trait.name); - empty_unresolved_generics(&mut noir_trait.generics); - empty_unresolved_trait_constraints(&mut noir_trait.where_clause); - for item in noir_trait.items.iter_mut() { - empty_trait_item(&mut item.item); - } -} - -fn empty_noir_trait_impl(noir_trait_impl: &mut NoirTraitImpl) { - empty_path(&mut noir_trait_impl.trait_name); - empty_unresolved_generics(&mut noir_trait_impl.impl_generics); - empty_unresolved_type(&mut noir_trait_impl.object_type); - empty_unresolved_trait_constraints(&mut noir_trait_impl.where_clause); - for item in noir_trait_impl.items.iter_mut() { - empty_trait_impl_item(&mut item.item); - } -} - -fn empty_type_impl(type_impl: &mut TypeImpl) { - empty_unresolved_type(&mut type_impl.object_type); - type_impl.type_span = Default::default(); - empty_unresolved_generics(&mut type_impl.generics); - empty_unresolved_trait_constraints(&mut type_impl.where_clause); - for (noir_function, _) in type_impl.methods.iter_mut() { - 
empty_noir_function(&mut noir_function.item); - } -} - -fn empty_noir_function(noir_function: &mut NoirFunction) { - let def = &mut noir_function.def; - - def.span = Default::default(); - empty_ident(&mut def.name); - empty_unresolved_generics(&mut def.generics); - - for param in def.parameters.iter_mut() { - param.span = Default::default(); - empty_unresolved_type(&mut param.typ); - empty_pattern(&mut param.pattern); - } - - empty_unresolved_trait_constraints(&mut def.where_clause); - empty_function_return_type(&mut def.return_type); - empty_block_expression(&mut def.body); -} - -fn empty_trait_item(trait_item: &mut TraitItem) { - match trait_item { - TraitItem::Function { - name, - generics, - parameters, - return_type, - where_clause, - body, - is_unconstrained: _, - visibility: _, - is_comptime: _, - } => { - empty_ident(name); - empty_unresolved_generics(generics); - for (name, typ) in parameters.iter_mut() { - empty_ident(name); - empty_unresolved_type(typ); - } - empty_function_return_type(return_type); - for trait_constraint in where_clause.iter_mut() { - empty_unresolved_trait_constraint(trait_constraint); - } - if let Some(body) = body { - empty_block_expression(body); - } - } - TraitItem::Constant { name, typ, default_value } => { - empty_ident(name); - empty_unresolved_type(typ); - if let Some(default_value) = default_value { - empty_expression(default_value); - } - } - TraitItem::Type { name } => { - empty_ident(name); - } - } -} - -fn empty_trait_impl_item(trait_impl_item: &mut TraitImplItem) { - trait_impl_item.span = Default::default(); - - empty_trait_impl_item_kind(&mut trait_impl_item.kind); -} - -fn empty_trait_impl_item_kind(trait_impl_item: &mut TraitImplItemKind) { - match trait_impl_item { - TraitImplItemKind::Function(noir_function) => empty_noir_function(noir_function), - TraitImplItemKind::Constant(name, typ, default_value) => { - empty_ident(name); - empty_unresolved_type(typ); - empty_expression(default_value); - } - TraitImplItemKind::Type { name, alias } => { - empty_ident(name); - empty_unresolved_type(alias); - } - } -} - -fn empty_let_statement(let_statement: &mut LetStatement) { - empty_pattern(&mut let_statement.pattern); - empty_unresolved_type(&mut let_statement.r#type); - empty_expression(&mut let_statement.expression); -} - -fn empty_parsed_submodule(parsed_submodule: &mut ParsedSubModule) { - empty_ident(&mut parsed_submodule.name); - empty_parsed_module(&mut parsed_submodule.contents); -} - -fn empty_module_declaration(module_declaration: &mut ModuleDeclaration) { - empty_ident(&mut module_declaration.ident); -} - -fn empty_use_tree(use_tree: &mut UseTree) { - empty_path(&mut use_tree.prefix); - - match &mut use_tree.kind { - UseTreeKind::Path(name, alias) => { - empty_ident(name); - if let Some(alias) = alias { - empty_ident(alias); - } - } - UseTreeKind::List(use_trees) => { - for use_tree in use_trees.iter_mut() { - empty_use_tree(use_tree); - } - } - } -} - -fn empty_noir_struct(noir_struct: &mut NoirStruct) { - noir_struct.span = Default::default(); - empty_ident(&mut noir_struct.name); - for field in noir_struct.fields.iter_mut() { - empty_ident(&mut field.item.name); - empty_unresolved_type(&mut field.item.typ); - } - empty_unresolved_generics(&mut noir_struct.generics); -} - -fn empty_noir_type_alias(noir_type_alias: &mut NoirTypeAlias) { - noir_type_alias.span = Default::default(); - empty_ident(&mut noir_type_alias.name); - empty_unresolved_type(&mut noir_type_alias.typ); -} - -fn empty_block_expression(block_expression: &mut 
BlockExpression) { - for statement in block_expression.statements.iter_mut() { - empty_statement(statement); - } -} - -fn empty_statement(statement: &mut Statement) { - statement.span = Default::default(); - - match &mut statement.kind { - StatementKind::Let(let_statement) => empty_let_statement(let_statement), - StatementKind::Constrain(constrain_statement) => { - empty_constrain_statement(constrain_statement) - } - StatementKind::Expression(expression) => empty_expression(expression), - StatementKind::Assign(assign_statement) => empty_assign_statement(assign_statement), - StatementKind::For(for_loop_statement) => empty_for_loop_statement(for_loop_statement), - StatementKind::Comptime(statement) => empty_statement(statement), - StatementKind::Semi(expression) => empty_expression(expression), - StatementKind::Break - | StatementKind::Continue - | StatementKind::Interned(_) - | StatementKind::Error => (), - } -} - -fn empty_constrain_statement(constrain_statement: &mut ConstrainStatement) { - empty_expressions(&mut constrain_statement.arguments); -} - -fn empty_expressions(expressions: &mut [Expression]) { - for expression in expressions.iter_mut() { - empty_expression(expression); - } -} - -fn empty_expression(expression: &mut Expression) { - expression.span = Default::default(); - - match &mut expression.kind { - ExpressionKind::Literal(literal) => empty_literal(literal), - ExpressionKind::Block(block_expression) => empty_block_expression(block_expression), - ExpressionKind::Prefix(prefix_expression) => empty_prefix_expression(prefix_expression), - ExpressionKind::Index(index_expression) => empty_index_expression(index_expression), - ExpressionKind::Call(call_expression) => empty_call_expression(call_expression), - ExpressionKind::MethodCall(method_call_expression) => { - empty_method_call_expression(method_call_expression) - } - ExpressionKind::Constructor(constructor_expression) => { - empty_constructor_expression(constructor_expression) - } - ExpressionKind::MemberAccess(member_access_expression) => { - empty_member_access_expression(member_access_expression) - } - ExpressionKind::Cast(cast_expression) => empty_cast_expression(cast_expression), - ExpressionKind::Infix(infix_expression) => empty_infix_expression(infix_expression), - ExpressionKind::If(if_expression) => empty_if_expression(if_expression), - ExpressionKind::Variable(path) => empty_path(path), - ExpressionKind::Tuple(expressions) => { - empty_expressions(expressions); - } - ExpressionKind::Lambda(lambda) => empty_lambda(lambda), - ExpressionKind::Parenthesized(expression) => empty_expression(expression), - ExpressionKind::Unquote(expression) => { - empty_expression(expression); - } - ExpressionKind::Comptime(block_expression, _span) => { - empty_block_expression(block_expression); - } - ExpressionKind::Unsafe(block_expression, _span) => { - empty_block_expression(block_expression); - } - ExpressionKind::AsTraitPath(path) => { - empty_unresolved_type(&mut path.typ); - empty_path(&mut path.trait_path); - empty_ident(&mut path.impl_item); - empty_type_args(&mut path.trait_generics); - } - ExpressionKind::TypePath(path) => { - empty_unresolved_type(&mut path.typ); - empty_ident(&mut path.item); - empty_type_args(&mut path.turbofish); - } - ExpressionKind::Quote(..) 
- | ExpressionKind::Resolved(_) - | ExpressionKind::Interned(_) - | ExpressionKind::InternedStatement(_) - | ExpressionKind::Error => (), - } -} - -fn empty_assign_statement(assign_statement: &mut AssignStatement) { - empty_lvalue(&mut assign_statement.lvalue); - empty_expression(&mut assign_statement.expression); -} - -fn empty_for_loop_statement(for_loop_statement: &mut ForLoopStatement) { - for_loop_statement.span = Default::default(); - empty_ident(&mut for_loop_statement.identifier); - empty_for_range(&mut for_loop_statement.range); - empty_expression(&mut for_loop_statement.block); -} - -fn empty_unresolved_types(unresolved_types: &mut [UnresolvedType]) { - for unresolved_type in unresolved_types.iter_mut() { - empty_unresolved_type(unresolved_type); - } -} - -fn empty_type_args(generics: &mut GenericTypeArgs) { - empty_unresolved_types(&mut generics.ordered_args); - for (name, typ) in &mut generics.named_args { - empty_ident(name); - empty_unresolved_type(typ); - } -} - -fn empty_unresolved_type(unresolved_type: &mut UnresolvedType) { - unresolved_type.span = Default::default(); - - match &mut unresolved_type.typ { - UnresolvedTypeData::Array(unresolved_type_expression, unresolved_type) => { - empty_unresolved_type_expression(unresolved_type_expression); - empty_unresolved_type(unresolved_type); - } - UnresolvedTypeData::Slice(unresolved_type) => empty_unresolved_type(unresolved_type), - UnresolvedTypeData::Expression(unresolved_type_expression) => { - empty_unresolved_type_expression(unresolved_type_expression) - } - UnresolvedTypeData::FormatString(unresolved_type_expression, unresolved_type) => { - empty_unresolved_type_expression(unresolved_type_expression); - empty_unresolved_type(unresolved_type); - } - UnresolvedTypeData::Parenthesized(unresolved_type) => { - empty_unresolved_type(unresolved_type) - } - UnresolvedTypeData::Named(path, unresolved_types, _) => { - empty_path(path); - empty_type_args(unresolved_types); - } - UnresolvedTypeData::TraitAsType(path, unresolved_types) => { - empty_path(path); - empty_type_args(unresolved_types); - } - UnresolvedTypeData::MutableReference(unresolved_type) => { - empty_unresolved_type(unresolved_type) - } - UnresolvedTypeData::Tuple(unresolved_types) => empty_unresolved_types(unresolved_types), - UnresolvedTypeData::Function(args, ret, _env, _) => { - empty_unresolved_types(args); - empty_unresolved_type(ret); - } - UnresolvedTypeData::AsTraitPath(path) => { - empty_unresolved_type(&mut path.typ); - empty_path(&mut path.trait_path); - empty_ident(&mut path.impl_item); - } - UnresolvedTypeData::FieldElement - | UnresolvedTypeData::Integer(_, _) - | UnresolvedTypeData::Bool - | UnresolvedTypeData::String(_) - | UnresolvedTypeData::Unit - | UnresolvedTypeData::Quoted(_) - | UnresolvedTypeData::Resolved(_) - | UnresolvedTypeData::Interned(_) - | UnresolvedTypeData::Unspecified - | UnresolvedTypeData::Error => (), - } -} - -fn empty_unresolved_generics(unresolved_generic: &mut UnresolvedGenerics) { - for generic in unresolved_generic.iter_mut() { - empty_unresolved_generic(generic); - } -} - -fn empty_unresolved_generic(unresolved_generic: &mut UnresolvedGeneric) { - match unresolved_generic { - UnresolvedGeneric::Variable(ident) => empty_ident(ident), - UnresolvedGeneric::Numeric { ident, typ } => { - empty_ident(ident); - empty_unresolved_type(typ); - } - UnresolvedGeneric::Resolved(..) 
=> (), - } -} - -fn empty_pattern(pattern: &mut Pattern) { - match pattern { - Pattern::Identifier(ident) => empty_ident(ident), - Pattern::Mutable(pattern, _span, _) => { - empty_pattern(pattern); - } - Pattern::Tuple(patterns, _) => { - for pattern in patterns.iter_mut() { - empty_pattern(pattern); - } - } - Pattern::Struct(path, patterns, _) => { - empty_path(path); - for (name, pattern) in patterns.iter_mut() { - empty_ident(name); - empty_pattern(pattern); - } - } - Pattern::Interned(_, _) => (), - } -} - -fn empty_unresolved_trait_constraints( - unresolved_trait_constraints: &mut [UnresolvedTraitConstraint], -) { - for trait_constraint in unresolved_trait_constraints.iter_mut() { - empty_unresolved_trait_constraint(trait_constraint); - } -} - -fn empty_unresolved_trait_constraint(unresolved_trait_constraint: &mut UnresolvedTraitConstraint) { - empty_unresolved_type(&mut unresolved_trait_constraint.typ); -} - -fn empty_function_return_type(function_return_type: &mut FunctionReturnType) { - match function_return_type { - FunctionReturnType::Ty(unresolved_type) => empty_unresolved_type(unresolved_type), - FunctionReturnType::Default(_) => (), - } -} - -fn empty_ident(ident: &mut Ident) { - ident.0.set_span(Default::default()); -} - -fn empty_path(path: &mut Path) { - path.span = Default::default(); - for segment in path.segments.iter_mut() { - empty_path_segment(segment); - } -} - -fn empty_path_segment(segment: &mut PathSegment) { - segment.span = Default::default(); - empty_ident(&mut segment.ident); -} - -fn empty_literal(literal: &mut Literal) { - match literal { - Literal::Array(array_literal) => empty_array_literal(array_literal), - Literal::Slice(array_literal) => empty_array_literal(array_literal), - Literal::Bool(_) - | Literal::Integer(_, _) - | Literal::Str(_) - | Literal::RawStr(_, _) - | Literal::FmtStr(_) - | Literal::Unit => (), - } -} - -fn empty_array_literal(array_literal: &mut ArrayLiteral) { - match array_literal { - ArrayLiteral::Standard(expressions) => { - empty_expressions(expressions); - } - ArrayLiteral::Repeated { repeated_element, length } => { - empty_expression(repeated_element); - empty_expression(length); - } - } -} - -fn empty_prefix_expression(prefix_expression: &mut PrefixExpression) { - empty_expression(&mut prefix_expression.rhs); -} - -fn empty_index_expression(index_expression: &mut IndexExpression) { - empty_expression(&mut index_expression.collection); - empty_expression(&mut index_expression.index); -} - -fn empty_call_expression(call_expression: &mut CallExpression) { - empty_expression(&mut call_expression.func); - empty_expressions(&mut call_expression.arguments); -} - -fn empty_method_call_expression(method_call_expression: &mut MethodCallExpression) { - empty_expression(&mut method_call_expression.object); - empty_ident(&mut method_call_expression.method_name); - if let Some(generics) = &mut method_call_expression.generics { - empty_unresolved_types(generics); - } - empty_expressions(&mut method_call_expression.arguments); -} - -fn empty_constructor_expression(constructor_expression: &mut ConstructorExpression) { - empty_unresolved_type(&mut constructor_expression.typ); - for (name, expression) in constructor_expression.fields.iter_mut() { - empty_ident(name); - empty_expression(expression); - } -} - -fn empty_member_access_expression(member_access_expression: &mut MemberAccessExpression) { - empty_expression(&mut member_access_expression.lhs); - empty_ident(&mut member_access_expression.rhs); -} - -fn empty_cast_expression(cast_expression: 
&mut CastExpression) { - empty_expression(&mut cast_expression.lhs); - empty_unresolved_type(&mut cast_expression.r#type); -} - -fn empty_infix_expression(infix_expression: &mut InfixExpression) { - empty_expression(&mut infix_expression.lhs); - empty_expression(&mut infix_expression.rhs); -} - -fn empty_if_expression(if_expression: &mut IfExpression) { - empty_expression(&mut if_expression.condition); - empty_expression(&mut if_expression.consequence); - if let Some(alternative) = &mut if_expression.alternative { - empty_expression(alternative); - } -} - -fn empty_lambda(lambda: &mut Lambda) { - for (name, typ) in lambda.parameters.iter_mut() { - empty_pattern(name); - empty_unresolved_type(typ); - } - empty_unresolved_type(&mut lambda.return_type); - empty_expression(&mut lambda.body); -} - -fn empty_lvalue(lvalue: &mut LValue) { - match lvalue { - LValue::Ident(ident) => empty_ident(ident), - LValue::MemberAccess { ref mut object, ref mut field_name, span: _ } => { - empty_lvalue(object); - empty_ident(field_name); - } - LValue::Index { ref mut array, ref mut index, span: _ } => { - empty_lvalue(array); - empty_expression(index); - } - LValue::Dereference(lvalue, _) => empty_lvalue(lvalue), - LValue::Interned(..) => (), - } -} - -fn empty_for_range(for_range: &mut ForRange) { - match for_range { - ForRange::Range(from, to) => { - empty_expression(from); - empty_expression(to); - } - ForRange::Array(expression) => empty_expression(expression), - } -} - -fn empty_unresolved_type_expression(unresolved_type_expression: &mut UnresolvedTypeExpression) { - match unresolved_type_expression { - UnresolvedTypeExpression::Variable(path) => empty_path(path), - UnresolvedTypeExpression::BinaryOperation(lhs, _, rhs, _) => { - empty_unresolved_type_expression(lhs); - empty_unresolved_type_expression(rhs); - } - UnresolvedTypeExpression::Constant(_, _) => (), - UnresolvedTypeExpression::AsTraitPath(path) => { - empty_unresolved_type(&mut path.typ); - empty_path(&mut path.trait_path); - empty_ident(&mut path.impl_item); - } - } -} diff --git a/noir/noir-repo/compiler/noirc_driver/Cargo.toml b/noir/noir-repo/compiler/noirc_driver/Cargo.toml index 6b200e79b89..6a902ec20b8 100644 --- a/noir/noir-repo/compiler/noirc_driver/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_driver/Cargo.toml @@ -28,8 +28,6 @@ fxhash.workspace = true rust-embed.workspace = true tracing.workspace = true -aztec_macros = { path = "../../aztec_macros" } - [features] bn254 = ["noirc_frontend/bn254", "noirc_evaluator/bn254"] bls12_381 = ["noirc_frontend/bls12_381", "noirc_evaluator/bls12_381"] diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 81ac5df66ba..74916d65264 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -17,7 +17,6 @@ use noirc_frontend::debug::build_debug_crate_file; use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; -use noirc_frontend::macros_api::MacroProcessor; use noirc_frontend::monomorphization::{ errors::MonomorphizationError, monomorphize, monomorphize_debug, }; @@ -278,10 +277,6 @@ pub fn check_crate( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult<()> { - let options = CompileOptions { disable_macros: true, ..options.clone() }; - let macros: &[&dyn MacroProcessor] = - if options.disable_macros { &[] } else { &[&aztec_macros::AztecMacro] }; - let mut errors = vec![]; let 
error_on_unused_imports = true; let diagnostics = CrateDefMap::collect_defs( @@ -289,7 +284,6 @@ pub fn check_crate( context, options.debug_comptime_in_file.as_deref(), error_on_unused_imports, - macros, ); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { let diagnostic = CustomDiagnostic::from(&error); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index fb7091a8854..c71c3a33edf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -732,6 +732,10 @@ impl<'a> FunctionContext<'a> { let element_types = Self::convert_type(element_type); values.map_both(element_types, |value, element_type| { let reference = value.eval_reference(); + // Reference counting in brillig relies on us incrementing reference + // counts when arrays/slices are constructed or indexed. + // Thus, if we dereference an lvalue which happens to be array/slice we should increment its reference counter. + self.builder.increment_array_reference_count(reference); self.builder.insert_load(reference, element_type).into() }) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index c581ea9d62a..7b0a6d028de 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -802,6 +802,7 @@ impl Display for TypePath { impl FunctionDefinition { pub fn normal( name: &Ident, + is_unconstrained: bool, generics: &UnresolvedGenerics, parameters: &[(Ident, UnresolvedType)], body: &BlockExpression, @@ -821,7 +822,7 @@ impl FunctionDefinition { FunctionDefinition { name: name.clone(), attributes: Attributes::empty(), - is_unconstrained: false, + is_unconstrained, is_comptime: false, visibility: ItemVisibility::Private, generics: generics.clone(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs index 0faaf409e6c..d7c8769620d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs @@ -11,10 +11,9 @@ use crate::{ hir_def::{function::Parameters, traits::TraitFunction}, macros_api::{ BlockExpression, FunctionDefinition, FunctionReturnType, Ident, ItemVisibility, - NodeInterner, NoirFunction, Param, Pattern, UnresolvedType, Visibility, + NodeInterner, NoirFunction, UnresolvedType, }, node_interner::{FuncId, ReferenceId, TraitId}, - token::Attributes, Kind, ResolvedGeneric, Type, TypeBindings, TypeVariableKind, }; @@ -103,6 +102,7 @@ impl<'context> Elaborator<'context> { this.resolve_trait_function( trait_id, name, + *is_unconstrained, generics, parameters, return_type, @@ -164,6 +164,7 @@ impl<'context> Elaborator<'context> { &mut self, trait_id: TraitId, name: &Ident, + is_unconstrained: bool, generics: &UnresolvedGenerics, parameters: &[(Ident, UnresolvedType)], return_type: &FunctionReturnType, @@ -175,25 +176,17 @@ impl<'context> Elaborator<'context> { self.scopes.start_function(); let kind = FunctionKind::Normal; - let def = FunctionDefinition { - name: name.clone(), - attributes: Attributes::empty(), - is_unconstrained: false, - is_comptime: false, - visibility: ItemVisibility::Public, // Trait functions are always public - generics: generics.clone(), - parameters: vecmap(parameters, 
|(name, typ)| Param { - visibility: Visibility::Private, - pattern: Pattern::Identifier(name.clone()), - typ: typ.clone(), - span: name.span(), - }), - body: BlockExpression { statements: Vec::new() }, - span: name.span(), - where_clause: where_clause.to_vec(), - return_type: return_type.clone(), - return_visibility: Visibility::Private, - }; + let mut def = FunctionDefinition::normal( + name, + is_unconstrained, + generics, + parameters, + &BlockExpression { statements: Vec::new() }, + where_clause, + return_type, + ); + // Trait functions are always public + def.visibility = ItemVisibility::Public; let mut function = NoirFunction { kind, def }; self.define_function_meta(&mut function, func_id, Some(trait_id)); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs index 143e0450bac..105f6e09395 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -36,7 +36,7 @@ pub(super) fn display_quoted( writeln!(f, "quote {{")?; let indent = indent + 1; write!(f, "{}", " ".repeat(indent * 4))?; - display_tokens(tokens, interner, indent, f)?; + TokensPrettyPrinter { tokens, interner, indent }.fmt(f)?; writeln!(f)?; let indent = indent - 1; write!(f, "{}", " ".repeat(indent * 4))?; @@ -47,30 +47,26 @@ pub(super) fn display_quoted( struct TokensPrettyPrinter<'tokens, 'interner> { tokens: &'tokens [Token], interner: &'interner NodeInterner, + indent: usize, } impl<'tokens, 'interner> Display for TokensPrettyPrinter<'tokens, 'interner> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - display_tokens(self.tokens, self.interner, 0, f) - } -} + let mut token_printer = TokenPrettyPrinter::new(self.interner, self.indent); + for token in self.tokens { + token_printer.print(token, f)?; + } -fn display_tokens( - tokens: &[Token], - interner: &NodeInterner, - indent: usize, - f: &mut std::fmt::Formatter<'_>, -) -> std::fmt::Result { - let mut token_printer = TokenPrettyPrinter::new(interner, indent); - for token in tokens { - token_printer.print(token, f)?; + // If the printer refrained from printing a token right away, this will make it do it + token_printer.print(&Token::EOF, f)?; + + Ok(()) } - Ok(()) } pub(super) fn tokens_to_string(tokens: Rc>, interner: &NodeInterner) -> String { let tokens: Vec = tokens.iter().cloned().collect(); - TokensPrettyPrinter { tokens: &tokens, interner }.to_string() + TokensPrettyPrinter { tokens: &tokens, interner, indent: 0 }.to_string() } /// Tries to print tokens in a way that it'll be easier for the user to understand a @@ -95,6 +91,7 @@ struct TokenPrettyPrinter<'interner> { last_was_alphanumeric: bool, last_was_right_brace: bool, last_was_semicolon: bool, + last_was_op: bool, } impl<'interner> TokenPrettyPrinter<'interner> { @@ -105,6 +102,7 @@ impl<'interner> TokenPrettyPrinter<'interner> { last_was_alphanumeric: false, last_was_right_brace: false, last_was_semicolon: false, + last_was_op: false, } } @@ -112,6 +110,9 @@ impl<'interner> TokenPrettyPrinter<'interner> { let last_was_alphanumeric = self.last_was_alphanumeric; self.last_was_alphanumeric = false; + let last_was_op = self.last_was_op; + self.last_was_op = false; + // After `}` we usually want a newline... but not always! 
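// (Illustrative aside, not part of this diff: the `last_was_op` bookkeeping added in this
// hunk defers the space that normally follows an arithmetic or shift operator until the
// next token is known, so compound assignments print tightly. Assuming a token stream,
// the intended output is roughly:
//   `a` `+` `=` `1`  ->  "a += 1"   // no space between `+` and `=`
//   `a` `+` `1`      ->  "a + 1"    // the deferred space is emitted before `1`
// The `Token::EOF` print added to `TokensPrettyPrinter` exists to flush any such
// deferred output once the token stream ends.)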
if self.last_was_right_brace { self.last_was_right_brace = false; @@ -163,6 +164,12 @@ impl<'interner> TokenPrettyPrinter<'interner> { } } + // If the last token was one of `+`, `-`, etc. and the current token is not `=`, we want a space + // (we avoid outputting a space if the token is `=` a bit below) + if last_was_op && !matches!(token, Token::Assign) { + write!(f, " ")?; + } + match token { Token::QuotedType(id) => write!(f, "{}", self.interner.get_quoted_type(*id)), Token::InternedExpr(id) => { @@ -235,16 +242,25 @@ impl<'interner> TokenPrettyPrinter<'interner> { | Token::GreaterEqual | Token::Equal | Token::NotEqual - | Token::Plus + | Token::Arrow => write!(f, " {token} "), + Token::Assign => { + if last_was_op { + write!(f, "{token} ") + } else { + write!(f, " {token} ") + } + } + Token::Plus | Token::Minus | Token::Star | Token::Slash | Token::Percent | Token::Ampersand | Token::ShiftLeft - | Token::ShiftRight - | Token::Assign - | Token::Arrow => write!(f, " {token} "), + | Token::ShiftRight => { + self.last_was_op = true; + write!(f, " {token}") + } Token::LeftParen | Token::RightParen | Token::LeftBracket diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs index a47dbeace50..5b03b27e0b2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -46,7 +46,7 @@ fn interpret_helper(src: &str) -> Result { let def_map = CrateDefMap { root: module_id, modules, krate, extern_prelude: BTreeMap::new() }; let mut collector = DefCollector::new(def_map); - collect_defs(&mut collector, ast, FileId::dummy(), module_id, krate, &mut context, &[]); + collect_defs(&mut collector, ast, FileId::dummy(), module_id, krate, &mut context); context.def_maps.insert(krate, collector.def_map); let main = context.get_main_function(&krate).expect("Expected 'main' function"); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index ab045c52169..d365e5807c2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -14,7 +14,7 @@ use crate::{Generics, Type}; use crate::hir::resolution::import::{resolve_import, ImportDirective, PathResolution}; use crate::hir::Context; -use crate::macros_api::{Expression, MacroError, MacroProcessor}; +use crate::macros_api::Expression; use crate::node_interner::{ FuncId, GlobalId, ModuleAttributes, NodeInterner, ReferenceId, StructId, TraitId, TraitImplId, TypeAliasId, @@ -214,12 +214,6 @@ impl<'a> From<&'a CompilationError> for CustomDiagnostic { } } -impl From for CompilationError { - fn from(value: MacroError) -> Self { - CompilationError::DefinitionError(DefCollectorErrorKind::MacroError(value)) - } -} - impl From for CompilationError { fn from(value: ParserError) -> Self { CompilationError::ParseError(value) @@ -272,7 +266,6 @@ impl DefCollector { root_file_id: FileId, debug_comptime_in_file: Option<&str>, error_on_unused_items: bool, - macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; let crate_id = def_map.krate; @@ -291,7 +284,6 @@ impl DefCollector { context, debug_comptime_in_file, error_on_usage_tracker, - macro_processors, )); let dep_def_map = @@ -329,7 +321,6 @@ impl 
DefCollector { crate_root, crate_id, context, - macro_processors, )); let submodules = vecmap(def_collector.def_map.modules().iter(), |(index, _)| index); @@ -381,6 +372,8 @@ impl DefCollector { let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); let file_id = current_def_map.file_id(module_id); + let has_path_resolution_error = resolved_import.error.is_some(); + if let Some(error) = resolved_import.error { errors.push(( DefCollectorErrorKind::PathResolutionError(error).into(), @@ -410,24 +403,29 @@ impl DefCollector { let result = current_def_map.modules[resolved_import.module_scope.0] .import(name.clone(), visibility, module_def_id, is_prelude); - let module_id = - ModuleId { krate: crate_id, local_id: resolved_import.module_scope }; - context.def_interner.usage_tracker.add_unused_item( - module_id, - name.clone(), - UnusedItem::Import, - visibility, - ); - - if visibility != ItemVisibility::Private { - let local_id = resolved_import.module_scope; - let defining_module = ModuleId { krate: crate_id, local_id }; - context.def_interner.register_name_for_auto_import( - name.to_string(), - module_def_id, + // If we error on path resolution don't also say it's unused (in case it ends up being unused) + if !has_path_resolution_error { + let module_id = ModuleId { + krate: crate_id, + local_id: resolved_import.module_scope, + }; + context.def_interner.usage_tracker.add_unused_item( + module_id, + name.clone(), + UnusedItem::Import, visibility, - Some(defining_module), ); + + if visibility != ItemVisibility::Private { + let local_id = resolved_import.module_scope; + let defining_module = ModuleId { krate: crate_id, local_id }; + context.def_interner.register_name_for_auto_import( + name.to_string(), + module_def_id, + visibility, + Some(defining_module), + ); + } } let last_segment = collected_import.path.last_ident(); @@ -479,14 +477,6 @@ impl DefCollector { errors.append(&mut more_errors); - for macro_processor in macro_processors { - macro_processor.process_typed_ast(&crate_id, context).unwrap_or_else( - |(macro_err, file_id)| { - errors.push((macro_err.into(), file_id)); - }, - ); - } - if error_on_unused_items { Self::check_unused_items(context, crate_id, &mut errors); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 508765f943c..f50a0608fab 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -23,7 +23,6 @@ use crate::usage_tracker::UnusedItem; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, - macros_api::MacroProcessor, node_interner::{FunctionModifiers, TraitId, TypeAliasId}, parser::{SortedModule, SortedSubModule}, }; @@ -59,21 +58,15 @@ pub fn collect_defs( module_id: LocalModuleId, crate_id: CrateId, context: &mut Context, - macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut collector = ModCollector { def_collector, file_id, module_id }; let mut errors: Vec<(CompilationError, FileId)> = vec![]; // First resolve the module declarations for decl in ast.module_decls { - errors.extend(collector.parse_module_declaration( - context, - decl, - crate_id, - file_id, - module_id, - macro_processors, - )); + errors.extend( + collector.parse_module_declaration(context, decl, crate_id, file_id, module_id), + ); } errors.extend(collector.collect_submodules( @@ -82,7 +75,6 @@ 
pub fn collect_defs( module_id, ast.submodules, file_id, - macro_processors, )); // Then add the imports to defCollector to resolve once all modules in the hierarchy have been resolved @@ -474,6 +466,7 @@ impl<'a> ModCollector<'a> { let impl_method = NoirFunction::normal(FunctionDefinition::normal( name, + *is_unconstrained, generics, parameters, body, @@ -593,7 +586,6 @@ impl<'a> ModCollector<'a> { parent_module_id: LocalModuleId, submodules: Vec>, file_id: FileId, - macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; for submodule in submodules { @@ -636,7 +628,6 @@ impl<'a> ModCollector<'a> { child.local_id, crate_id, context, - macro_processors, )); } Err(error) => { @@ -658,7 +649,6 @@ impl<'a> ModCollector<'a> { crate_id: CrateId, parent_file_id: FileId, parent_module_id: LocalModuleId, - macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut doc_comments = mod_decl.doc_comments; let mod_decl = mod_decl.item; @@ -694,24 +684,7 @@ impl<'a> ModCollector<'a> { // Parse the AST for the module we just found and then recursively look for it's defs let (ast, parsing_errors) = context.parsed_file_results(child_file_id); - let mut ast = ast.into_sorted(); - - for macro_processor in macro_processors { - match macro_processor.process_untyped_ast( - ast.clone(), - &crate_id, - child_file_id, - context, - ) { - Ok(processed_ast) => { - ast = processed_ast; - } - Err((error, file_id)) => { - let def_error = DefCollectorErrorKind::MacroError(error); - errors.push((def_error.into(), file_id)); - } - } - } + let ast = ast.into_sorted(); errors.extend( parsing_errors.iter().map(|e| (e.clone().into(), child_file_id)).collect::>(), @@ -755,7 +728,6 @@ impl<'a> ModCollector<'a> { child_mod_id.local_id, crate_id, context, - macro_processors, )); } Err(error) => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs index 75b860bf2c6..d810e95218c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -1,7 +1,6 @@ use crate::graph::CrateId; use crate::hir::def_collector::dc_crate::{CompilationError, DefCollector}; use crate::hir::Context; -use crate::macros_api::MacroProcessor; use crate::node_interner::{FuncId, GlobalId, NodeInterner, StructId}; use crate::parser::{parse_program, ParsedModule, ParserError}; use crate::token::{FunctionAttribute, SecondaryAttribute, TestScope}; @@ -18,8 +17,6 @@ pub use module_data::*; mod namespace; pub use namespace::*; -use super::def_collector::errors::DefCollectorErrorKind; - /// The name that is used for a non-contract program's entry-point function. pub const MAIN_FUNCTION: &str = "main"; @@ -77,7 +74,6 @@ impl CrateDefMap { context: &mut Context, debug_comptime_in_file: Option<&str>, error_on_unused_imports: bool, - macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { // Check if this Crate has already been compiled // XXX: There is probably a better alternative for this. @@ -92,20 +88,7 @@ impl CrateDefMap { // First parse the root file. 
let root_file_id = context.crate_graph[crate_id].root_file_id; let (ast, parsing_errors) = context.parsed_file_results(root_file_id); - let mut ast = ast.into_sorted(); - - for macro_processor in macro_processors { - match macro_processor.process_untyped_ast(ast.clone(), &crate_id, root_file_id, context) - { - Ok(processed_ast) => { - ast = processed_ast; - } - Err((error, file_id)) => { - let def_error = DefCollectorErrorKind::MacroError(error); - errors.push((def_error.into(), file_id)); - } - } - } + let ast = ast.into_sorted(); // Allocate a default Module for the root, giving it a ModuleId let mut modules: Arena = Arena::default(); @@ -133,7 +116,6 @@ impl CrateDefMap { root_file_id, debug_comptime_in_file, error_on_unused_imports, - macro_processors, )); errors.extend( diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs index 2440109af15..6e64c509195 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs @@ -34,6 +34,8 @@ pub enum LexerErrorKind { InvalidEscape { escaped: char, span: Span }, #[error("Invalid quote delimiter `{delimiter}`, valid delimiters are `{{`, `[`, and `(`")] InvalidQuoteDelimiter { delimiter: SpannedToken }, + #[error("Non-ASCII characters are invalid in comments")] + NonAsciiComment { span: Span }, #[error("Expected `{end_delim}` to close this {start_delim}")] UnclosedQuote { start_delim: SpannedToken, end_delim: Token }, } @@ -65,6 +67,7 @@ impl LexerErrorKind { LexerErrorKind::UnterminatedStringLiteral { span } => *span, LexerErrorKind::InvalidEscape { span, .. } => *span, LexerErrorKind::InvalidQuoteDelimiter { delimiter } => delimiter.to_span(), + LexerErrorKind::NonAsciiComment { span, .. } => *span, LexerErrorKind::UnclosedQuote { start_delim, .. } => start_delim.to_span(), } } @@ -124,6 +127,9 @@ impl LexerErrorKind { LexerErrorKind::InvalidQuoteDelimiter { delimiter } => { (format!("Invalid quote delimiter `{delimiter}`"), "Valid delimiters are `{`, `[`, and `(`".to_string(), delimiter.to_span()) }, + LexerErrorKind::NonAsciiComment { span } => { + ("Non-ASCII character in comment".to_string(), "Invalid comment character: only ASCII is currently supported.".to_string(), *span) + } LexerErrorKind::UnclosedQuote { start_delim, end_delim } => { ("Unclosed `quote` expression".to_string(), format!("Expected a `{end_delim}` to close this `{start_delim}`"), start_delim.to_span()) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs index a4ec19ba363..95eb41fd6d0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs @@ -606,6 +606,11 @@ impl<'a> Lexer<'a> { }; let comment = self.eat_while(None, |ch| ch != '\n'); + if !comment.is_ascii() { + let span = Span::from(start..self.position); + return Err(LexerErrorKind::NonAsciiComment { span }); + } + if doc_style.is_none() && self.skip_comments { return self.next_token(); } @@ -651,6 +656,11 @@ impl<'a> Lexer<'a> { } if depth == 0 { + if !content.is_ascii() { + let span = Span::from(start..self.position); + return Err(LexerErrorKind::NonAsciiComment { span }); + } + if doc_style.is_none() && self.skip_comments { return self.next_token(); } @@ -1331,6 +1341,7 @@ mod tests { Err(LexerErrorKind::InvalidIntegerLiteral { .. }) | Err(LexerErrorKind::UnexpectedCharacter { .. 
}) + | Err(LexerErrorKind::NonAsciiComment { .. }) | Err(LexerErrorKind::UnterminatedBlockComment { .. }) => { expected_token_found = true; } @@ -1389,4 +1400,17 @@ mod tests { } } } + + #[test] + fn test_non_ascii_comments() { + let cases = vec!["// 🙂", "// schön", "/* in the middle 🙂 of a comment */"]; + + for source in cases { + let mut lexer = Lexer::new(source); + assert!( + lexer.any(|token| matches!(token, Err(LexerErrorKind::NonAsciiComment { .. }))), + "Expected NonAsciiComment error" + ); + } + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lib.rs b/noir/noir-repo/compiler/noirc_frontend/src/lib.rs index 9f7a0564789..b2d7c297b8c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lib.rs @@ -64,24 +64,4 @@ pub mod macros_api { }; pub use crate::hir::{def_map::ModuleDefId, Context as HirContext}; pub use crate::{StructType, Type}; - - /// Methods to process the AST before and after type checking - pub trait MacroProcessor { - /// Function to manipulate the AST before type checking has been completed. - fn process_untyped_ast( - &self, - ast: SortedModule, - crate_id: &CrateId, - file_id: FileId, - context: &HirContext, - ) -> Result; - - /// Function to manipulate the AST after type checking has been completed. - /// The AST after type checking has been done is called the HIR. - fn process_typed_ast( - &self, - crate_id: &CrateId, - context: &mut HirContext, - ) -> Result<(), (MacroError, FileId)>; - } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 672328c05bd..cb291902ae2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -1,12 +1,15 @@ #![cfg(test)] -#[cfg(test)] +mod bound_checks; +mod imports; mod name_shadowing; +mod references; +mod turbofish; +mod unused_items; // XXX: These tests repeat a lot of code // what we should do is have test cases which are passed to a test harness // A test harness will allow for more expressive and readable tests -use core::panic; use std::collections::BTreeMap; use fm::FileId; @@ -98,7 +101,6 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation let debug_comptime_in_file = None; let error_on_unused_imports = true; - let macro_processors = &[]; // Now we want to populate the CrateDefMap using the DefCollector errors.extend(DefCollector::collect_crate_and_dependencies( @@ -108,7 +110,6 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation root_file_id, debug_comptime_in_file, error_on_unused_imports, - macro_processors, )); } (program, context, errors) @@ -2342,174 +2343,6 @@ fn impl_not_found_for_inner_impl() { )); } -// Regression for #5388 -#[test] -fn comptime_let() { - let src = r#"fn main() { - comptime let my_var = 2; - assert_eq(my_var, 2); - }"#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 0); -} - -#[test] -fn overflowing_u8() { - let src = r#" - fn main() { - let _: u8 = 256; - }"#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - if let CompilationError::TypeError(error) = &errors[0].0 { - assert_eq!( - error.to_string(), - "The value `2⁸` cannot fit into `u8` which has range `0..=255`" - ); - } else { - panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); - } -} - -#[test] -fn underflowing_u8() { - let src = r#" - fn main() { - let _: u8 = -1; - }"#; - let errors = 
get_program_errors(src); - assert_eq!(errors.len(), 1); - - if let CompilationError::TypeError(error) = &errors[0].0 { - assert_eq!( - error.to_string(), - "The value `-1` cannot fit into `u8` which has range `0..=255`" - ); - } else { - panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); - } -} - -#[test] -fn overflowing_i8() { - let src = r#" - fn main() { - let _: i8 = 128; - }"#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - if let CompilationError::TypeError(error) = &errors[0].0 { - assert_eq!( - error.to_string(), - "The value `2⁷` cannot fit into `i8` which has range `-128..=127`" - ); - } else { - panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); - } -} - -#[test] -fn underflowing_i8() { - let src = r#" - fn main() { - let _: i8 = -129; - }"#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - if let CompilationError::TypeError(error) = &errors[0].0 { - assert_eq!( - error.to_string(), - "The value `-129` cannot fit into `i8` which has range `-128..=127`" - ); - } else { - panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); - } -} - -#[test] -fn turbofish_numeric_generic_nested_call() { - // Check for turbofish numeric generics used with function calls - let src = r#" - fn foo() -> [u8; N] { - [0; N] - } - - fn bar() -> [u8; N] { - foo::() - } - - global M: u32 = 3; - - fn main() { - let _ = bar::(); - } - "#; - assert_no_errors(src); - - // Check for turbofish numeric generics used with method calls - let src = r#" - struct Foo { - a: T - } - - impl Foo { - fn static_method() -> [u8; N] { - [0; N] - } - - fn impl_method(self) -> [T; N] { - [self.a; N] - } - } - - fn bar() -> [u8; N] { - let _ = Foo::static_method::(); - let x: Foo = Foo { a: 0 }; - x.impl_method::() - } - - global M: u32 = 3; - - fn main() { - let _ = bar::(); - } - "#; - assert_no_errors(src); -} - -#[test] -fn use_super() { - let src = r#" - fn some_func() {} - - mod foo { - use super::some_func; - - pub fn bar() { - some_func(); - } - } - "#; - assert_no_errors(src); -} - -#[test] -fn use_super_in_path() { - let src = r#" - fn some_func() {} - - mod foo { - pub fn func() { - super::some_func(); - } - } - "#; - assert_no_errors(src); -} - #[test] fn no_super() { let src = "use super::some_func;"; @@ -2801,150 +2634,6 @@ fn trait_constraint_on_tuple_type() { assert_no_errors(src); } -#[test] -fn turbofish_in_constructor_generics_mismatch() { - let src = r#" - struct Foo { - x: T - } - - fn main() { - let _ = Foo:: { x: 1 }; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::TypeError(TypeCheckError::GenericCountMismatch { .. }), - )); -} - -#[test] -fn turbofish_in_constructor() { - let src = r#" - struct Foo { - x: T - } - - fn main() { - let x: Field = 0; - let _ = Foo:: { x: x }; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, expr_typ, .. 
- }) = &errors[0].0 - else { - panic!("Expected a type mismatch error, got {:?}", errors[0].0); - }; - - assert_eq!(expected_typ, "i32"); - assert_eq!(expr_typ, "Field"); -} - -#[test] -fn turbofish_in_middle_of_variable_unsupported_yet() { - let src = r#" - struct Foo { - x: T - } - - impl Foo { - fn new(x: T) -> Self { - Foo { x } - } - } - - fn main() { - let _ = Foo::::new(1); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - assert!(matches!( - errors[0].0, - CompilationError::TypeError(TypeCheckError::UnsupportedTurbofishUsage { .. }), - )); -} - -#[test] -fn turbofish_in_struct_pattern() { - let src = r#" - struct Foo { - x: T - } - - fn main() { - let value: Field = 0; - let Foo:: { x } = Foo { x: value }; - let _ = x; - } - "#; - assert_no_errors(src); -} - -#[test] -fn turbofish_in_struct_pattern_errors_if_type_mismatch() { - let src = r#" - struct Foo { - x: T - } - - fn main() { - let value: Field = 0; - let Foo:: { x } = Foo { x: value }; - let _ = x; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::TypeError(TypeCheckError::TypeMismatchWithSource { .. }) = &errors[0].0 - else { - panic!("Expected a type mismatch error, got {:?}", errors[0].0); - }; -} - -#[test] -fn turbofish_in_struct_pattern_generic_count_mismatch() { - let src = r#" - struct Foo { - x: T - } - - fn main() { - let value = 0; - let Foo:: { x } = Foo { x: value }; - let _ = x; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::TypeError(TypeCheckError::GenericCountMismatch { - item, - expected, - found, - .. - }) = &errors[0].0 - else { - panic!("Expected a generic count mismatch error, got {:?}", errors[0].0); - }; - - assert_eq!(item, "struct Foo"); - assert_eq!(*expected, 1); - assert_eq!(*found, 2); -} - #[test] fn incorrect_generic_count_on_struct_impl() { let src = r#" @@ -3255,306 +2944,6 @@ fn as_trait_path_syntax_no_impl() { assert!(matches!(&errors[0].0, TypeError(TypeCheckError::NoMatchingImplFound { .. 
}))); } -#[test] -fn errors_on_unused_private_import() { - let src = r#" - mod foo { - pub fn bar() {} - pub fn baz() {} - - pub trait Foo { - } - } - - use foo::bar; - use foo::baz; - use foo::Foo; - - impl Foo for Field { - } - - fn main() { - baz(); - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = - &errors[0].0 - else { - panic!("Expected an unused item error"); - }; - - assert_eq!(ident.to_string(), "bar"); - assert_eq!(*item_type, "import"); -} - -#[test] -fn errors_on_unused_pub_crate_import() { - let src = r#" - mod foo { - pub fn bar() {} - pub fn baz() {} - - pub trait Foo { - } - } - - pub(crate) use foo::bar; - use foo::baz; - use foo::Foo; - - impl Foo for Field { - } - - fn main() { - baz(); - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = - &errors[0].0 - else { - panic!("Expected an unused item error"); - }; - - assert_eq!(ident.to_string(), "bar"); - assert_eq!(*item_type, "import"); -} - -#[test] -fn warns_on_use_of_private_exported_item() { - let src = r#" - mod foo { - mod bar { - pub fn baz() {} - } - - use bar::baz; - - pub fn qux() { - baz(); - } - } - - fn main() { - foo::baz(); - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 2); // An existing bug causes this error to be duplicated - - assert!(matches!( - &errors[0].0, - CompilationError::ResolverError(ResolverError::PathResolutionError( - PathResolutionError::Private(..), - )) - )); -} - -#[test] -fn can_use_pub_use_item() { - let src = r#" - mod foo { - mod bar { - pub fn baz() {} - } - - pub use bar::baz; - } - - fn main() { - foo::baz(); - } - "#; - assert_no_errors(src); -} - -#[test] -fn warns_on_re_export_of_item_with_less_visibility() { - let src = r#" - mod foo { - mod bar { - pub(crate) fn baz() {} - } - - pub use bar::baz; - } - - fn main() { - foo::baz(); - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - assert!(matches!( - &errors[0].0, - CompilationError::DefinitionError( - DefCollectorErrorKind::CannotReexportItemWithLessVisibility { .. } - ) - )); -} - -#[test] -fn unquoted_integer_as_integer_token() { - let src = r#" - trait Serialize { - fn serialize() {} - } - - #[attr] - pub fn foobar() {} - - comptime fn attr(_f: FunctionDefinition) -> Quoted { - let serialized_len = 1; - // We are testing that when we unquote $serialized_len, it's unquoted - // as the token `1` and not as something else that later won't be parsed correctly - // in the context of a generic argument. 
- quote { - impl Serialize<$serialized_len> for Field { - fn serialize() { } - } - } - } - - fn main() {} - "#; - - assert_no_errors(src); -} - -#[test] -fn errors_on_unused_function() { - let src = r#" - contract some_contract { - // This function is unused, but it's a contract entrypoint - // so it should not produce a warning - fn foo() -> pub Field { - 1 - } - } - - - fn foo() { - bar(); - } - - fn bar() {} - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = - &errors[0].0 - else { - panic!("Expected an unused item error"); - }; - - assert_eq!(ident.to_string(), "foo"); - assert_eq!(*item_type, "function"); -} - -#[test] -fn errors_on_unused_struct() { - let src = r#" - struct Foo {} - struct Bar {} - - fn main() { - let _ = Bar {}; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = - &errors[0].0 - else { - panic!("Expected an unused item error"); - }; - - assert_eq!(ident.to_string(), "Foo"); - assert_eq!(*item_type, "struct"); -} - -#[test] -fn errors_on_unused_trait() { - let src = r#" - trait Foo {} - trait Bar {} - - pub struct Baz { - } - - impl Bar for Baz {} - - fn main() { - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = - &errors[0].0 - else { - panic!("Expected an unused item error"); - }; - - assert_eq!(ident.to_string(), "Foo"); - assert_eq!(*item_type, "trait"); -} - -#[test] -fn constrained_reference_to_unconstrained() { - let src = r#" - fn main(mut x: u32, y: pub u32) { - let x_ref = &mut x; - if x == 5 { - unsafe { - mut_ref_input(x_ref, y); - } - } - - assert(x == 10); - } - - unconstrained fn mut_ref_input(x: &mut u32, y: u32) { - *x = y; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::TypeError(TypeCheckError::ConstrainedReferenceToUnconstrained { .. }) = - &errors[0].0 - else { - panic!("Expected an error about passing a constrained reference to unconstrained"); - }; -} - -#[test] -fn comptime_type_in_runtime_code() { - let source = "pub fn foo(_f: FunctionDefinition) {}"; - let errors = get_program_errors(source); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError(ResolverError::ComptimeTypeInRuntimeCode { .. }) - )); -} - #[test] fn arithmetic_generics_canonicalization_deduplication_regression() { let source = r#" @@ -3574,82 +2963,6 @@ fn arithmetic_generics_canonicalization_deduplication_regression() { assert_eq!(errors.len(), 0); } -#[test] -fn cannot_mutate_immutable_variable() { - let src = r#" - fn main() { - let array = [1]; - mutate(&mut array); - } - - fn mutate(_: &mut [Field; 1]) {} - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::TypeError(TypeCheckError::CannotMutateImmutableVariable { name, .. 
}) = - &errors[0].0 - else { - panic!("Expected a CannotMutateImmutableVariable error"); - }; - - assert_eq!(name, "array"); -} - -#[test] -fn cannot_mutate_immutable_variable_on_member_access() { - let src = r#" - struct Foo { - x: Field - } - - fn main() { - let foo = Foo { x: 0 }; - mutate(&mut foo.x); - } - - fn mutate(foo: &mut Field) { - *foo = 1; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::TypeError(TypeCheckError::CannotMutateImmutableVariable { name, .. }) = - &errors[0].0 - else { - panic!("Expected a CannotMutateImmutableVariable error"); - }; - - assert_eq!(name, "foo"); -} - -#[test] -fn does_not_crash_when_passing_mutable_undefined_variable() { - let src = r#" - fn main() { - mutate(&mut undefined); - } - - fn mutate(foo: &mut Field) { - *foo = 1; - } - "#; - - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - - let CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, .. }) = - &errors[0].0 - else { - panic!("Expected a VariableNotDeclared error"); - }; - - assert_eq!(name, "undefined"); -} - #[test] fn infer_globals_to_u32_from_type_use() { let src = r#" @@ -3727,24 +3040,51 @@ fn use_numeric_generic_in_trait_method() { } #[test] -fn macro_result_type_mismatch() { +fn errors_once_on_unused_import_that_is_not_accessible() { + // Tests that we don't get an "unused import" here given that the import is not accessible let src = r#" - fn main() { - comptime { - let x = unquote!(quote { "test" }); - let _: Field = x; - } - } - - comptime fn unquote(q: Quoted) -> Quoted { - q + mod moo { + struct Foo {} } + use moo::Foo; + fn main() {} "#; let errors = get_program_errors(src); assert_eq!(errors.len(), 1); assert!(matches!( errors[0].0, - CompilationError::TypeError(TypeCheckError::TypeMismatch { .. }) + CompilationError::DefinitionError(DefCollectorErrorKind::PathResolutionError( + PathResolutionError::Private { .. } + )) )); } + +#[test] +fn trait_unconstrained_methods_typechecked_correctly() { + // This test checks that we properly track whether a method has been declared as unconstrained on the trait definition + // and preserves that through typechecking. 
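    // (Illustrative note: this relies on the `is_unconstrained` flag now threaded through
    // `FunctionDefinition::normal` in the expression.rs and traits.rs hunks above, so the
    // provided default body of `identity` below is elaborated as unconstrained rather than
    // silently being treated as a constrained function.)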
+ let src = r#" + trait Foo { + unconstrained fn identity(self) -> Self { + self + } + + unconstrained fn foo(self) -> u64; + } + + impl Foo for Field { + unconstrained fn foo(self) -> u64 { + self as u64 + } + } + + unconstrained fn main() { + assert_eq(2.foo() as Field, 2.identity()); + } + "#; + + let errors = get_program_errors(src); + println!("{errors:?}"); + assert_eq!(errors.len(), 0); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/bound_checks.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/bound_checks.rs new file mode 100644 index 00000000000..271f9d7a1a7 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/bound_checks.rs @@ -0,0 +1,79 @@ +use crate::hir::def_collector::dc_crate::CompilationError; + +use super::get_program_errors; + +#[test] +fn overflowing_u8() { + let src = r#" + fn main() { + let _: u8 = 256; + }"#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + if let CompilationError::TypeError(error) = &errors[0].0 { + assert_eq!( + error.to_string(), + "The value `2⁸` cannot fit into `u8` which has range `0..=255`" + ); + } else { + panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); + } +} + +#[test] +fn underflowing_u8() { + let src = r#" + fn main() { + let _: u8 = -1; + }"#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + if let CompilationError::TypeError(error) = &errors[0].0 { + assert_eq!( + error.to_string(), + "The value `-1` cannot fit into `u8` which has range `0..=255`" + ); + } else { + panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); + } +} + +#[test] +fn overflowing_i8() { + let src = r#" + fn main() { + let _: i8 = 128; + }"#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + if let CompilationError::TypeError(error) = &errors[0].0 { + assert_eq!( + error.to_string(), + "The value `2⁷` cannot fit into `i8` which has range `-128..=127`" + ); + } else { + panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); + } +} + +#[test] +fn underflowing_i8() { + let src = r#" + fn main() { + let _: i8 = -129; + }"#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + if let CompilationError::TypeError(error) = &errors[0].0 { + assert_eq!( + error.to_string(), + "The value `-129` cannot fit into `i8` which has range `-128..=127`" + ); + } else { + panic!("Expected OverflowingAssignment error, got {:?}", errors[0].0); + } +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/imports.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/imports.rs new file mode 100644 index 00000000000..dfdc60e15e4 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/imports.rs @@ -0,0 +1,112 @@ +use crate::hir::{ + def_collector::{dc_crate::CompilationError, errors::DefCollectorErrorKind}, + resolution::{errors::ResolverError, import::PathResolutionError}, +}; + +use super::{assert_no_errors, get_program_errors}; + +#[test] +fn use_super() { + let src = r#" + fn some_func() {} + + mod foo { + use super::some_func; + + pub fn bar() { + some_func(); + } + } + "#; + assert_no_errors(src); +} + +#[test] +fn use_super_in_path() { + let src = r#" + fn some_func() {} + + mod foo { + pub fn func() { + super::some_func(); + } + } + "#; + assert_no_errors(src); +} + +#[test] +fn warns_on_use_of_private_exported_item() { + let src = r#" + mod foo { + mod bar { + pub fn baz() {} + } + + use bar::baz; + + pub fn qux() { + baz(); + } + } + + fn main() { + foo::baz(); + } + 
"#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 2); // An existing bug causes this error to be duplicated + + assert!(matches!( + &errors[0].0, + CompilationError::ResolverError(ResolverError::PathResolutionError( + PathResolutionError::Private(..), + )) + )); +} + +#[test] +fn can_use_pub_use_item() { + let src = r#" + mod foo { + mod bar { + pub fn baz() {} + } + + pub use bar::baz; + } + + fn main() { + foo::baz(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn warns_on_re_export_of_item_with_less_visibility() { + let src = r#" + mod foo { + mod bar { + pub(crate) fn baz() {} + } + + pub use bar::baz; + } + + fn main() { + foo::baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + assert!(matches!( + &errors[0].0, + CompilationError::DefinitionError( + DefCollectorErrorKind::CannotReexportItemWithLessVisibility { .. } + ) + )); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs new file mode 100644 index 00000000000..d980cba5cfd --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs @@ -0,0 +1,65 @@ +use crate::hir::def_collector::dc_crate::CompilationError; + +use super::get_program_errors; + +#[test] +fn comptime_type_in_runtime_code() { + let source = "pub fn foo(_f: FunctionDefinition) {}"; + let errors = get_program_errors(source); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::ComptimeTypeInRuntimeCode { .. }) + )); +} + +#[test] +fn macro_result_type_mismatch() { + let src = r#" + fn main() { + comptime { + let x = unquote!(quote { "test" }); + let _: Field = x; + } + } + + comptime fn unquote(q: Quoted) -> Quoted { + q + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::TypeMismatch { .. }) + )); +} + +#[test] +fn unquoted_integer_as_integer_token() { + let src = r#" + trait Serialize { + fn serialize() {} + } + + #[attr] + pub fn foobar() {} + + comptime fn attr(_f: FunctionDefinition) -> Quoted { + let serialized_len = 1; + // We are testing that when we unquote $serialized_len, it's unquoted + // as the token `1` and not as something else that later won't be parsed correctly + // in the context of a generic argument. + quote { + impl Serialize<$serialized_len> for Field { + fn serialize() { } + } + } + } + + fn main() {} + "#; + + assert_no_errors(src); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/references.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/references.rs new file mode 100644 index 00000000000..ce72240d146 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/references.rs @@ -0,0 +1,111 @@ +use crate::hir::{ + def_collector::dc_crate::CompilationError, resolution::errors::ResolverError, + type_check::TypeCheckError, +}; + +use super::get_program_errors; + +#[test] +fn cannot_mutate_immutable_variable() { + let src = r#" + fn main() { + let array = [1]; + mutate(&mut array); + } + + fn mutate(_: &mut [Field; 1]) {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::CannotMutateImmutableVariable { name, .. 
}) = + &errors[0].0 + else { + panic!("Expected a CannotMutateImmutableVariable error"); + }; + + assert_eq!(name, "array"); +} + +#[test] +fn cannot_mutate_immutable_variable_on_member_access() { + let src = r#" + struct Foo { + x: Field + } + + fn main() { + let foo = Foo { x: 0 }; + mutate(&mut foo.x); + } + + fn mutate(foo: &mut Field) { + *foo = 1; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::CannotMutateImmutableVariable { name, .. }) = + &errors[0].0 + else { + panic!("Expected a CannotMutateImmutableVariable error"); + }; + + assert_eq!(name, "foo"); +} + +#[test] +fn does_not_crash_when_passing_mutable_undefined_variable() { + let src = r#" + fn main() { + mutate(&mut undefined); + } + + fn mutate(foo: &mut Field) { + *foo = 1; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, .. }) = + &errors[0].0 + else { + panic!("Expected a VariableNotDeclared error"); + }; + + assert_eq!(name, "undefined"); +} + +#[test] +fn constrained_reference_to_unconstrained() { + let src = r#" + fn main(mut x: u32, y: pub u32) { + let x_ref = &mut x; + if x == 5 { + unsafe { + mut_ref_input(x_ref, y); + } + } + + assert(x == 10); + } + + unconstrained fn mut_ref_input(x: &mut u32, y: u32) { + *x = y; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::ConstrainedReferenceToUnconstrained { .. }) = + &errors[0].0 + else { + panic!("Expected an error about passing a constrained reference to unconstrained"); + }; +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/turbofish.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/turbofish.rs new file mode 100644 index 00000000000..43d536fd196 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/turbofish.rs @@ -0,0 +1,198 @@ +use crate::hir::{def_collector::dc_crate::CompilationError, type_check::TypeCheckError}; + +use super::{assert_no_errors, get_program_errors}; + +#[test] +fn turbofish_numeric_generic_nested_call() { + // Check for turbofish numeric generics used with function calls + let src = r#" + fn foo() -> [u8; N] { + [0; N] + } + + fn bar() -> [u8; N] { + foo::() + } + + global M: u32 = 3; + + fn main() { + let _ = bar::(); + } + "#; + assert_no_errors(src); + + // Check for turbofish numeric generics used with method calls + let src = r#" + struct Foo { + a: T + } + + impl Foo { + fn static_method() -> [u8; N] { + [0; N] + } + + fn impl_method(self) -> [T; N] { + [self.a; N] + } + } + + fn bar() -> [u8; N] { + let _ = Foo::static_method::(); + let x: Foo = Foo { a: 0 }; + x.impl_method::() + } + + global M: u32 = 3; + + fn main() { + let _ = bar::(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn turbofish_in_constructor_generics_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let _ = Foo:: { x: 1 }; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::GenericCountMismatch { .. 
}), + )); +} + +#[test] +fn turbofish_in_constructor() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let x: Field = 0; + let _ = Foo:: { x: x }; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, expr_typ, .. + }) = &errors[0].0 + else { + panic!("Expected a type mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(expected_typ, "i32"); + assert_eq!(expr_typ, "Field"); +} + +#[test] +fn turbofish_in_middle_of_variable_unsupported_yet() { + let src = r#" + struct Foo { + x: T + } + + impl Foo { + fn new(x: T) -> Self { + Foo { x } + } + } + + fn main() { + let _ = Foo::::new(1); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::UnsupportedTurbofishUsage { .. }), + )); +} + +#[test] +fn turbofish_in_struct_pattern() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value: Field = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + assert_no_errors(src); +} + +#[test] +fn turbofish_in_struct_pattern_errors_if_type_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value: Field = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::TypeMismatchWithSource { .. }) = &errors[0].0 + else { + panic!("Expected a type mismatch error, got {:?}", errors[0].0); + }; +} + +#[test] +fn turbofish_in_struct_pattern_generic_count_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::GenericCountMismatch { + item, + expected, + found, + .. 
+ }) = &errors[0].0 + else { + panic!("Expected a generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(item, "struct Foo"); + assert_eq!(*expected, 1); + assert_eq!(*found, 2); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/unused_items.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/unused_items.rs new file mode 100644 index 00000000000..b49414d8b03 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/unused_items.rs @@ -0,0 +1,159 @@ +use crate::hir::{def_collector::dc_crate::CompilationError, resolution::errors::ResolverError}; + +use super::get_program_errors; + +#[test] +fn errors_on_unused_private_import() { + let src = r#" + mod foo { + pub fn bar() {} + pub fn baz() {} + + pub trait Foo { + } + } + + use foo::bar; + use foo::baz; + use foo::Foo; + + impl Foo for Field { + } + + fn main() { + baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 + else { + panic!("Expected an unused item error"); + }; + + assert_eq!(ident.to_string(), "bar"); + assert_eq!(*item_type, "import"); +} + +#[test] +fn errors_on_unused_pub_crate_import() { + let src = r#" + mod foo { + pub fn bar() {} + pub fn baz() {} + + pub trait Foo { + } + } + + pub(crate) use foo::bar; + use foo::baz; + use foo::Foo; + + impl Foo for Field { + } + + fn main() { + baz(); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 + else { + panic!("Expected an unused item error"); + }; + + assert_eq!(ident.to_string(), "bar"); + assert_eq!(*item_type, "import"); +} + +#[test] +fn errors_on_unused_function() { + let src = r#" + contract some_contract { + // This function is unused, but it's a contract entrypoint + // so it should not produce a warning + fn foo() -> pub Field { + 1 + } + } + + + fn foo() { + bar(); + } + + fn bar() {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 + else { + panic!("Expected an unused item error"); + }; + + assert_eq!(ident.to_string(), "foo"); + assert_eq!(*item_type, "function"); +} + +#[test] +fn errors_on_unused_struct() { + let src = r#" + struct Foo {} + struct Bar {} + + fn main() { + let _ = Bar {}; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 + else { + panic!("Expected an unused item error"); + }; + + assert_eq!(ident.to_string(), "Foo"); + assert_eq!(*item_type, "struct"); +} + +#[test] +fn errors_on_unused_trait() { + let src = r#" + trait Foo {} + trait Bar {} + + pub struct Baz { + } + + impl Bar for Baz {} + + fn main() { + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::UnusedItem { ident, item_type }) = + &errors[0].0 + else { + panic!("Expected an unused item error"); + }; + + assert_eq!(ident.to_string(), "Foo"); + assert_eq!(*item_type, "trait"); +} diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json index 3ace51689fb..dbc5fb5a43e 100644 --- a/noir/noir-repo/cspell.json +++ b/noir/noir-repo/cspell.json @@ -213,6 +213,7 @@ "udiv", "umap", "underconstrained", + 
"underflowing", "uninstantiated", "unnormalized", "unoptimized", @@ -226,8 +227,7 @@ "wasmer", "Weierstraß", "zkhash", - "zshell", - "Linea" + "zshell" ], "ignorePaths": [ "./**/node_modules/**", diff --git a/noir/noir-repo/noir_stdlib/src/collections/map.nr b/noir/noir-repo/noir_stdlib/src/collections/map.nr index d3c4d3d99b4..a336a01d101 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/map.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/map.nr @@ -4,7 +4,7 @@ use crate::default::Default; use crate::hash::{Hash, Hasher, BuildHasher}; use crate::collections::bounded_vec::BoundedVec; -// We use load factor α_max = 0.75. +// We use load factor alpha_max = 0.75. // Upon exceeding it, assert will fail in order to inform the user // about performance degradation, so that he can adjust the capacity. global MAX_LOAD_FACTOR_NUMERATOR = 3; @@ -624,7 +624,7 @@ impl HashMap { (hash + (attempt + attempt * attempt) / 2) % N } - // Amount of elements in the table in relation to available slots exceeds α_max. + // Amount of elements in the table in relation to available slots exceeds alpha_max. // To avoid a comparatively more expensive division operation // we conduct cross-multiplication instead. // n / m >= MAX_LOAD_FACTOR_NUMERATOR / MAX_LOAD_FACTOR_DEN0MINATOR diff --git a/noir/noir-repo/noir_stdlib/src/ec/mod.nr b/noir/noir-repo/noir_stdlib/src/ec/mod.nr index 093852acc79..3c1ba87eb9f 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/mod.nr @@ -3,15 +3,15 @@ // ======== // The following three elliptic curve representations are admissible: mod tecurve; // Twisted Edwards curves -mod swcurve; // Elliptic curves in Short Weierstraß form +mod swcurve; // Elliptic curves in Short Weierstrass form mod montcurve; // Montgomery curves mod consts; // Commonly used curve presets // // Note that Twisted Edwards and Montgomery curves are (birationally) equivalent, so that -// they may be freely converted between one another, whereas Short Weierstraß curves are +// they may be freely converted between one another, whereas Short Weierstrass curves are // more general. Diagramatically: // -// tecurve == montcurve ⊂ swcurve +// tecurve == montcurve `subset` swcurve // // Each module is further divided into two submodules, 'affine' and 'curvegroup', depending // on the preferred coordinate representation. Affine coordinates are none other than the usual @@ -47,7 +47,7 @@ mod consts; // Commonly used curve presets // coordinates by calling the `into_group` (resp. `into_affine`) method on them. Finally, // Points may be freely mapped between their respective Twisted Edwards and Montgomery // representations by calling the `into_montcurve` or `into_tecurve` methods. For mappings -// between Twisted Edwards/Montgomery curves and Short Weierstraß curves, see the Curve section +// between Twisted Edwards/Montgomery curves and Short Weierstrass curves, see the Curve section // below, as the underlying mappings are those of curves rather than ambient spaces. // As a rule, Points in affine (or CurveGroup) coordinates are mapped to Points in affine // (resp. CurveGroup) coordinates. @@ -91,21 +91,21 @@ mod consts; // Commonly used curve presets // Curve configurations may also be converted between different curve representations by calling the `into_swcurve`, // `into_montcurve` and `into_tecurve` methods subject to the relation between the curve representations mentioned // above. 
Note that it is possible to map Points from a Twisted Edwards/Montgomery curve to the corresponding -// Short Weierstraß representation and back, and the methods to do so are exposed as `map_into_swcurve` and +// Short Weierstrass representation and back, and the methods to do so are exposed as `map_into_swcurve` and // `map_from_swcurve`, which each take one argument, the point to be mapped. // // Curve maps // ========== // There are a few different ways of mapping Field elements to elliptic curves. Here we provide the simplified // Shallue-van de Woestijne-Ulas and Elligator 2 methods, the former being applicable to all curve types -// provided above subject to the constraint that the coefficients of the corresponding Short Weierstraß curve satisfies +// provided above subject to the constraint that the coefficients of the corresponding Short Weierstrass curve satisfies // a*b != 0 and the latter being applicable to Montgomery and Twisted Edwards curves subject to the constraint that // the coefficients of the corresponding Montgomery curve satisfy j*k != 0 and (j^2 - 4)/k^2 is non-square. // // The simplified Shallue-van de Woestijne-Ulas method is exposed as the method `swu_map` on the Curve configuration and // depends on two parameters, a Field element z != -1 for which g(x) - z is irreducible over Field and g(b/(z*a)) is // square, where g(x) = x^3 + a*x + b is the right-hand side of the defining equation of the corresponding Short -// Weierstraß curve, and a Field element u to be mapped onto the curve. For example, in the case of bjj_affine above, +// Weierstrass curve, and a Field element u to be mapped onto the curve. For example, in the case of bjj_affine above, // it may be determined using the scripts provided at that z = 5. // // The Elligator 2 method is exposed as the method `elligator2_map` on the Curve configurations of Montgomery and diff --git a/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr b/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr index 68b5c67dcba..395e8528b45 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr @@ -145,7 +145,7 @@ mod affine { TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } - // Conversion to equivalent Short Weierstraß curve + // Conversion to equivalent Short Weierstrass curve pub fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; @@ -155,7 +155,7 @@ mod affine { SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } - // Point mapping into equivalent Short Weierstraß curve + // Point mapping into equivalent Short Weierstrass curve pub fn map_into_swcurve(self, p: Point) -> SWPoint { if p.is_zero() { SWPoint::zero() @@ -164,7 +164,7 @@ mod affine { } } - // Point mapping from equivalent Short Weierstraß curve + // Point mapping from equivalent Short Weierstrass curve fn map_from_swcurve(self, p: SWPoint) -> Point { let SWPoint {x, y, infty} = p; let j = self.j; @@ -347,7 +347,7 @@ mod curvegroup { TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } - // Conversion to equivalent Short Weierstraß curve + // Conversion to equivalent Short Weierstrass curve fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; @@ -357,12 +357,12 @@ mod curvegroup { SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } - // Point mapping into equivalent Short Weierstraß curve + // Point mapping into equivalent Short Weierstrass curve pub fn map_into_swcurve(self, p: Point) -> SWPoint { 
self.into_affine().map_into_swcurve(p.into_affine()).into_group() } - // Point mapping from equivalent Short Weierstraß curve + // Point mapping from equivalent Short Weierstrass curve fn map_from_swcurve(self, p: SWPoint) -> Point { self.into_affine().map_from_swcurve(p.into_affine()).into_group() } diff --git a/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr b/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr index 238b0ce3c91..839069e1fd8 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr @@ -1,5 +1,5 @@ mod affine { - // Affine representation of Short Weierstraß curves + // Affine representation of Short Weierstrass curves // Points are represented by two-dimensional Cartesian coordinates. // Group operations are implemented in terms of those in CurveGroup (in this case, extended Twisted Edwards) coordinates // for reasons of efficiency, cf. . @@ -10,7 +10,7 @@ mod affine { use crate::cmp::Eq; // Curve specification - pub struct Curve { // Short Weierstraß curve + pub struct Curve { // Short Weierstrass curve // Coefficients in defining equation y^2 = x^3 + ax + b a: Field, b: Field, @@ -187,14 +187,14 @@ mod affine { } mod curvegroup { - // CurveGroup representation of Weierstraß curves + // CurveGroup representation of Weierstrass curves // Points are represented by three-dimensional Jacobian coordinates. // See for details. use crate::ec::swcurve::affine; use crate::cmp::Eq; // Curve specification - pub struct Curve { // Short Weierstraß curve + pub struct Curve { // Short Weierstrass curve // Coefficients in defining equation y^2 = x^3 + axz^4 + bz^6 a: Field, b: Field, diff --git a/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr b/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr index 760d9dc2b82..b306873806d 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr @@ -166,17 +166,17 @@ mod affine { MCurve::new(j, k, gen_montcurve) } - // Conversion to equivalent Short Weierstraß curve + // Conversion to equivalent Short Weierstrass curve pub fn into_swcurve(self) -> SWCurve { self.into_montcurve().into_swcurve() } - // Point mapping into equivalent Short Weierstraß curve + // Point mapping into equivalent Short Weierstrass curve pub fn map_into_swcurve(self, p: Point) -> SWPoint { self.into_montcurve().map_into_swcurve(p.into_montcurve()) } - // Point mapping from equivalent Short Weierstraß curve + // Point mapping from equivalent Short Weierstrass curve fn map_from_swcurve(self, p: SWPoint) -> Point { self.into_montcurve().map_from_swcurve(p).into_tecurve() } @@ -195,7 +195,7 @@ mod affine { mod curvegroup { // CurveGroup coordinate representation of Twisted Edwards curves // Points are represented by four-dimensional projective coordinates, viz. extended Twisted Edwards coordinates. - // See §3 of for details. + // See section 3 of for details. use crate::ec::tecurve::affine; use crate::ec::montcurve::curvegroup::Curve as MCurve; use crate::ec::montcurve::curvegroup::Point as MPoint; @@ -317,7 +317,7 @@ mod curvegroup { Point::new(x, y, t, z) } - // Point doubling, cf. §3.3 + // Point doubling, cf. 
section 3.3 pub fn double(self, p: Point) -> Point { let Point{x, y, t: _t, z} = p; @@ -385,17 +385,17 @@ mod curvegroup { self.into_affine().into_montcurve().into_group() } - // Conversion to equivalent Short Weierstraß curve + // Conversion to equivalent Short Weierstrass curve fn into_swcurve(self) -> SWCurve { self.into_montcurve().into_swcurve() } - // Point mapping into equivalent short Weierstraß curve + // Point mapping into equivalent short Weierstrass curve pub fn map_into_swcurve(self, p: Point) -> SWPoint { self.into_montcurve().map_into_swcurve(p.into_montcurve()) } - // Point mapping from equivalent short Weierstraß curve + // Point mapping from equivalent short Weierstrass curve fn map_from_swcurve(self, p: SWPoint) -> Point { self.into_montcurve().map_from_swcurve(p).into_tecurve() } diff --git a/noir/noir-repo/noir_stdlib/src/field/mod.nr b/noir/noir-repo/noir_stdlib/src/field/mod.nr index 176f102321d..e1d08c6c230 100644 --- a/noir/noir-repo/noir_stdlib/src/field/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/field/mod.nr @@ -118,7 +118,7 @@ impl Field { r } - // Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. + // Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x `elem` {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. pub fn sgn0(self) -> u1 { self as u1 } diff --git a/noir/noir-repo/noir_stdlib/src/hash/keccak.nr b/noir/noir-repo/noir_stdlib/src/hash/keccak.nr index 96e53429ac5..5346ff9fae6 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/keccak.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/keccak.nr @@ -1,10 +1,10 @@ use crate::collections::vec::Vec; use crate::runtime::is_unconstrained; -global LIMBS_PER_BLOCK = 17; //BLOCK_SIZE / 8; +global BLOCK_SIZE_IN_BYTES: u32 = 136; //(1600 - BITS * 2) / WORD_SIZE; +global WORD_SIZE: u32 = 8; // Limbs are made up of u64s so 8 bytes each. +global LIMBS_PER_BLOCK: u32 = BLOCK_SIZE_IN_BYTES / WORD_SIZE; global NUM_KECCAK_LANES: u32 = 25; -global BLOCK_SIZE = 136; //(1600 - BITS * 2) / WORD_SIZE; -global WORD_SIZE = 8; #[foreign(keccakf1600)] fn keccakf1600(input: [u64; 25]) -> [u64; 25] {} @@ -12,7 +12,7 @@ fn keccakf1600(input: [u64; 25]) -> [u64; 25] {} #[no_predicates] pub(crate) fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] { assert(N >= message_size); - let mut block_bytes = [0; BLOCK_SIZE]; + let mut block_bytes = [0; BLOCK_SIZE_IN_BYTES]; if is_unconstrained() { for i in 0..message_size { block_bytes[i] = input[i]; @@ -26,38 +26,26 @@ pub(crate) fn keccak256(input: [u8; N], message_size: u32) -> [u8; 3 } //1. 
format_input_lanes - let max_blocks = (N + BLOCK_SIZE) / BLOCK_SIZE; + let max_blocks = (N + BLOCK_SIZE_IN_BYTES) / BLOCK_SIZE_IN_BYTES; //maximum number of bytes to hash - let max_blocks_length = (BLOCK_SIZE * (max_blocks)); - let real_max_blocks = (message_size + BLOCK_SIZE) / BLOCK_SIZE; - let real_blocks_bytes = real_max_blocks * BLOCK_SIZE; + let max_blocks_length = (BLOCK_SIZE_IN_BYTES * max_blocks); + let real_max_blocks = (message_size + BLOCK_SIZE_IN_BYTES) / BLOCK_SIZE_IN_BYTES; + let real_blocks_bytes = real_max_blocks * BLOCK_SIZE_IN_BYTES; block_bytes[message_size] = 1; block_bytes[real_blocks_bytes - 1] = 0x80; - let num_limbs = max_blocks * LIMBS_PER_BLOCK; //max_blocks_length / WORD_SIZE; - let mut sliced_buffer = Vec::new(); // populate a vector of 64-bit limbs from our byte array + let num_limbs = max_blocks_length / WORD_SIZE; + let mut sliced_buffer = Vec::new(); for i in 0..num_limbs { let limb_start = WORD_SIZE * i; let mut sliced = 0; - if (limb_start + WORD_SIZE > max_blocks_length) { - let slice_size = max_blocks_length - limb_start; - let byte_shift = (WORD_SIZE - slice_size) * 8; - let mut v = 1; - for k in 0..slice_size { - sliced += v * (block_bytes[limb_start+k] as Field); - v *= 256; - } - let w = 1 << (byte_shift as u8); - sliced *= w as Field; - } else { - let mut v = 1; - for k in 0..WORD_SIZE { - sliced += v * (block_bytes[limb_start+k] as Field); - v *= 256; - } + let mut v = 1; + for k in 0..WORD_SIZE { + sliced += v * (block_bytes[limb_start+k] as Field); + v *= 256; } sliced_buffer.push(sliced as u64); @@ -81,9 +69,9 @@ pub(crate) fn keccak256(input: [u8; N], message_size: u32) -> [u8; 3 state = keccakf1600(state); } } else { - // `real_max_blocks` is guaranteed to at least be `1` + // `real_max_blocks` is guaranteed to at least be `1` // We peel out the first block as to avoid a conditional inside of the loop. - // Otherwise, a dynamic predicate can cause a blowup in a constrained runtime. + // Otherwise, a dynamic predicate can cause a blowup in a constrained runtime. for j in 0..LIMBS_PER_BLOCK { state[j] = sliced_buffer.get(j); } diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr index 848d561f755..a1c78a9b31c 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr @@ -4,7 +4,7 @@ mod consts; use crate::hash::poseidon::absorb; -// Variable-length Poseidon-128 sponge as suggested in second bullet point of §3 of https://eprint.iacr.org/2019/458.pdf +// Variable-length Poseidon-128 sponge as suggested in second bullet point of section 3 of https://eprint.iacr.org/2019/458.pdf #[field(bn254)] pub fn sponge(msg: [Field; N]) -> Field { absorb(consts::x5_5_config(), [0; 5], 4, 1, msg)[1] diff --git a/noir/noir-repo/noir_stdlib/src/hash/sha256.nr b/noir/noir-repo/noir_stdlib/src/hash/sha256.nr index e99c7678176..27eb673e035 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/sha256.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/sha256.nr @@ -187,18 +187,18 @@ pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { if !crate::runtime::is_unconstrained() { for i in 0..56 { - if i < msg_byte_ptr { - assert_eq(msg_block[i], last_block[i]); - } else { - assert_eq(msg_block[i], zero); - } + let predicate = (i < msg_byte_ptr) as u8; + let expected_byte = predicate * last_block[i]; + assert_eq(msg_block[i], expected_byte); } + // We verify the message length was inserted correctly by reversing the byte decomposition. 
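+        // For example, with message_size = 3: len = 24 = 0x18, bytes 56..64 of msg_block are
+        // [0, 0, 0, 0, 0, 0, 0, 0x18], and folding acc = 256 * acc + byte over them rebuilds 24,
+        // which is then compared against `len` in a single assertion.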
let len = 8 * message_size; - let len_bytes: [u8; 8] = (len as Field).to_be_bytes(); + let mut reconstructed_len: Field = 0; for i in 56..64 { - assert_eq(msg_block[i], len_bytes[i - 56]); + reconstructed_len = 256 * reconstructed_len + msg_block[i] as Field; } + assert_eq(reconstructed_len, len as Field); } hash_final_block(msg_block, h) @@ -255,4 +255,3 @@ fn hash_final_block(msg_block: [u8; 64], mut state: [u32; 8]) -> [u8; 32] { out_h } - diff --git a/noir/noir-repo/test_programs/compile_success_empty/brillig_field_binary_operations/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/brillig_field_binary_operations/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/brillig_integer_binary_operations/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/brillig_integer_binary_operations/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/brillig_modulo/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/brillig_modulo/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_recursion_regression/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_recursion_regression/Prover.toml deleted file mode 100644 index 745ce7c2361..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_recursion_regression/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = 5 -y = 6 diff --git a/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_579/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_579/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_to_bits/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/conditional_regression_to_bits/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/instruction_deduplication/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/instruction_deduplication/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/let_stmt/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/let_stmt/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/no_duplicate_methods/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/no_duplicate_methods/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/numeric_generics/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/numeric_generics/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/references_aliasing/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/references_aliasing/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/ret_fn_ret_cl/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/ret_fn_ret_cl/Prover.toml deleted file mode 100644 index 3a627b9188b..00000000000 --- 
a/noir/noir-repo/test_programs/compile_success_empty/ret_fn_ret_cl/Prover.toml +++ /dev/null @@ -1 +0,0 @@ -x = "10" diff --git a/noir/noir-repo/test_programs/compile_success_empty/simple_program_no_body/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/simple_program_no_body/Prover.toml deleted file mode 100644 index c2b2ccfd9f1..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/simple_program_no_body/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -_x = "3" -_y = "4" diff --git a/noir/noir-repo/test_programs/compile_success_empty/simple_range/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/simple_range/Prover.toml deleted file mode 100644 index 07890234a19..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/simple_range/Prover.toml +++ /dev/null @@ -1 +0,0 @@ -x = "3" diff --git a/noir/noir-repo/test_programs/compile_success_empty/slice_init_with_complex_type/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/slice_init_with_complex_type/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_allowed_item_name_matches/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_allowed_item_name_matches/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_associated_member_names_clashes/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_associated_member_names_clashes/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_default_implementation/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_default_implementation/Prover.toml deleted file mode 100644 index 71805e71e8e..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/trait_default_implementation/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "5" -y = "1" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_function_calls/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_function_calls/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_method_mut_self/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_method_mut_self/Prover.toml deleted file mode 100644 index f28f2f8cc48..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/trait_method_mut_self/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "5" -y = "10" diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_multi_module_test/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_multi_module_test/Prover.toml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_override_implementation/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_override_implementation/Prover.toml deleted file mode 100644 index 71805e71e8e..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/trait_override_implementation/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "5" -y = "1" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/traits/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/traits/Prover.toml deleted file mode 100644 index 
71805e71e8e..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/traits/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "5" -y = "1" \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/turbofish_call_func_diff_types/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/turbofish_call_func_diff_types/Prover.toml deleted file mode 100644 index f28f2f8cc48..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/turbofish_call_func_diff_types/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "5" -y = "10" diff --git a/noir/noir-repo/test_programs/compile_success_empty/vectors/Prover.toml b/noir/noir-repo/test_programs/compile_success_empty/vectors/Prover.toml deleted file mode 100644 index f28f2f8cc48..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/vectors/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "5" -y = "10" diff --git a/noir/noir-repo/test_programs/execution_success/brillig_rc_regression_6123/Nargo.toml b/noir/noir-repo/test_programs/execution_success/brillig_rc_regression_6123/Nargo.toml new file mode 100644 index 00000000000..533777df67f --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/brillig_rc_regression_6123/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_rc_regression_6123" +type = "bin" +authors = [""] +compiler_version = ">=0.34.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/brillig_rc_regression_6123/src/main.nr b/noir/noir-repo/test_programs/execution_success/brillig_rc_regression_6123/src/main.nr new file mode 100644 index 00000000000..3eb29659944 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/brillig_rc_regression_6123/src/main.nr @@ -0,0 +1,41 @@ +struct Builder { + note_hashes: BoundedVec, + nullifiers: BoundedVec, +} + +impl Builder { + fn append_note_hashes_with_logs(&mut self, num_note_hashes: u32) { + let index_offset = self.note_hashes.len(); + for i in 0..self.note_hashes.max_len() { + if i < num_note_hashes { + self.add_new_note_hash((index_offset + i) as Field); + } + } + } + + fn add_new_note_hash(&mut self, value: Field) { + self.note_hashes.push(value); + } +} + +fn swap_items(vec: &mut BoundedVec, from_index: u32, to_index: u32) { + let tmp = vec.storage[from_index]; + vec.storage[from_index] = vec.storage[to_index]; + vec.storage[to_index] = tmp; +} + +unconstrained fn main() { + let mut builder = Builder { note_hashes: BoundedVec::new(), nullifiers: BoundedVec::new() }; + + builder.append_note_hashes_with_logs(2); + builder.nullifiers.storage[1] = 27; + // Get ordered items before shuffling. + let note_hashes = builder.note_hashes.storage; + let original_first_note_hash = note_hashes[0]; + // Shuffle. 
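+    // The swap below mutates the vector's storage in place; `note_hashes` (copied from
+    // `.storage` above) must keep its original first element, i.e. the copy must not
+    // alias the mutated storage.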
+    swap_items(&mut builder.note_hashes, 1, 0);
+
+    for i in 0..1 {
+        assert_eq(note_hashes[i], original_first_note_hash);
+    }
+}
diff --git a/noir/noir-repo/test_programs/noir_test_failure/should_fail_mismatch/Prover.toml b/noir/noir-repo/test_programs/noir_test_failure/should_fail_mismatch/Prover.toml
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/noir/noir-repo/test_programs/noir_test_failure/should_fail_suite_with_one_failure/Prover.toml b/noir/noir-repo/test_programs/noir_test_failure/should_fail_suite_with_one_failure/Prover.toml
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/noir/noir-repo/test_programs/noir_test_success/regression_4080/Prover.toml b/noir/noir-repo/test_programs/noir_test_success/regression_4080/Prover.toml
deleted file mode 100644
index 0e5dfd5638d..00000000000
--- a/noir/noir-repo/test_programs/noir_test_success/regression_4080/Prover.toml
+++ /dev/null
@@ -1 +0,0 @@
-x = "5"
diff --git a/noir/noir-repo/tooling/debugger/ignored-tests.txt b/noir/noir-repo/tooling/debugger/ignored-tests.txt
index 78e14397938..0037b8e5d5f 100644
--- a/noir/noir-repo/tooling/debugger/ignored-tests.txt
+++ b/noir/noir-repo/tooling/debugger/ignored-tests.txt
@@ -4,4 +4,5 @@ is_unconstrained
 macros
 references
 regression_4709
-reference_only_used_as_alias
\ No newline at end of file
+reference_only_used_as_alias
+brillig_rc_regression_6123
\ No newline at end of file
diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_action/remove_unused_import.rs b/noir/noir-repo/tooling/lsp/src/requests/code_action/remove_unused_import.rs
index c660dd57e47..f1e12d64ef5 100644
--- a/noir/noir-repo/tooling/lsp/src/requests/code_action/remove_unused_import.rs
+++ b/noir/noir-repo/tooling/lsp/src/requests/code_action/remove_unused_import.rs
@@ -163,7 +163,7 @@ mod tests {
         let src = r#"
 mod moo {
-    fn foo() {}
+    pub fn foo() {}
 }

 use moo::fo>||||| FmtVisitor<'me> {
     pub(crate) fn slice(&self, span: impl Into<Span>) -> &'me str {
         let span = span.into();
-        &self.source[span.start() as usize..span.end() as usize]
+        str_slice(self.source, span.start() as usize, span.end() as usize)
     }

     pub(crate) fn span_after(&self, span: impl Into<Span>, token: Token) -> Span {
@@ -188,7 +188,7 @@ impl<'me> FmtVisitor<'me> {
             match comment.token() {
                 Token::LineComment(_, _) | Token::BlockComment(_, _) => {
-                    let comment = &slice[span.start() as usize..span.end() as usize];
+                    let comment = str_slice(slice, span.start() as usize, span.end() as usize);
                     if result.ends_with('\n') {
                         result.push_str(&indent);
                     } else if !self.at_start() {
@@ -247,6 +247,19 @@ impl<'me> FmtVisitor<'me> {
     }
 }

+// Slice `s` from `start` to at most `end`, clamping `end` up to the next char boundary
+// so the slice never splits a multi-byte UTF-8 character.
+pub(crate) fn str_slice(s: &str, start: usize, end: usize) -> &str {
+    &s[start..ceil_char_boundary(s, end)]
+}
+
+// Return the smallest byte index >= `byte_index` that lies on a char boundary,
+// or `s.len()` if there is none.
+pub(crate) fn ceil_char_boundary(s: &str, byte_index: usize) -> usize {
+    for i in byte_index..s.len() {
+        if s.is_char_boundary(i) {
+            return i;
+        }
+    }
+    s.len()
+}
+
 #[derive(Clone, Copy, Debug, Default)]
 pub(crate) struct Indent {
     block_indent: usize,