From c13a097b08b4270d5c9bbdec9c2624f09bd152a4 Mon Sep 17 00:00:00 2001
From: AztecBot
Date: Mon, 5 Aug 2024 08:02:13 +0000
Subject: [PATCH] [1 changes] feat: LSP hover now includes "Go to" links (https://github.com/noir-lang/noir/pull/5677)

feat: add `Type` methods: `as_tuple`, `as_slice`, `as_array`, `as_constant`, `is_bool` (https://github.com/noir-lang/noir/pull/5678)
fix: Derive generic types (https://github.com/noir-lang/noir/pull/5674)
feat: Add a limited form of arithmetic on generics (https://github.com/noir-lang/noir/pull/5625)
feat: add `Type::is_field` and `Type::as_integer` (https://github.com/noir-lang/noir/pull/5670)
fix: Fix where clause issue in items generated from attributes (https://github.com/noir-lang/noir/pull/5673)
feat(noir_js): Expose UltraHonk and integration tests (https://github.com/noir-lang/noir/pull/5656)
fix: workaround from_slice with nested slices (https://github.com/noir-lang/noir/pull/5648)
fix: Switch verify proof to arrays (https://github.com/noir-lang/noir/pull/5664)
feat: Resolve arguments to attributes (https://github.com/noir-lang/noir/pull/5649)
fix: Elaborate struct & trait annotations in the correct module (https://github.com/noir-lang/noir/pull/5643)
fix: let a trait impl that relies on another trait work (https://github.com/noir-lang/noir/pull/5646)

---
 .noir-sync-commit | 2 +-
 .../.github/ISSUE_TEMPLATE/bug_report.yml | 120 ---
 .../ISSUE_TEMPLATE/feature_request.yml | 71 --
 noir/noir-repo/Cargo.lock | 1 +
 .../acvm-repo/acir/src/circuit/mod.rs | 3 +
 .../acir/src/native_types/expression/mod.rs | 54 ++
 noir/noir-repo/acvm-repo/acvm/Cargo.toml | 1 +
 .../acvm/src/compiler/transformers/csat.rs | 65 +-
 .../acvm/tests/solver.proptest-regressions | 13 +
 noir/noir-repo/acvm-repo/acvm/tests/solver.rs | 190 +++-
 noir/noir-repo/acvm-repo/acvm_js/build.sh | 2 +-
 noir/noir-repo/aztec_macros/src/lib.rs | 22 +-
 ...te_note_hash_and_optionally_a_nullifier.rs | 14 +-
 .../src/transforms/contract_interface.rs | 5 +-
 .../aztec_macros/src/transforms/events.rs | 60 +-
 .../aztec_macros/src/transforms/functions.rs | 13 +-
 .../src/transforms/note_interface.rs | 90 +-
 .../aztec_macros/src/transforms/storage.rs | 18 +-
 .../aztec_macros/src/utils/ast_utils.rs | 18 +-
 .../aztec_macros/src/utils/hir_utils.rs | 4 +-
 noir/noir-repo/aztec_macros/src/utils/mod.rs | 1 +
 .../aztec_macros/src/utils/parse_utils.rs | 534 ++++++++++++
 noir/noir-repo/compiler/fm/src/file_map.rs | 18 +-
 .../circuits/recursion/src/main.nr | 7 +-
 .../test/node/prove_and_verify.test.ts | 142 ++-
 .../compiler/noirc_driver/src/abi_gen.rs | 3 +-
 .../compiler/noirc_driver/src/lib.rs | 58 +-
 .../noirc_driver/tests/stdlib_warnings.rs | 2 +-
 .../compiler/noirc_errors/src/position.rs | 4 +
 .../compiler/noirc_evaluator/src/ssa.rs | 33 +-
 .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 83 +-
 .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 102 ++-
 .../src/ssa/function_builder/data_bus.rs | 100 ++-
 .../noirc_evaluator/src/ssa/opt/die.rs | 4 +-
 .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 2 +-
 .../noirc_frontend/src/ast/expression.rs | 60 +-
 .../compiler/noirc_frontend/src/ast/mod.rs | 32 +-
 .../noirc_frontend/src/ast/statement.rs | 109 ++-
 .../noirc_frontend/src/ast/structure.rs | 1 -
 .../compiler/noirc_frontend/src/ast/traits.rs | 3 -
 .../compiler/noirc_frontend/src/debug/mod.rs | 76 +-
 .../noirc_frontend/src/elaborator/comptime.rs | 418 ++++++++-
 .../src/elaborator/expressions.rs | 31 +-
 .../noirc_frontend/src/elaborator/mod.rs | 567 ++++--------
 .../noirc_frontend/src/elaborator/patterns.rs | 78 +-
.../noirc_frontend/src/elaborator/scope.rs | 45 +- .../noirc_frontend/src/elaborator/traits.rs | 29 +- .../noirc_frontend/src/elaborator/types.rs | 176 ++-- .../noirc_frontend/src/elaborator/unquote.rs | 2 +- .../noirc_frontend/src/hir/comptime/errors.rs | 300 +++++-- .../src/hir/comptime/hir_to_display_ast.rs | 27 +- .../src/hir/comptime/interpreter.rs | 406 +++++---- .../src/hir/comptime/interpreter/builtin.rs | 601 +++++++++---- .../src/hir/comptime/interpreter/foreign.rs | 10 +- .../src/hir/comptime/interpreter/unquote.rs | 27 +- .../noirc_frontend/src/hir/comptime/tests.rs | 2 +- .../noirc_frontend/src/hir/comptime/value.rs | 147 +++- .../src/hir/def_collector/dc_crate.rs | 34 +- .../src/hir/def_collector/dc_mod.rs | 31 +- .../src/hir/def_collector/errors.rs | 19 + .../noirc_frontend/src/hir/def_map/mod.rs | 6 +- .../compiler/noirc_frontend/src/hir/mod.rs | 6 +- .../src/hir/resolution/errors.rs | 14 +- .../src/hir/resolution/import.rs | 18 +- .../src/hir/type_check/errors.rs | 6 + .../noirc_frontend/src/hir_def/function.rs | 8 + .../noirc_frontend/src/hir_def/types.rs | 250 +++++- .../noirc_frontend/src/lexer/errors.rs | 7 + .../noirc_frontend/src/lexer/token.rs | 10 + .../compiler/noirc_frontend/src/locations.rs | 4 +- .../src/monomorphization/mod.rs | 126 ++- .../noirc_frontend/src/node_interner.rs | 86 +- .../noirc_frontend/src/noir_parser.lalrpop | 18 +- .../noirc_frontend/src/parser/errors.rs | 2 + .../compiler/noirc_frontend/src/parser/mod.rs | 2 +- .../noirc_frontend/src/parser/parser.rs | 175 ++-- .../noirc_frontend/src/parser/parser.rs:28:9 | 45 + .../src/parser/parser/function.rs | 12 +- .../noirc_frontend/src/parser/parser/path.rs | 42 +- .../src/parser/parser/primitives.rs | 27 +- .../src/parser/parser/structs.rs | 13 +- .../src/parser/parser/traits.rs | 27 +- .../noirc_frontend/src/parser/parser/types.rs | 28 +- .../compiler/noirc_frontend/src/tests.rs | 429 ++++++++- noir/noir-repo/cspell.json | 2 + .../docs/docs/explainers/cspell.json | 5 + .../docs/explainers/explainer-writing-noir.md | 173 ++++ .../getting_started/backend}/_category_.json | 2 +- .../docs/getting_started/backend/index.md | 31 + .../getting_started/barretenberg/index.md | 47 - .../docs/getting_started/hello_noir/index.md | 59 +- .../hello_noir/project_breakdown.md | 3 +- .../getting_started/installation/index.md | 6 +- .../docs/noir/standard_library/recursion.md | 12 +- .../tooling => reference}/noir_codegen.md | 2 +- .../docs/docs/tutorials/noirjs_app.md | 123 ++- noir/noir-repo/docs/docusaurus.config.ts | 1 - .../docs/src/components/Notes/_blackbox.mdx | 2 +- .../getting_started/01_hello_world.md | 2 +- .../data_types/02_booleans.md | 4 +- .../language_concepts/data_types/04_arrays.md | 2 +- .../modules_packages_crates/dependencies.md | 2 +- .../version-v0.19.4/nargo/01_commands.md | 2 +- .../standard_library/black_box_fns.md | 18 +- .../getting_started/backend}/_category_.json | 2 +- .../getting_started/backend/index.md | 31 + .../getting_started/barretenberg/index.md | 47 - .../getting_started/hello_noir/index.md | 59 +- .../hello_noir/project_breakdown.md | 4 +- .../getting_started/installation/index.md | 6 +- .../version-v0.31.0/tutorials/noirjs_app.md | 109 ++- .../getting_started/backend}/_category_.json | 2 +- .../getting_started/backend/index.md | 31 + .../getting_started/barretenberg/index.md | 47 - .../getting_started/hello_noir/index.md | 59 +- .../hello_noir/project_breakdown.md | 3 +- .../getting_started/installation/index.md | 6 +- .../version-v0.32.0/tutorials/noirjs_app.md | 109 ++- 
.../recursion/recurse_leaf/src/main.nr | 7 +- .../recursion/recurse_node/src/main.nr | 7 +- noir/noir-repo/noir_stdlib/src/cmp.nr | 28 + noir/noir-repo/noir_stdlib/src/default.nr | 28 + noir/noir-repo/noir_stdlib/src/hash/mod.nr | 11 +- .../noir_stdlib/src/hash/poseidon2.nr | 2 +- noir/noir-repo/noir_stdlib/src/lib.nr | 7 +- noir/noir-repo/noir_stdlib/src/meta/mod.nr | 34 +- noir/noir-repo/noir_stdlib/src/meta/quoted.nr | 3 + .../src/meta/{type_def.nr => struct_def.nr} | 9 +- .../noir_stdlib/src/meta/trait_def.nr | 21 + noir/noir-repo/noir_stdlib/src/meta/typ.nr | 34 + noir/noir-repo/noir_stdlib/src/prelude.nr | 1 + noir/noir-repo/noir_stdlib/src/uint128.nr | 4 +- noir/noir-repo/scripts/install_bb.sh | 2 +- .../non_comptime_local_fn_call/Nargo.toml | 7 - .../non_comptime_local_fn_call/src/main.nr | 9 - .../Nargo.toml | 7 + .../src/main.nr | 6 + .../Nargo.toml | 7 + .../src/main.nr | 12 + .../arithmetic_generics/Nargo.toml | 7 + .../arithmetic_generics/src/main.nr | 103 +++ .../attribute_args/src/main.nr | 11 +- .../comptime_fmt_strings/Nargo.toml | 7 + .../comptime_fmt_strings/src/main.nr | 15 + .../comptime_trait_constraint/src/main.nr | 8 +- .../comptime_traits/src/main.nr | 4 +- .../comptime_type/Nargo.toml | 7 + .../comptime_type/src/main.nr | 70 ++ .../derive_impl/src/main.nr | 4 +- .../quoted_as_type/Nargo.toml | 7 + .../quoted_as_type/src/main.nr | 21 + .../regression_5671/Nargo.toml | 7 + .../regression_5671/src/main.nr | 20 + .../trait_call_in_global}/Nargo.toml | 2 +- .../trait_call_in_global/src/main.nr | 5 + .../zeroed_slice/Nargo.toml | 7 + .../zeroed_slice/src/main.nr | 3 + .../execution_success/databus/src/main.nr | 2 +- .../execution_success/derive/Nargo.toml | 7 + .../execution_success/derive/src/main.nr | 44 + .../double_verify_nested_proof/src/main.nr | 14 +- .../double_verify_proof/src/main.nr | 14 +- .../double_verify_proof_recursive/src/main.nr | 14 +- .../regression_5615/Nargo.toml | 7 + .../regression_5615/src/main.nr | 12 + .../execution_success/slice_regex/Nargo.toml | 7 + .../execution_success/slice_regex/src/main.nr | 811 ++++++++++++++++++ .../verify_honk_proof/Prover.toml | 4 - .../verify_honk_proof/src/main.nr | 21 - noir/noir-repo/tooling/lsp/src/lib.rs | 46 +- .../tooling/lsp/src/notifications/mod.rs | 52 +- .../lsp/src/requests/code_lens_request.rs | 5 +- .../lsp/src/requests/document_symbol.rs | 2 +- .../tooling/lsp/src/requests/hover.rs | 208 ++++- .../tooling/lsp/src/requests/inlay_hint.rs | 129 ++- .../noir-repo/tooling/lsp/src/requests/mod.rs | 13 +- .../tooling/lsp/src/requests/references.rs | 6 +- .../tooling/lsp/src/requests/test_run.rs | 2 +- .../tooling/lsp/src/requests/tests.rs | 2 +- .../lsp/test_programs/inlay_hints/src/main.nr | 12 + noir/noir-repo/tooling/nargo_cli/build.rs | 2 +- .../tooling/nargo_cli/src/cli/check_cmd.rs | 19 +- .../tooling/nargo_cli/src/cli/compile_cmd.rs | 8 +- .../tooling/nargo_cli/src/cli/export_cmd.rs | 9 +- .../tooling/nargo_cli/src/cli/test_cmd.rs | 19 +- .../tooling/nargo_cli/tests/stdlib-tests.rs | 2 +- .../tooling/nargo_fmt/src/rewrite/expr.rs | 29 +- noir/noir-repo/tooling/nargo_fmt/src/utils.rs | 10 +- .../tooling/nargo_fmt/src/visitor/item.rs | 5 +- .../tooling/nargo_fmt/tests/expected/expr.nr | 4 +- .../tooling/nargo_fmt/tests/input/expr.nr | 2 +- .../noir_js_backend_barretenberg/package.json | 2 +- .../src/backend.ts | 144 +++- .../noir_js_backend_barretenberg/src/index.ts | 4 +- .../src/public_inputs.ts | 10 +- .../src/verifier.ts | 80 +- .../noir-repo/tooling/noirc_abi_wasm/build.sh | 2 +- 
noir/noir-repo/yarn.lock | 13 +- 198 files changed, 7260 insertions(+), 2556 deletions(-) delete mode 100644 noir/noir-repo/.github/ISSUE_TEMPLATE/bug_report.yml delete mode 100644 noir/noir-repo/.github/ISSUE_TEMPLATE/feature_request.yml create mode 100644 noir/noir-repo/acvm-repo/acvm/tests/solver.proptest-regressions create mode 100644 noir/noir-repo/aztec_macros/src/utils/parse_utils.rs create mode 100644 noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs:28:9 create mode 100644 noir/noir-repo/docs/docs/explainers/cspell.json create mode 100644 noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md rename noir/noir-repo/docs/{versioned_docs/version-v0.31.0/getting_started/barretenberg => docs/getting_started/backend}/_category_.json (63%) create mode 100644 noir/noir-repo/docs/docs/getting_started/backend/index.md delete mode 100644 noir/noir-repo/docs/docs/getting_started/barretenberg/index.md rename noir/noir-repo/docs/docs/{getting_started/tooling => reference}/noir_codegen.md (97%) rename noir/noir-repo/docs/versioned_docs/{version-v0.32.0/getting_started/barretenberg => version-v0.31.0/getting_started/backend}/_category_.json (63%) create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/backend/index.md delete mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/barretenberg/index.md rename noir/noir-repo/docs/{docs/getting_started/barretenberg => versioned_docs/version-v0.32.0/getting_started/backend}/_category_.json (63%) create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/backend/index.md delete mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md rename noir/noir-repo/noir_stdlib/src/meta/{type_def.nr => struct_def.nr} (58%) create mode 100644 noir/noir-repo/noir_stdlib/src/meta/typ.nr delete mode 100644 noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml delete mode 100644 noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/comptime_type/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/comptime_type/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/regression_5671/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/regression_5671/src/main.nr rename 
noir/noir-repo/test_programs/{execution_success/verify_honk_proof => compile_success_empty/trait_call_in_global}/Nargo.toml (64%) create mode 100644 noir/noir-repo/test_programs/compile_success_empty/trait_call_in_global/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/src/main.nr create mode 100644 noir/noir-repo/test_programs/execution_success/derive/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/derive/src/main.nr create mode 100644 noir/noir-repo/test_programs/execution_success/regression_5615/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/regression_5615/src/main.nr create mode 100644 noir/noir-repo/test_programs/execution_success/slice_regex/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/slice_regex/src/main.nr delete mode 100644 noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml delete mode 100644 noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr diff --git a/.noir-sync-commit b/.noir-sync-commit index b2216c735d4..9d25821fcce 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -453ed590ae3ae6ee8a8d3113419fc51b825b2538 +d466d491ea50b495be7d5a45a8c3d85771f9b1c0 diff --git a/noir/noir-repo/.github/ISSUE_TEMPLATE/bug_report.yml b/noir/noir-repo/.github/ISSUE_TEMPLATE/bug_report.yml deleted file mode 100644 index 71207793e53..00000000000 --- a/noir/noir-repo/.github/ISSUE_TEMPLATE/bug_report.yml +++ /dev/null @@ -1,120 +0,0 @@ -name: Bug Report -description: Report an unexpected behavior. -labels: ["bug"] -body: - - type: markdown - attributes: - value: | - # Description - Thanks for taking the time to create the Issue and welcome to the Noir community! - - type: textarea - id: aim - attributes: - label: Aim - description: Describe what you tried to achieve. - validations: - required: true - - type: textarea - id: expected - attributes: - label: Expected Behavior - description: Describe what you expected to happen. - validations: - required: true - - type: textarea - id: bug - attributes: - label: Bug - description: Describe the bug. Supply error codes / terminal logs if applicable. - validations: - required: true - - type: textarea - id: reproduction - attributes: - label: To Reproduce - description: Describe the steps to reproduce the behavior. - value: | - 1. - 2. - 3. - 4. - - type: dropdown - id: impact - attributes: - label: Project Impact - description: How does this affect a project you or others are working on? - options: - - "Nice-to-have" - - "Blocker" - - type: textarea - id: impact_context - attributes: - label: Impact Context - description: If a nice-to-have / blocker, supplement how does this Issue affect the project. - - type: dropdown - id: workaround - attributes: - label: Workaround - description: Is there a workaround for this Issue? - options: - - "Yes" - - type: textarea - id: workaround_description - attributes: - label: Workaround Description - description: If yes, supplement how could the Issue be worked around. - - type: textarea - id: additional - attributes: - label: Additional Context - description: Supplement further information if applicable. - - type: markdown - attributes: - value: | - # Environment - Specify your version of Noir tooling used. 
- - type: markdown - attributes: - value: | - ## Nargo (CLI) - - type: dropdown - id: nargo-install - attributes: - label: Installation Method - description: How did you install Nargo? - options: - - Binary (`noirup` default) - - Compiled from source - - type: input - id: nargo-version - attributes: - label: Nargo Version - description: Output of running `nargo --version` - placeholder: "nargo version = 0.23.0 noirc version = 0.23.0+5be9f9d7e2f39ca228df10e5a530474af0331704 (git version hash: 5be9f9d7e2f39ca228df10e5a530474af0331704, is dirty: false)" - - type: markdown - attributes: - value: | - ## NoirJS (JavaScript) - - type: input - id: noirjs-version - attributes: - label: NoirJS Version - description: Version number of `noir_js` in `package.json` - placeholder: "0.23.0" - - type: markdown - attributes: - value: | - # Pull Request - - type: dropdown - id: pr_preference - attributes: - label: Would you like to submit a PR for this Issue? - description: Fellow contributors are happy to provide support where applicable. - options: - - "Maybe" - - "Yes" - - type: textarea - id: pr_support - attributes: - label: Support Needs - description: Support from other contributors you are looking for to create a PR for this Issue. diff --git a/noir/noir-repo/.github/ISSUE_TEMPLATE/feature_request.yml b/noir/noir-repo/.github/ISSUE_TEMPLATE/feature_request.yml deleted file mode 100644 index abbfe392454..00000000000 --- a/noir/noir-repo/.github/ISSUE_TEMPLATE/feature_request.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: Feature Request -description: Suggest an idea for this project. -labels: ["enhancement"] -body: - - type: markdown - attributes: - value: | - ## Description - Thanks for taking the time to create the Issue and welcome to the Noir community! - - type: textarea - id: problem - attributes: - label: Problem - description: Describe what you feel lacking. Supply code / step-by-step examples if applicable. - validations: - required: true - - type: textarea - id: solution - attributes: - label: Happy Case - description: Describe how you think it should work. Supply pseudocode / step-by-step examples if applicable. - validations: - required: true - - type: dropdown - id: impact - attributes: - label: Project Impact - description: How does this affect a project you or others are working on? - options: - - "Nice-to-have" - - "Blocker" - - type: textarea - id: impact_context - attributes: - label: Impact Context - description: If a nice-to-have / blocker, supplement how does this Issue affect the project. - - type: dropdown - id: workaround - attributes: - label: Workaround - description: Is there a workaround for this Issue? - options: - - "Yes" - - type: textarea - id: workaround_description - attributes: - label: Workaround Description - description: If yes, supplement how could the Issue be worked around. - - type: textarea - id: additional - attributes: - label: Additional Context - description: Supplement further information if applicable. - - type: markdown - attributes: - value: | - ## Pull Request - - type: dropdown - id: pr-preference - attributes: - label: Would you like to submit a PR for this Issue? - description: Fellow contributors are happy to provide support where applicable. - multiple: false - options: - - "Maybe" - - "Yes" - - type: textarea - id: pr-support - attributes: - label: Support Needs - description: Support from other contributors you are looking for to create a PR for this Issue. 
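Note for readers: the hunks that follow (Cargo.lock, acir, acvm) add a `proptest` dev-dependency and a new `Expression::width` helper, which the CSAT transformer then reuses inside `fits_in_one_identity`. As a reading aid, here is a minimal, self-contained Rust sketch of the same witness-counting rule; `SimpleExpression` and `main` are illustrative stand-ins, not the real `acir` types or API.

/// Simplified stand-in for an ACIR arithmetic expression: multiplication
/// terms over pairs of witness indices plus the witnesses appearing in
/// linear terms (coefficients omitted, since they do not affect the width).
struct SimpleExpression {
    mul_terms: Vec<(u32, u32)>,
    linear_witnesses: Vec<u32>,
}

impl SimpleExpression {
    /// Number of unique wires ("width") the expression needs, following the
    /// rule in the diff: every linear witness counts once; each mul term adds
    /// 0, 1 or 2 depending on how many of its operands already appear
    /// linearly, and a squaring (x * x) always needs both of its wires.
    fn width(&self) -> usize {
        let mut width = self.linear_witnesses.len();
        for &(x, y) in &self.mul_terms {
            let found_x = self.linear_witnesses.contains(&x);
            let found_y = self.linear_witnesses.contains(&y);
            let is_squaring = x == y;
            width += if !is_squaring && found_x && found_y {
                0 // both operands are already counted by the linear terms
            } else if found_x || found_y {
                1 // one extra wire is still needed
            } else {
                2 // a completely fresh pair of wires
            };
        }
        width
    }
}

fn main() {
    // A*x*y + B*x + C*z: x is shared with the mul term, y is new => width 3.
    let expr = SimpleExpression { mul_terms: vec![(1, 2)], linear_witnesses: vec![1, 3] };
    assert_eq!(expr.width(), 3);
}

The rule mirrors the comments in the diff: linear witnesses each count once, and a multiplication term contributes extra wires only for operands that do not already appear linearly.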
diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index f6011b705e5..bd70c8fef2c 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -48,6 +48,7 @@ dependencies = [ "brillig_vm", "indexmap 1.9.3", "num-bigint", + "proptest", "serde", "thiserror", "tracing", diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index 5d749e709b3..00d0933a3aa 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -377,11 +377,13 @@ mod tests { output: Witness(3), }) } + fn range_opcode() -> Opcode { Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input: FunctionInput::witness(Witness(1), 8), }) } + fn keccakf1600_opcode() -> Opcode { let inputs: Box<[FunctionInput; 25]> = Box::new(std::array::from_fn(|i| FunctionInput::witness(Witness(i as u32 + 1), 8))); @@ -389,6 +391,7 @@ mod tests { Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccakf1600 { inputs, outputs }) } + fn schnorr_verify_opcode() -> Opcode { let public_key_x = FunctionInput::witness(Witness(1), FieldElement::max_num_bits()); let public_key_y = FunctionInput::witness(Witness(2), FieldElement::max_num_bits()); diff --git a/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs b/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs index 1feda5703c8..2bbbc39d0ca 100644 --- a/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/native_types/expression/mod.rs @@ -273,6 +273,60 @@ impl Expression { Expression { mul_terms, linear_combinations, q_c } } + + /// Determine the width of this expression. + /// The width meaning the number of unique witnesses needed for this expression. + pub fn width(&self) -> usize { + let mut width = 0; + + for mul_term in &self.mul_terms { + // The coefficient should be non-zero, as this method is ran after the compiler removes all zero coefficient terms + assert_ne!(mul_term.0, F::zero()); + + let mut found_x = false; + let mut found_y = false; + + for term in self.linear_combinations.iter() { + let witness = &term.1; + let x = &mul_term.1; + let y = &mul_term.2; + if witness == x { + found_x = true; + }; + if witness == y { + found_y = true; + }; + if found_x & found_y { + break; + } + } + + // If the multiplication is a squaring then we must assign the two witnesses to separate wires and so we + // can never get a zero contribution to the width. + let multiplication_is_squaring = mul_term.1 == mul_term.2; + + let mul_term_width_contribution = if !multiplication_is_squaring && (found_x & found_y) + { + // Both witnesses involved in the multiplication exist elsewhere in the expression. + // They both do not contribute to the width of the expression as this would be double-counting + // due to their appearance in the linear terms. + 0 + } else if found_x || found_y { + // One of the witnesses involved in the multiplication exists elsewhere in the expression. + // The multiplication then only contributes 1 new witness to the width. + 1 + } else { + // Worst case scenario, the multiplication is using completely unique witnesses so has a contribution of 2. 
+ 2 + }; + + width += mul_term_width_contribution; + } + + width += self.linear_combinations.len(); + + width + } } impl From for Expression { diff --git a/noir/noir-repo/acvm-repo/acvm/Cargo.toml b/noir/noir-repo/acvm-repo/acvm/Cargo.toml index 5b6397a1011..4cda53de241 100644 --- a/noir/noir-repo/acvm-repo/acvm/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acvm/Cargo.toml @@ -38,3 +38,4 @@ bls12_381 = [ [dev-dependencies] ark-bls12-381 = { version = "^0.4.0", default-features = false, features = ["curve"] } +proptest.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/csat.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/csat.rs index 19cc18ca7f3..f258e0a8818 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/csat.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/csat.rs @@ -415,71 +415,8 @@ fn fits_in_one_identity(expr: &Expression, width: usize) -> boo if expr.mul_terms.len() > 1 { return false; }; - // A Polynomial with more terms than fan-in cannot fit within a single opcode - if expr.linear_combinations.len() > width { - return false; - } - - // A polynomial with no mul term and a fan-in that fits inside of the width can fit into a single opcode - if expr.mul_terms.is_empty() { - return true; - } - - // A polynomial with width-2 fan-in terms and a single non-zero mul term can fit into one opcode - // Example: Axy + Dz . Notice, that the mul term places a constraint on the first two terms, but not the last term - // XXX: This would change if our arithmetic polynomial equation was changed to Axyz for example, but for now it is not. - if expr.linear_combinations.len() <= (width - 2) { - return true; - } - - // We now know that we have a single mul term. We also know that the mul term must match up with at least one of the other terms - // A polynomial whose mul terms are non zero which do not match up with two terms in the fan-in cannot fit into one opcode - // An example of this is: Axy + Bx + Cy + ... - // Notice how the bivariate monomial xy has two univariate monomials with their respective coefficients - // XXX: note that if x or y is zero, then we could apply a further optimization, but this would be done in another algorithm. - // It would be the same as when we have zero coefficients - Can only work if wire is constrained to be zero publicly - let mul_term = &expr.mul_terms[0]; - - // The coefficient should be non-zero, as this method is ran after the compiler removes all zero coefficient terms - assert_ne!(mul_term.0, F::zero()); - - let mut found_x = false; - let mut found_y = false; - - for term in expr.linear_combinations.iter() { - let witness = &term.1; - let x = &mul_term.1; - let y = &mul_term.2; - if witness == x { - found_x = true; - }; - if witness == y { - found_y = true; - }; - if found_x & found_y { - break; - } - } - - // If the multiplication is a squaring then we must assign the two witnesses to separate wires and so we - // can never get a zero contribution to the width. - let multiplication_is_squaring = mul_term.1 == mul_term.2; - - let mul_term_width_contribution = if !multiplication_is_squaring && (found_x & found_y) { - // Both witnesses involved in the multiplication exist elsewhere in the expression. - // They both do not contribute to the width of the expression as this would be double-counting - // due to their appearance in the linear terms. - 0 - } else if found_x || found_y { - // One of the witnesses involved in the multiplication exists elsewhere in the expression. 
- // The multiplication then only contributes 1 new witness to the width. - 1 - } else { - // Worst case scenario, the multiplication is using completely unique witnesses so has a contribution of 2. - 2 - }; - mul_term_width_contribution + expr.linear_combinations.len() <= width + expr.width() <= width } #[cfg(test)] diff --git a/noir/noir-repo/acvm-repo/acvm/tests/solver.proptest-regressions b/noir/noir-repo/acvm-repo/acvm/tests/solver.proptest-regressions new file mode 100644 index 00000000000..35627c1fbae --- /dev/null +++ b/noir/noir-repo/acvm-repo/acvm/tests/solver.proptest-regressions @@ -0,0 +1,13 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc e4dd0e141df173f5dfdfb186bba4154247ec284b71d8f294fa3282da953a0e92 # shrinks to x = 0, y = 1 +cc 419ed6fdf1bf1f2513889c42ec86c665c9d0500ceb075cbbd07f72444dbd78c6 # shrinks to x = 266672725 +cc 0810fc9e126b56cf0a0ddb25e0dc498fa3b2f1980951550403479fc01c209833 # shrinks to modulus = [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48], zero_or_ones_constant = false, use_constant = false +cc 735ee9beb1a1dbb82ded6f30e544d7dfde149957e5d45a8c96fc65a690b6b71c # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) +cc ca81bc11114a2a2b34021f44ecc1e10cb018e35021ef4d728e07a6791dad38d6 # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) +cc 6c1d571a0111e6b4c244dc16da122ebab361e77b71db7770d638076ab21a717b # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) +cc ccb7061ab6b85e2554d00bf03d74204977ed7a4109d7e2d5c6b5aaa2179cfaf9 # shrinks to (xs, modulus) = ([(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), 
(0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (0, false), (49, false)], [71, 253, 124, 216, 22, 140, 32, 60, 141, 202, 113, 104, 145, 106, 129, 151, 93, 88, 129, 129, 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48]) diff --git a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs index e55dbb73ae1..279b0444609 100644 --- a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs +++ b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs @@ -5,7 +5,7 @@ use acir::{ brillig::{BinaryFieldOp, HeapArray, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray}, circuit::{ brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs}, - opcodes::{BlockId, BlockType, MemOp}, + opcodes::{BlackBoxFuncCall, BlockId, BlockType, FunctionInput, MemOp}, Opcode, OpcodeLocation, }, native_types::{Expression, Witness, WitnessMap}, @@ -16,6 +16,10 @@ use acvm::pwg::{ACVMStatus, ErrorLocation, ForeignCallWaitInfo, OpcodeResolution use acvm_blackbox_solver::StubbedBlackBoxSolver; use brillig_vm::brillig::HeapValueType; +use proptest::arbitrary::any; +use proptest::prelude::*; +use proptest::result::maybe_ok; + // Reenable these test cases once we move the brillig implementation of inversion down into the acvm stdlib. #[test] @@ -722,3 +726,187 @@ fn memory_operations() { assert_eq!(witness_map[&Witness(8)], FieldElement::from(6u128)); } + +// Solve the given BlackBoxFuncCall with witnesses: 1, 2 as x, y, resp. +#[cfg(test)] +fn solve_blackbox_func_call( + blackbox_func_call: impl Fn( + Option, + Option, + ) -> BlackBoxFuncCall, + x: (FieldElement, bool), // if false, use a Witness + y: (FieldElement, bool), // if false, use a Witness +) -> FieldElement { + let (x, x_constant) = x; + let (y, y_constant) = y; + + let initial_witness = WitnessMap::from(BTreeMap::from_iter([(Witness(1), x), (Witness(2), y)])); + + let mut lhs = None; + if x_constant { + lhs = Some(x); + } + + let mut rhs = None; + if y_constant { + rhs = Some(y); + } + + let op = Opcode::BlackBoxFuncCall(blackbox_func_call(lhs, rhs)); + let opcodes = vec![op]; + let unconstrained_functions = vec![]; + let mut acvm = + ACVM::new(&StubbedBlackBoxSolver, &opcodes, initial_witness, &unconstrained_functions, &[]); + let solver_status = acvm.solve(); + assert_eq!(solver_status, ACVMStatus::Solved); + let witness_map = acvm.finalize(); + + witness_map[&Witness(3)] +} + +fn function_input_from_option( + witness: Witness, + opt_constant: Option, +) -> FunctionInput { + opt_constant + .map(|constant| FunctionInput::constant(constant, FieldElement::max_num_bits())) + .unwrap_or(FunctionInput::witness(witness, FieldElement::max_num_bits())) +} + +fn and_op(x: Option, y: Option) -> BlackBoxFuncCall { + let lhs = function_input_from_option(Witness(1), x); + let rhs = function_input_from_option(Witness(2), y); + BlackBoxFuncCall::AND { lhs, rhs, output: Witness(3) } +} + +fn xor_op(x: Option, y: Option) -> BlackBoxFuncCall { + let lhs = function_input_from_option(Witness(1), x); + let rhs = function_input_from_option(Witness(2), y); + BlackBoxFuncCall::XOR { lhs, rhs, output: Witness(3) } +} + +fn prop_assert_commutative( + op: impl Fn(Option, Option) -> BlackBoxFuncCall, + x: (FieldElement, bool), + y: (FieldElement, bool), +) -> (FieldElement, FieldElement) { + (solve_blackbox_func_call(&op, x, y), solve_blackbox_func_call(&op, y, x)) +} + +fn prop_assert_associative( + 
op: impl Fn(Option, Option) -> BlackBoxFuncCall, + x: (FieldElement, bool), + y: (FieldElement, bool), + z: (FieldElement, bool), + use_constant_xy: bool, + use_constant_yz: bool, +) -> (FieldElement, FieldElement) { + let f_xy = (solve_blackbox_func_call(&op, x, y), use_constant_xy); + let f_f_xy_z = solve_blackbox_func_call(&op, f_xy, z); + + let f_yz = (solve_blackbox_func_call(&op, y, z), use_constant_yz); + let f_x_f_yz = solve_blackbox_func_call(&op, x, f_yz); + + (f_f_xy_z, f_x_f_yz) +} + +fn prop_assert_identity_l( + op: impl Fn(Option, Option) -> BlackBoxFuncCall, + op_identity: (FieldElement, bool), + x: (FieldElement, bool), +) -> (FieldElement, FieldElement) { + (solve_blackbox_func_call(op, op_identity, x), x.0) +} + +fn prop_assert_zero_l( + op: impl Fn(Option, Option) -> BlackBoxFuncCall, + op_zero: (FieldElement, bool), + x: (FieldElement, bool), +) -> (FieldElement, FieldElement) { + (solve_blackbox_func_call(op, op_zero, x), FieldElement::zero()) +} + +prop_compose! { + // Use both `u128` and hex proptest strategies + fn field_element() + (u128_or_hex in maybe_ok(any::(), "[0-9a-f]{64}"), + constant_input: bool) + -> (FieldElement, bool) + { + match u128_or_hex { + Ok(number) => (FieldElement::from(number), constant_input), + Err(hex) => (FieldElement::from_hex(&hex).expect("should accept any 32 byte hex string"), constant_input), + } + } +} + +fn field_element_ones() -> FieldElement { + let exponent: FieldElement = (253_u128).into(); + FieldElement::from(2u128).pow(&exponent) - FieldElement::one() +} + +proptest! { + + #[test] + fn and_commutative(x in field_element(), y in field_element()) { + let (lhs, rhs) = prop_assert_commutative(and_op, x, y); + prop_assert_eq!(lhs, rhs); + } + + #[test] + fn xor_commutative(x in field_element(), y in field_element()) { + let (lhs, rhs) = prop_assert_commutative(xor_op, x, y); + prop_assert_eq!(lhs, rhs); + } + + #[test] + fn and_associative(x in field_element(), y in field_element(), z in field_element(), use_constant_xy: bool, use_constant_yz: bool) { + let (lhs, rhs) = prop_assert_associative(and_op, x, y, z, use_constant_xy, use_constant_yz); + prop_assert_eq!(lhs, rhs); + } + + #[test] + // TODO(https://github.com/noir-lang/noir/issues/5638) + #[should_panic(expected = "assertion failed: `(left == right)`")] + fn xor_associative(x in field_element(), y in field_element(), z in field_element(), use_constant_xy: bool, use_constant_yz: bool) { + let (lhs, rhs) = prop_assert_associative(xor_op, x, y, z, use_constant_xy, use_constant_yz); + prop_assert_eq!(lhs, rhs); + } + + // test that AND(x, x) == x + #[test] + fn and_self_identity(x in field_element()) { + prop_assert_eq!(solve_blackbox_func_call(and_op, x, x), x.0); + } + + // test that XOR(x, x) == 0 + #[test] + fn xor_self_zero(x in field_element()) { + prop_assert_eq!(solve_blackbox_func_call(xor_op, x, x), FieldElement::zero()); + } + + #[test] + fn and_identity_l(x in field_element(), ones_constant: bool) { + let ones = (field_element_ones(), ones_constant); + let (lhs, rhs) = prop_assert_identity_l(and_op, ones, x); + if x <= ones { + prop_assert_eq!(lhs, rhs); + } else { + prop_assert!(lhs != rhs); + } + } + + #[test] + fn xor_identity_l(x in field_element(), zero_constant: bool) { + let zero = (FieldElement::zero(), zero_constant); + let (lhs, rhs) = prop_assert_identity_l(xor_op, zero, x); + prop_assert_eq!(lhs, rhs); + } + + #[test] + fn and_zero_l(x in field_element(), ones_constant: bool) { + let zero = (FieldElement::zero(), ones_constant); + let (lhs, rhs) = 
prop_assert_zero_l(and_op, zero, x); + prop_assert_eq!(lhs, rhs); + } +} diff --git a/noir/noir-repo/acvm-repo/acvm_js/build.sh b/noir/noir-repo/acvm-repo/acvm_js/build.sh index c07d2d8a4c1..16fb26e55db 100755 --- a/noir/noir-repo/acvm-repo/acvm_js/build.sh +++ b/noir/noir-repo/acvm-repo/acvm_js/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/noir/noir-repo/aztec_macros/src/lib.rs b/noir/noir-repo/aztec_macros/src/lib.rs index 580a132aa5a..ec1d395725d 100644 --- a/noir/noir-repo/aztec_macros/src/lib.rs +++ b/noir/noir-repo/aztec_macros/src/lib.rs @@ -62,18 +62,25 @@ fn transform( file_id: FileId, context: &HirContext, ) -> Result { + let empty_spans = context.def_interner.is_in_lsp_mode(); + // Usage -> mut ast -> aztec_library::transform(&mut ast) // Covers all functions in the ast for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) { - if transform_module(&file_id, &mut submodule.contents, submodule.name.0.contents.as_str()) - .map_err(|err| (err.into(), file_id))? + if transform_module( + &file_id, + &mut submodule.contents, + submodule.name.0.contents.as_str(), + empty_spans, + ) + .map_err(|err| (err.into(), file_id))? { check_for_aztec_dependency(crate_id, context)?; } } - generate_event_impls(&mut ast).map_err(|err| (err.into(), file_id))?; - generate_note_interface_impl(&mut ast).map_err(|err| (err.into(), file_id))?; + generate_event_impls(&mut ast, empty_spans).map_err(|err| (err.into(), file_id))?; + generate_note_interface_impl(&mut ast, empty_spans).map_err(|err| (err.into(), file_id))?; Ok(ast) } @@ -85,6 +92,7 @@ fn transform_module( file_id: &FileId, module: &mut SortedModule, module_name: &str, + empty_spans: bool, ) -> Result { let mut has_transformed_module = false; @@ -99,7 +107,7 @@ fn transform_module( if !check_for_storage_implementation(module, storage_struct_name) { generate_storage_implementation(module, storage_struct_name)?; } - generate_storage_layout(module, storage_struct_name.clone(), module_name)?; + generate_storage_layout(module, storage_struct_name.clone(), module_name, empty_spans)?; } let has_initializer = module.functions.iter().any(|func| { @@ -144,7 +152,7 @@ fn transform_module( let stub_src = stub_function(fn_type, func, is_static); stubs.push((stub_src, Location { file: *file_id, span: func.name_ident().span() })); - export_fn_abi(&mut module.types, func)?; + export_fn_abi(&mut module.types, func, empty_spans)?; transform_function( fn_type, func, @@ -200,7 +208,7 @@ fn transform_module( }); } - generate_contract_interface(module, module_name, &stubs, storage_defined)?; + generate_contract_interface(module, module_name, &stubs, storage_defined, empty_spans)?; } Ok(has_transformed_module) diff --git a/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs b/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs index 40fde39a06f..8983266dab9 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs @@ -3,9 +3,10 @@ use noirc_frontend::ast::{FunctionReturnType, NoirFunction, UnresolvedTypeData}; use noirc_frontend::{ graph::CrateId, macros_api::{FileId, HirContext}, - 
parse_program, Type, + Type, }; +use crate::utils::parse_utils::parse_program; use crate::utils::{ errors::AztecMacroError, hir_utils::{ @@ -125,8 +126,12 @@ pub fn inject_compute_note_hash_and_optionally_a_nullifier( notes_and_lengths.iter().map(|(note_type, _)| note_type.clone()).collect::>(); // We can now generate a version of compute_note_hash_and_optionally_a_nullifier tailored for the contract in this crate. - let func = - generate_compute_note_hash_and_optionally_a_nullifier(¬e_types, max_note_length); + let empty_spans = context.def_interner.is_in_lsp_mode(); + let func = generate_compute_note_hash_and_optionally_a_nullifier( + ¬e_types, + max_note_length, + empty_spans, + ); // And inject the newly created function into the contract. @@ -149,11 +154,12 @@ pub fn inject_compute_note_hash_and_optionally_a_nullifier( fn generate_compute_note_hash_and_optionally_a_nullifier( note_types: &[String], max_note_length: u128, + empty_spans: bool, ) -> NoirFunction { let function_source = generate_compute_note_hash_and_optionally_a_nullifier_source(note_types, max_note_length); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors.clone()); } diff --git a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs index 56107de77c5..dd3ec7f6a75 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs @@ -5,13 +5,13 @@ use noirc_frontend::ast::{Ident, NoirFunction, UnresolvedTypeData}; use noirc_frontend::{ graph::CrateId, macros_api::{FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement}, - parse_program, parser::SortedModule, Type, }; use tiny_keccak::{Hasher, Keccak}; +use crate::utils::parse_utils::parse_program; use crate::utils::{ errors::AztecMacroError, hir_utils::{collect_crate_structs, get_contract_module_data, signature_of_type}, @@ -203,6 +203,7 @@ pub fn generate_contract_interface( module_name: &str, stubs: &[(String, Location)], has_storage_layout: bool, + empty_spans: bool, ) -> Result<(), AztecMacroError> { let storage_layout_getter = format!( "#[contract_library_method] @@ -253,7 +254,7 @@ pub fn generate_contract_interface( if has_storage_layout { format!("#[contract_library_method]\n{}", storage_layout_getter) } else { "".to_string() } ); - let (contract_interface_ast, errors) = parse_program(&contract_interface); + let (contract_interface_ast, errors) = parse_program(&contract_interface, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotGenerateContractInterface { secondary_message: Some("Failed to parse Noir macro code during contract interface generation. 
This is either a bug in the compiler or the Noir macro code".to_string()), }); diff --git a/noir/noir-repo/aztec_macros/src/transforms/events.rs b/noir/noir-repo/aztec_macros/src/transforms/events.rs index ecfca40189d..8b71bd77ae6 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/events.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/events.rs @@ -4,16 +4,19 @@ use noirc_frontend::token::SecondaryAttribute; use noirc_frontend::{ graph::CrateId, macros_api::{FileId, HirContext}, - parse_program, parser::SortedModule, }; use crate::utils::hir_utils::collect_crate_structs; +use crate::utils::parse_utils::parse_program; use crate::utils::{ast_utils::is_custom_attribute, errors::AztecMacroError}; // Automatic implementation of most of the methods in the EventInterface trait, guiding the user with meaningful error messages in case some // methods must be implemented manually. -pub fn generate_event_impls(module: &mut SortedModule) -> Result<(), AztecMacroError> { +pub fn generate_event_impls( + module: &mut SortedModule, + empty_spans: bool, +) -> Result<(), AztecMacroError> { // Find structs annotated with #[aztec(event)] // Why doesn't this work ? Events are not tagged and do not appear, it seems only going through the submodule works // let annotated_event_structs = module @@ -56,28 +59,39 @@ pub fn generate_event_impls(module: &mut SortedModule) -> Result<(), AztecMacroE )); } - let mut event_interface_trait_impl = - generate_trait_impl_stub_event_interface(event_type.as_str(), event_byte_len)?; + let mut event_interface_trait_impl = generate_trait_impl_stub_event_interface( + event_type.as_str(), + event_byte_len, + empty_spans, + )?; event_interface_trait_impl.items.push(TraitImplItem::Function( - generate_fn_get_event_type_id(event_type.as_str(), event_len)?, + generate_fn_get_event_type_id(event_type.as_str(), event_len, empty_spans)?, )); event_interface_trait_impl.items.push(TraitImplItem::Function( - generate_fn_private_to_be_bytes(event_type.as_str(), event_byte_len)?, + generate_fn_private_to_be_bytes(event_type.as_str(), event_byte_len, empty_spans)?, )); event_interface_trait_impl.items.push(TraitImplItem::Function( - generate_fn_to_be_bytes(event_type.as_str(), event_byte_len)?, + generate_fn_to_be_bytes(event_type.as_str(), event_byte_len, empty_spans)?, )); event_interface_trait_impl .items - .push(TraitImplItem::Function(generate_fn_emit(event_type.as_str())?)); + .push(TraitImplItem::Function(generate_fn_emit(event_type.as_str(), empty_spans)?)); submodule.contents.trait_impls.push(event_interface_trait_impl); - let serialize_trait_impl = - generate_trait_impl_serialize(event_type.as_str(), event_len, &event_fields)?; + let serialize_trait_impl = generate_trait_impl_serialize( + event_type.as_str(), + event_len, + &event_fields, + empty_spans, + )?; submodule.contents.trait_impls.push(serialize_trait_impl); - let deserialize_trait_impl = - generate_trait_impl_deserialize(event_type.as_str(), event_len, &event_fields)?; + let deserialize_trait_impl = generate_trait_impl_deserialize( + event_type.as_str(), + event_len, + &event_fields, + empty_spans, + )?; submodule.contents.trait_impls.push(deserialize_trait_impl); } } @@ -88,6 +102,7 @@ pub fn generate_event_impls(module: &mut SortedModule) -> Result<(), AztecMacroE fn generate_trait_impl_stub_event_interface( event_type: &str, byte_length: u32, + empty_spans: bool, ) -> Result { let byte_length_without_randomness = byte_length - 32; let trait_impl_source = format!( @@ -98,7 +113,7 @@ impl 
dep::aztec::event::event_interface::EventInterface<{byte_length}, {byte_len ) .to_string(); - let (parsed_ast, errors) = parse_program(&trait_impl_source); + let (parsed_ast, errors) = parse_program(&trait_impl_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementEventInterface { @@ -116,6 +131,7 @@ fn generate_trait_impl_serialize( event_type: &str, event_len: u32, event_fields: &[(String, String)], + empty_spans: bool, ) -> Result { let field_names = event_fields .iter() @@ -143,7 +159,7 @@ fn generate_trait_impl_serialize( ) .to_string(); - let (parsed_ast, errors) = parse_program(&trait_impl_source); + let (parsed_ast, errors) = parse_program(&trait_impl_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementEventInterface { @@ -161,6 +177,7 @@ fn generate_trait_impl_deserialize( event_type: &str, event_len: u32, event_fields: &[(String, String)], + empty_spans: bool, ) -> Result { let field_names: Vec = event_fields .iter() @@ -189,7 +206,7 @@ fn generate_trait_impl_deserialize( ) .to_string(); - let (parsed_ast, errors) = parse_program(&trait_impl_source); + let (parsed_ast, errors) = parse_program(&trait_impl_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementEventInterface { @@ -206,6 +223,7 @@ fn generate_trait_impl_deserialize( fn generate_fn_get_event_type_id( event_type: &str, field_length: u32, + empty_spans: bool, ) -> Result { let from_signature_input = std::iter::repeat("Field").take(field_length as usize).collect::>().join(","); @@ -218,7 +236,7 @@ fn generate_fn_get_event_type_id( ) .to_string(); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementEventInterface { @@ -235,6 +253,7 @@ fn generate_fn_get_event_type_id( fn generate_fn_private_to_be_bytes( event_type: &str, byte_length: u32, + empty_spans: bool, ) -> Result { let function_source = format!( " @@ -264,7 +283,7 @@ fn generate_fn_private_to_be_bytes( ) .to_string(); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementEventInterface { @@ -281,6 +300,7 @@ fn generate_fn_private_to_be_bytes( fn generate_fn_to_be_bytes( event_type: &str, byte_length: u32, + empty_spans: bool, ) -> Result { let byte_length_without_randomness = byte_length - 32; let function_source = format!( @@ -308,7 +328,7 @@ fn generate_fn_to_be_bytes( ") .to_string(); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementEventInterface { @@ -322,7 +342,7 @@ fn generate_fn_to_be_bytes( Ok(noir_fn) } -fn generate_fn_emit(event_type: &str) -> Result { +fn generate_fn_emit(event_type: &str, empty_spans: bool) -> Result { let function_source = format!( " fn emit(self: {event_type}, _emit: fn[Env](Self) -> ()) {{ @@ -332,7 +352,7 @@ fn generate_fn_emit(event_type: &str) -> Result { ) .to_string(); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return 
Err(AztecMacroError::CouldNotImplementEventInterface { diff --git a/noir/noir-repo/aztec_macros/src/transforms/functions.rs b/noir/noir-repo/aztec_macros/src/transforms/functions.rs index 4d8b6ef7cdf..cd3fdd1fc62 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/functions.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/functions.rs @@ -8,16 +8,18 @@ use noirc_frontend::ast::{ UnresolvedTypeData, Visibility, }; -use noirc_frontend::{macros_api::FieldElement, parse_program}; +use noirc_frontend::macros_api::FieldElement; use crate::utils::ast_utils::member_access; +use crate::utils::parse_utils::parse_program; use crate::{ chained_dep, chained_path, utils::{ ast_utils::{ assignment, assignment_with_type, call, cast, expression, ident, ident_path, index_array, make_eq, make_statement, make_type, method_call, mutable_assignment, - mutable_reference, path, return_type, variable, variable_ident, variable_path, + mutable_reference, path, path_segment, return_type, variable, variable_ident, + variable_path, }, errors::AztecMacroError, }, @@ -131,6 +133,7 @@ pub fn transform_function( pub fn export_fn_abi( types: &mut Vec, func: &NoirFunction, + empty_spans: bool, ) -> Result<(), AztecMacroError> { let mut parameters_struct_source: Option<&str> = None; @@ -197,7 +200,7 @@ pub fn export_fn_abi( program.push_str(&export_struct_source); - let (ast, errors) = parse_program(&program); + let (ast, errors) = parse_program(&program, empty_spans); if !errors.is_empty() { return Err(AztecMacroError::CouldNotExportFunctionAbi { span: None, @@ -722,8 +725,8 @@ fn add_struct_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { fn str_to_bytes(identifier: &Ident) -> (Statement, Ident) { // let identifier_as_bytes = identifier.as_bytes(); let var = variable_ident(identifier.clone()); - let contents = if let ExpressionKind::Variable(p, _) = &var.kind { - p.segments.first().cloned().unwrap_or_else(|| panic!("No segments")).0.contents + let contents = if let ExpressionKind::Variable(p) = &var.kind { + p.first_name() } else { panic!("Unexpected identifier type") }; diff --git a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs index 3233e12ab73..6fccded45ef 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs @@ -6,7 +6,6 @@ use noirc_frontend::ast::{ use noirc_frontend::{ graph::CrateId, macros_api::{FileId, HirContext, HirExpression, HirLiteral, HirStatement}, - parse_program, parser::SortedModule, Type, }; @@ -16,11 +15,13 @@ use regex::Regex; // TODO(#7165): nuke the following dependency from here and Cargo.toml use tiny_keccak::{Hasher, Keccak}; +use crate::utils::parse_utils::parse_program; use crate::{ chained_dep, utils::{ ast_utils::{ check_trait_method_implemented, ident, ident_path, is_custom_attribute, make_type, + path_segment, }, errors::AztecMacroError, hir_utils::{fetch_notes, get_contract_module_data, inject_global}, @@ -29,7 +30,10 @@ use crate::{ // Automatic implementation of most of the methods in the NoteInterface trait, guiding the user with meaningful error messages in case some // methods must be implemented manually. 
-pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), AztecMacroError> { +pub fn generate_note_interface_impl( + module: &mut SortedModule, + empty_spans: bool, +) -> Result<(), AztecMacroError> { // Find structs annotated with #[aztec(note)] let annotated_note_structs = module .types @@ -45,8 +49,8 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt .iter_mut() .find(|trait_impl| { if let UnresolvedTypeData::Named(struct_path, _, _) = &trait_impl.object_type.typ { - struct_path.last_segment() == note_struct.name - && trait_impl.trait_name.last_segment().0.contents == "NoteInterface" + struct_path.last_ident() == note_struct.name + && trait_impl.trait_name.last_name() == "NoteInterface" } else { false } @@ -58,10 +62,11 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt note_struct.name.0.contents )), })?; - let note_interface_impl_span: Option = trait_impl.object_type.span; + let note_interface_impl_span: Option = + if empty_spans { None } else { trait_impl.object_type.span }; // Look for the note struct implementation, generate a default one if it doesn't exist (in order to append methods to it) let existing_impl = module.impls.iter_mut().find(|r#impl| match &r#impl.object_type.typ { - UnresolvedTypeData::Named(path, _, _) => path.last_segment().eq(¬e_struct.name), + UnresolvedTypeData::Named(path, _, _) => path.last_ident().eq(¬e_struct.name), _ => false, }); let note_impl = if let Some(note_impl) = existing_impl { @@ -73,7 +78,6 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt generics: vec![], methods: vec![], where_clause: vec![], - is_comptime: false, }; module.impls.push(default_impl.clone()); module.impls.last_mut().unwrap() @@ -85,9 +89,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt .trait_generics .iter() .map(|gen| match gen.typ.clone() { - UnresolvedTypeData::Named(path, _, _) => { - Ok(path.last_segment().0.contents.to_string()) - } + UnresolvedTypeData::Named(path, _, _) => Ok(path.last_name().to_string()), UnresolvedTypeData::Expression(UnresolvedTypeExpression::Constant(val, _)) => { Ok(val.to_string()) } @@ -108,9 +110,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt // Automatically inject the header field if it's not present let (header_field_name, _) = if let Some(existing_header) = note_struct.fields.iter().find(|(_, field_type)| match &field_type.typ { - UnresolvedTypeData::Named(path, _, _) => { - path.last_segment().0.contents == "NoteHeader" - } + UnresolvedTypeData::Named(path, _, _) => path.last_name() == "NoteHeader", _ => false, }) { existing_header.clone() @@ -144,6 +144,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt ¬e_serialized_len, &header_field_name.0.contents, note_interface_impl_span, + empty_spans, )?; trait_impl.items.push(TraitImplItem::Function(note_serialize_content_fn)); @@ -153,6 +154,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt ¬e_serialized_len, &header_field_name.0.contents, note_interface_impl_span, + empty_spans, )?; trait_impl.items.push(TraitImplItem::Function(note_deserialize_content_fn)); @@ -161,6 +163,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt ¬e_fields, &header_field_name.0.contents, note_interface_impl_span, + empty_spans, )?; structs_to_inject.push(note_properties_struct); let note_properties_fn = 
generate_note_properties_fn( @@ -168,6 +171,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt ¬e_fields, &header_field_name.0.contents, note_interface_impl_span, + empty_spans, )?; note_impl.methods.push((note_properties_fn, note_impl.type_span)); } @@ -177,6 +181,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt ¬e_type, &header_field_name.0.contents, note_interface_impl_span, + empty_spans, )?; trait_impl.items.push(TraitImplItem::Function(get_header_fn)); } @@ -185,6 +190,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt ¬e_type, &header_field_name.0.contents, note_interface_impl_span, + empty_spans, )?; trait_impl.items.push(TraitImplItem::Function(set_header_fn)); } @@ -192,13 +198,16 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt if !check_trait_method_implemented(trait_impl, "get_note_type_id") { let note_type_id = compute_note_type_id(¬e_type); let get_note_type_id_fn = - generate_get_note_type_id(note_type_id, note_interface_impl_span)?; + generate_get_note_type_id(note_type_id, note_interface_impl_span, empty_spans)?; trait_impl.items.push(TraitImplItem::Function(get_note_type_id_fn)); } if !check_trait_method_implemented(trait_impl, "compute_note_content_hash") { - let compute_note_content_hash_fn = - generate_compute_note_content_hash(¬e_type, note_interface_impl_span)?; + let compute_note_content_hash_fn = generate_compute_note_content_hash( + ¬e_type, + note_interface_impl_span, + empty_spans, + )?; trait_impl.items.push(TraitImplItem::Function(compute_note_content_hash_fn)); } @@ -208,6 +217,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt note_bytes_len.as_str(), note_serialized_len.as_str(), note_interface_impl_span, + empty_spans, )?; trait_impl.items.push(TraitImplItem::Function(to_be_bytes_fn)); } @@ -222,6 +232,7 @@ fn generate_note_to_be_bytes( byte_length: &str, serialized_length: &str, impl_span: Option, + empty_spans: bool, ) -> Result { let function_source = format!( " @@ -252,7 +263,7 @@ fn generate_note_to_be_bytes( ) .to_string(); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -263,7 +274,7 @@ fn generate_note_to_be_bytes( let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -272,6 +283,7 @@ fn generate_note_get_header( note_type: &String, note_header_field_name: &String, impl_span: Option, + empty_spans: bool, ) -> Result { let function_source = format!( " @@ -283,7 +295,7 @@ fn generate_note_get_header( ) .to_string(); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -294,7 +306,7 @@ fn generate_note_get_header( let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -303,6 +315,7 @@ fn 
generate_note_set_header( note_type: &String, note_header_field_name: &String, impl_span: Option, + empty_spans: bool, ) -> Result { let function_source = format!( " @@ -313,7 +326,7 @@ fn generate_note_set_header( note_type, note_header_field_name ); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -324,7 +337,7 @@ fn generate_note_set_header( let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -334,6 +347,7 @@ fn generate_note_set_header( fn generate_get_note_type_id( note_type_id: u32, impl_span: Option, + empty_spans: bool, ) -> Result { // TODO(#7165): replace {} with dep::aztec::protocol_types::abis::note_selector::compute_note_selector(\"{}\") in the function source below let function_source = format!( @@ -346,7 +360,7 @@ fn generate_get_note_type_id( ) .to_string(); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -357,7 +371,7 @@ fn generate_get_note_type_id( let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -376,11 +390,12 @@ fn generate_note_properties_struct( note_fields: &[(String, String)], note_header_field_name: &String, impl_span: Option, + empty_spans: bool, ) -> Result { let struct_source = generate_note_properties_struct_source(note_type, note_fields, note_header_field_name); - let (struct_ast, errors) = parse_program(&struct_source); + let (struct_ast, errors) = parse_program(&struct_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -409,6 +424,7 @@ fn generate_note_deserialize_content( note_serialize_len: &String, note_header_field_name: &String, impl_span: Option, + empty_spans: bool, ) -> Result { let function_source = generate_note_deserialize_content_source( note_type, @@ -417,7 +433,7 @@ fn generate_note_deserialize_content( note_header_field_name, ); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -428,7 +444,7 @@ fn generate_note_deserialize_content( let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -446,6 +462,7 @@ fn generate_note_serialize_content( note_serialize_len: &String, note_header_field_name: &String, impl_span: Option, + empty_spans: bool, ) -> Result { let function_source = generate_note_serialize_content_source( note_type, @@ -454,7 +471,7 @@ fn generate_note_serialize_content( note_header_field_name, ); - let (function_ast, errors) = parse_program(&function_source); + let 
(function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -465,7 +482,7 @@ fn generate_note_serialize_content( let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -476,10 +493,11 @@ fn generate_note_properties_fn( note_fields: &[(String, String)], note_header_field_name: &String, impl_span: Option, + empty_spans: bool, ) -> Result { let function_source = generate_note_properties_fn_source(note_type, note_fields, note_header_field_name); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -489,7 +507,7 @@ fn generate_note_properties_fn( } let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -502,6 +520,7 @@ fn generate_note_properties_fn( fn generate_compute_note_content_hash( note_type: &String, impl_span: Option, + empty_spans: bool, ) -> Result { let function_source = format!( " @@ -511,7 +530,7 @@ fn generate_compute_note_content_hash( ", note_type ); - let (function_ast, errors) = parse_program(&function_source); + let (function_ast, errors) = parse_program(&function_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -521,7 +540,7 @@ fn generate_compute_note_content_hash( } let mut function_ast = function_ast.into_sorted(); let mut noir_fn = function_ast.functions.remove(0); - noir_fn.def.span = impl_span.unwrap(); + noir_fn.def.span = impl_span.unwrap_or_default(); noir_fn.def.visibility = ItemVisibility::Public; Ok(noir_fn) } @@ -529,6 +548,7 @@ fn generate_compute_note_content_hash( fn generate_note_exports_global( note_type: &str, note_type_id: &str, + empty_spans: bool, ) -> Result { let struct_source = format!( " @@ -541,7 +561,7 @@ fn generate_note_exports_global( ) .to_string(); - let (global_ast, errors) = parse_program(&struct_source); + let (global_ast, errors) = parse_program(&struct_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotImplementNoteInterface { @@ -783,9 +803,11 @@ pub fn inject_note_exports( file_id, )), }?; + let empty_spans = context.def_interner.is_in_lsp_mode(); let global = generate_note_exports_global( ¬e.borrow().name.0.contents, ¬e_type_id.to_hex(), + empty_spans, ) .map_err(|err| (err, file_id))?; diff --git a/noir/noir-repo/aztec_macros/src/transforms/storage.rs b/noir/noir-repo/aztec_macros/src/transforms/storage.rs index 1c6ef634070..dacea1a95e3 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/storage.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/storage.rs @@ -10,18 +10,18 @@ use noirc_frontend::{ FieldElement, FileId, HirContext, HirExpression, HirLiteral, HirStatement, NodeInterner, }, node_interner::TraitId, - parse_program, parser::SortedModule, token::SecondaryAttribute, Type, }; +use crate::utils::parse_utils::parse_program; use crate::{ chained_path, utils::{ ast_utils::{ call, expression, ident, 
ident_path, is_custom_attribute, lambda, make_statement, - make_type, pattern, return_type, variable, variable_path, + make_type, path_segment, pattern, return_type, variable, variable_path, }, errors::AztecMacroError, hir_utils::{ @@ -59,7 +59,7 @@ fn inject_context_in_storage_field(field: &mut UnresolvedType) -> Result<(), Azt vec![], false, ))); - match path.segments.last().unwrap().0.contents.as_str() { + match path.last_name() { "Map" => inject_context_in_storage_field(&mut generics[1]), _ => Ok(()), } @@ -106,9 +106,7 @@ pub fn check_for_storage_implementation( storage_struct_name: &String, ) -> bool { module.impls.iter().any(|r#impl| match &r#impl.object_type.typ { - UnresolvedTypeData::Named(path, _, _) => { - path.segments.last().is_some_and(|segment| segment.0.contents == *storage_struct_name) - } + UnresolvedTypeData::Named(path, _, _) => path.last_name() == *storage_struct_name, _ => false, }) } @@ -123,8 +121,8 @@ pub fn generate_storage_field_constructor( match typ { UnresolvedTypeData::Named(path, generics, _) => { let mut new_path = path.clone().to_owned(); - new_path.segments.push(ident("new")); - match path.segments.last().unwrap().0.contents.as_str() { + new_path.segments.push(path_segment("new")); + match path.last_name() { "Map" => Ok(call( variable_path(new_path), vec![ @@ -248,7 +246,6 @@ pub fn generate_storage_implementation( methods: vec![(init, Span::default())], where_clause: vec![], - is_comptime: false, }; module.impls.push(storage_impl); @@ -501,6 +498,7 @@ pub fn generate_storage_layout( module: &mut SortedModule, storage_struct_name: String, module_name: &str, + empty_spans: bool, ) -> Result<(), AztecMacroError> { let definition = module .types @@ -533,7 +531,7 @@ pub fn generate_storage_layout( storable_fields_impl.join(",\n") ); - let (struct_ast, errors) = parse_program(&storage_fields_source); + let (struct_ast, errors) = parse_program(&storage_fields_source, empty_spans); if !errors.is_empty() { dbg!(errors); return Err(AztecMacroError::CouldNotExportStorageLayout { diff --git a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs b/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs index 4467c4bca4b..a74ec5b777a 100644 --- a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs +++ b/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs @@ -2,8 +2,8 @@ use noirc_errors::{Span, Spanned}; use noirc_frontend::ast::{ BinaryOpKind, CallExpression, CastExpression, Expression, ExpressionKind, FunctionReturnType, Ident, IndexExpression, InfixExpression, Lambda, LetStatement, MemberAccessExpression, - MethodCallExpression, NoirTraitImpl, Path, Pattern, PrefixExpression, Statement, StatementKind, - TraitImplItem, UnaryOp, UnresolvedType, UnresolvedTypeData, + MethodCallExpression, NoirTraitImpl, Path, PathSegment, Pattern, PrefixExpression, Statement, + StatementKind, TraitImplItem, UnaryOp, UnresolvedType, UnresolvedTypeData, }; use noirc_frontend::token::SecondaryAttribute; @@ -18,6 +18,10 @@ pub fn ident_path(name: &str) -> Path { Path::from_ident(ident(name)) } +pub fn path_segment(name: &str) -> PathSegment { + PathSegment::from(ident(name)) +} + pub fn path(ident: Ident) -> Path { Path::from_ident(ident) } @@ -27,15 +31,15 @@ pub fn expression(kind: ExpressionKind) -> Expression { } pub fn variable(name: &str) -> Expression { - expression(ExpressionKind::Variable(ident_path(name), None)) + expression(ExpressionKind::Variable(ident_path(name))) } pub fn variable_ident(identifier: Ident) -> Expression { - expression(ExpressionKind::Variable(path(identifier), 
None)) + expression(ExpressionKind::Variable(path(identifier))) } pub fn variable_path(path: Path) -> Expression { - expression(ExpressionKind::Variable(path, None)) + expression(ExpressionKind::Variable(path)) } pub fn method_call( @@ -149,7 +153,7 @@ macro_rules! chained_path { { let mut base_path = ident_path($base); $( - base_path.segments.push(ident($tail)); + base_path.segments.push(path_segment($tail)); )* base_path } @@ -163,7 +167,7 @@ macro_rules! chained_dep { let mut base_path = ident_path($base); base_path.kind = PathKind::Plain; $( - base_path.segments.push(ident($tail)); + base_path.segments.push(path_segment($tail)); )* base_path } diff --git a/noir/noir-repo/aztec_macros/src/utils/hir_utils.rs b/noir/noir-repo/aztec_macros/src/utils/hir_utils.rs index 200ce3099cb..0a8ce371708 100644 --- a/noir/noir-repo/aztec_macros/src/utils/hir_utils.rs +++ b/noir/noir-repo/aztec_macros/src/utils/hir_utils.rs @@ -195,7 +195,7 @@ pub fn inject_fn( let trait_id = None; items.functions.push(UnresolvedFunctions { file_id, functions, trait_id, self_type: None }); - let mut errors = Elaborator::elaborate(context, *crate_id, items, None); + let mut errors = Elaborator::elaborate(context, *crate_id, items, None, false); errors.retain(|(error, _)| !CustomDiagnostic::from(error).is_warning()); if !errors.is_empty() { @@ -241,7 +241,7 @@ pub fn inject_global( let mut items = CollectedItems::default(); items.globals.push(UnresolvedGlobal { file_id, module_id, global_id, stmt_def: global }); - let _errors = Elaborator::elaborate(context, *crate_id, items, None); + let _errors = Elaborator::elaborate(context, *crate_id, items, None, false); } pub fn fully_qualified_note_path(context: &HirContext, note_id: StructId) -> Option { diff --git a/noir/noir-repo/aztec_macros/src/utils/mod.rs b/noir/noir-repo/aztec_macros/src/utils/mod.rs index c8914f83025..6809fe9f154 100644 --- a/noir/noir-repo/aztec_macros/src/utils/mod.rs +++ b/noir/noir-repo/aztec_macros/src/utils/mod.rs @@ -3,3 +3,4 @@ pub mod checks; pub mod constants; pub mod errors; pub mod hir_utils; +pub mod parse_utils; diff --git a/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs b/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs new file mode 100644 index 00000000000..a2c177026c4 --- /dev/null +++ b/noir/noir-repo/aztec_macros/src/utils/parse_utils.rs @@ -0,0 +1,534 @@ +use noirc_frontend::{ + ast::{ + ArrayLiteral, AssignStatement, BlockExpression, CallExpression, CastExpression, + ConstrainStatement, ConstructorExpression, Expression, ExpressionKind, ForLoopStatement, + ForRange, FunctionReturnType, Ident, IfExpression, IndexExpression, InfixExpression, + LValue, Lambda, LetStatement, Literal, MemberAccessExpression, MethodCallExpression, + ModuleDeclaration, NoirFunction, NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Path, + PathSegment, Pattern, PrefixExpression, Statement, StatementKind, TraitImplItem, TraitItem, + TypeImpl, UnresolvedGeneric, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, + UnresolvedTypeData, UnresolvedTypeExpression, UseTree, UseTreeKind, + }, + parser::{Item, ItemKind, ParsedSubModule, ParserError}, + ParsedModule, +}; + +/// Parses a program and will clear out (set them to a default) any spans in it if `empty_spans` is true. +/// We want to do this in code generated by macros when running in LSP mode so that the generated +/// code doesn't end up overlapping real code, messing with how inlay hints, hover, etc., work. 
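+///
+/// A usage sketch (illustrative only; `generated_source` is a stand-in for whatever macro-generated
+/// snippet is being parsed). It mirrors how `inject_note_exports` in note_interface.rs derives the flag:
+///
+/// ```ignore
+/// let empty_spans = context.def_interner.is_in_lsp_mode();
+/// let (ast, errors) = parse_program(&generated_source, empty_spans);
+/// ```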
+pub fn parse_program(source_program: &str, empty_spans: bool) -> (ParsedModule, Vec) { + let (mut parsed_program, errors) = noirc_frontend::parse_program(source_program); + if empty_spans { + empty_parsed_module(&mut parsed_program); + } + (parsed_program, errors) +} + +fn empty_parsed_module(parsed_module: &mut ParsedModule) { + for item in parsed_module.items.iter_mut() { + empty_item(item); + } +} + +fn empty_item(item: &mut Item) { + item.span = Default::default(); + + match &mut item.kind { + ItemKind::Function(noir_function) => empty_noir_function(noir_function), + ItemKind::Trait(noir_trait) => { + empty_noir_trait(noir_trait); + } + ItemKind::TraitImpl(noir_trait_impl) => { + empty_noir_trait_impl(noir_trait_impl); + } + ItemKind::Impl(type_impl) => { + empty_type_impl(type_impl); + } + ItemKind::Global(let_statement) => empty_let_statement(let_statement), + ItemKind::Submodules(parsed_submodule) => { + empty_parsed_submodule(parsed_submodule); + } + ItemKind::ModuleDecl(module_declaration) => empty_module_declaration(module_declaration), + ItemKind::Import(use_tree) => empty_use_tree(use_tree), + ItemKind::Struct(noir_struct) => empty_noir_struct(noir_struct), + ItemKind::TypeAlias(noir_type_alias) => empty_noir_type_alias(noir_type_alias), + } +} + +fn empty_noir_trait(noir_trait: &mut NoirTrait) { + noir_trait.span = Default::default(); + + empty_ident(&mut noir_trait.name); + empty_unresolved_generics(&mut noir_trait.generics); + empty_unresolved_trait_constraints(&mut noir_trait.where_clause); + for item in noir_trait.items.iter_mut() { + empty_trait_item(item); + } +} + +fn empty_noir_trait_impl(noir_trait_impl: &mut NoirTraitImpl) { + empty_path(&mut noir_trait_impl.trait_name); + empty_unresolved_generics(&mut noir_trait_impl.impl_generics); + empty_unresolved_type(&mut noir_trait_impl.object_type); + empty_unresolved_trait_constraints(&mut noir_trait_impl.where_clause); + for item in noir_trait_impl.items.iter_mut() { + empty_trait_impl_item(item); + } +} + +fn empty_type_impl(type_impl: &mut TypeImpl) { + empty_unresolved_type(&mut type_impl.object_type); + type_impl.type_span = Default::default(); + empty_unresolved_generics(&mut type_impl.generics); + empty_unresolved_trait_constraints(&mut type_impl.where_clause); + for (noir_function, _) in type_impl.methods.iter_mut() { + empty_noir_function(noir_function); + } +} + +fn empty_noir_function(noir_function: &mut NoirFunction) { + let def = &mut noir_function.def; + + def.span = Default::default(); + empty_ident(&mut def.name); + empty_unresolved_generics(&mut def.generics); + + for param in def.parameters.iter_mut() { + param.span = Default::default(); + empty_unresolved_type(&mut param.typ); + empty_pattern(&mut param.pattern); + } + + empty_unresolved_trait_constraints(&mut def.where_clause); + empty_function_return_type(&mut def.return_type); + empty_block_expression(&mut def.body); +} + +fn empty_trait_item(trait_item: &mut TraitItem) { + match trait_item { + TraitItem::Function { name, generics, parameters, return_type, where_clause, body } => { + empty_ident(name); + empty_unresolved_generics(generics); + for (name, typ) in parameters.iter_mut() { + empty_ident(name); + empty_unresolved_type(typ); + } + empty_function_return_type(return_type); + for trait_constraint in where_clause.iter_mut() { + empty_unresolved_trait_constraint(trait_constraint); + } + if let Some(body) = body { + empty_block_expression(body); + } + } + TraitItem::Constant { name, typ, default_value } => { + empty_ident(name); + 
empty_unresolved_type(typ); + if let Some(default_value) = default_value { + empty_expression(default_value); + } + } + TraitItem::Type { name } => { + empty_ident(name); + } + } +} + +fn empty_trait_impl_item(trait_impl_item: &mut TraitImplItem) { + match trait_impl_item { + TraitImplItem::Function(noir_function) => empty_noir_function(noir_function), + TraitImplItem::Constant(name, typ, default_value) => { + empty_ident(name); + empty_unresolved_type(typ); + empty_expression(default_value); + } + TraitImplItem::Type { name, alias } => { + empty_ident(name); + empty_unresolved_type(alias); + } + } +} + +fn empty_let_statement(let_statement: &mut LetStatement) { + empty_pattern(&mut let_statement.pattern); + empty_unresolved_type(&mut let_statement.r#type); + empty_expression(&mut let_statement.expression); +} + +fn empty_parsed_submodule(parsed_submodule: &mut ParsedSubModule) { + empty_ident(&mut parsed_submodule.name); + empty_parsed_module(&mut parsed_submodule.contents); +} + +fn empty_module_declaration(module_declaration: &mut ModuleDeclaration) { + empty_ident(&mut module_declaration.ident); +} + +fn empty_use_tree(use_tree: &mut UseTree) { + empty_path(&mut use_tree.prefix); + + match &mut use_tree.kind { + UseTreeKind::Path(name, alias) => { + empty_ident(name); + if let Some(alias) = alias { + empty_ident(alias); + } + } + UseTreeKind::List(use_trees) => { + for use_tree in use_trees.iter_mut() { + empty_use_tree(use_tree); + } + } + } +} + +fn empty_noir_struct(noir_struct: &mut NoirStruct) { + noir_struct.span = Default::default(); + empty_ident(&mut noir_struct.name); + for (name, typ) in noir_struct.fields.iter_mut() { + empty_ident(name); + empty_unresolved_type(typ); + } + empty_unresolved_generics(&mut noir_struct.generics); +} + +fn empty_noir_type_alias(noir_type_alias: &mut NoirTypeAlias) { + noir_type_alias.span = Default::default(); + empty_ident(&mut noir_type_alias.name); + empty_unresolved_type(&mut noir_type_alias.typ); +} + +fn empty_block_expression(block_expression: &mut BlockExpression) { + for statement in block_expression.statements.iter_mut() { + empty_statement(statement); + } +} + +fn empty_statement(statement: &mut Statement) { + statement.span = Default::default(); + + match &mut statement.kind { + StatementKind::Let(let_statement) => empty_let_statement(let_statement), + StatementKind::Constrain(constrain_statement) => { + empty_constrain_statement(constrain_statement) + } + StatementKind::Expression(expression) => empty_expression(expression), + StatementKind::Assign(assign_statement) => empty_assign_statement(assign_statement), + StatementKind::For(for_loop_statement) => empty_for_loop_statement(for_loop_statement), + StatementKind::Comptime(statement) => empty_statement(statement), + StatementKind::Semi(expression) => empty_expression(expression), + StatementKind::Break | StatementKind::Continue | StatementKind::Error => (), + } +} + +fn empty_constrain_statement(constrain_statement: &mut ConstrainStatement) { + empty_expression(&mut constrain_statement.0); + if let Some(expression) = &mut constrain_statement.1 { + empty_expression(expression); + } +} + +fn empty_expressions(expressions: &mut [Expression]) { + for expression in expressions.iter_mut() { + empty_expression(expression); + } +} + +fn empty_expression(expression: &mut Expression) { + expression.span = Default::default(); + + match &mut expression.kind { + ExpressionKind::Literal(literal) => empty_literal(literal), + ExpressionKind::Block(block_expression) => 
empty_block_expression(block_expression), + ExpressionKind::Prefix(prefix_expression) => empty_prefix_expression(prefix_expression), + ExpressionKind::Index(index_expression) => empty_index_expression(index_expression), + ExpressionKind::Call(call_expression) => empty_call_expression(call_expression), + ExpressionKind::MethodCall(method_call_expression) => { + empty_method_call_expression(method_call_expression) + } + ExpressionKind::Constructor(constructor_expression) => { + empty_constructor_expression(constructor_expression) + } + ExpressionKind::MemberAccess(member_access_expression) => { + empty_member_access_expression(member_access_expression) + } + ExpressionKind::Cast(cast_expression) => empty_cast_expression(cast_expression), + ExpressionKind::Infix(infix_expression) => empty_infix_expression(infix_expression), + ExpressionKind::If(if_expression) => empty_if_expression(if_expression), + ExpressionKind::Variable(path) => empty_path(path), + ExpressionKind::Tuple(expressions) => { + empty_expressions(expressions); + } + ExpressionKind::Lambda(lambda) => empty_lambda(lambda), + ExpressionKind::Parenthesized(expression) => empty_expression(expression), + ExpressionKind::Unquote(expression) => { + empty_expression(expression); + } + ExpressionKind::Comptime(block_expression, _span) => { + empty_block_expression(block_expression); + } + ExpressionKind::Quote(..) | ExpressionKind::Resolved(_) | ExpressionKind::Error => (), + } +} + +fn empty_assign_statement(assign_statement: &mut AssignStatement) { + empty_lvalue(&mut assign_statement.lvalue); + empty_expression(&mut assign_statement.expression); +} + +fn empty_for_loop_statement(for_loop_statement: &mut ForLoopStatement) { + for_loop_statement.span = Default::default(); + empty_ident(&mut for_loop_statement.identifier); + empty_for_range(&mut for_loop_statement.range); + empty_expression(&mut for_loop_statement.block); +} + +fn empty_unresolved_types(unresolved_types: &mut [UnresolvedType]) { + for unresolved_type in unresolved_types.iter_mut() { + empty_unresolved_type(unresolved_type); + } +} + +fn empty_unresolved_type(unresolved_type: &mut UnresolvedType) { + unresolved_type.span = Default::default(); + + match &mut unresolved_type.typ { + UnresolvedTypeData::Array(unresolved_type_expression, unresolved_type) => { + empty_unresolved_type_expression(unresolved_type_expression); + empty_unresolved_type(unresolved_type); + } + UnresolvedTypeData::Slice(unresolved_type) => empty_unresolved_type(unresolved_type), + UnresolvedTypeData::Expression(unresolved_type_expression) => { + empty_unresolved_type_expression(unresolved_type_expression) + } + UnresolvedTypeData::FormatString(unresolved_type_expression, unresolved_type) => { + empty_unresolved_type_expression(unresolved_type_expression); + empty_unresolved_type(unresolved_type); + } + UnresolvedTypeData::Parenthesized(unresolved_type) => { + empty_unresolved_type(unresolved_type) + } + UnresolvedTypeData::Named(path, unresolved_types, _) => { + empty_path(path); + empty_unresolved_types(unresolved_types); + } + UnresolvedTypeData::TraitAsType(path, unresolved_types) => { + empty_path(path); + empty_unresolved_types(unresolved_types); + } + UnresolvedTypeData::MutableReference(unresolved_type) => { + empty_unresolved_type(unresolved_type) + } + UnresolvedTypeData::Tuple(unresolved_types) => empty_unresolved_types(unresolved_types), + UnresolvedTypeData::Function(args, ret, _env) => { + empty_unresolved_types(args); + empty_unresolved_type(ret); + } + UnresolvedTypeData::FieldElement + 
| UnresolvedTypeData::Integer(_, _) + | UnresolvedTypeData::Bool + | UnresolvedTypeData::String(_) + | UnresolvedTypeData::Unit + | UnresolvedTypeData::Quoted(_) + | UnresolvedTypeData::Resolved(_) + | UnresolvedTypeData::Unspecified + | UnresolvedTypeData::Error => (), + } +} + +fn empty_unresolved_generics(unresolved_generic: &mut UnresolvedGenerics) { + for generic in unresolved_generic.iter_mut() { + empty_unresolved_generic(generic); + } +} + +fn empty_unresolved_generic(unresolved_generic: &mut UnresolvedGeneric) { + match unresolved_generic { + UnresolvedGeneric::Variable(ident) => empty_ident(ident), + UnresolvedGeneric::Numeric { ident, typ } => { + empty_ident(ident); + empty_unresolved_type(typ); + } + UnresolvedGeneric::Resolved(..) => (), + } +} + +fn empty_pattern(pattern: &mut Pattern) { + match pattern { + Pattern::Identifier(ident) => empty_ident(ident), + Pattern::Mutable(pattern, _span, _) => { + empty_pattern(pattern); + } + Pattern::Tuple(patterns, _) => { + for pattern in patterns.iter_mut() { + empty_pattern(pattern); + } + } + Pattern::Struct(path, patterns, _) => { + empty_path(path); + for (name, pattern) in patterns.iter_mut() { + empty_ident(name); + empty_pattern(pattern); + } + } + } +} + +fn empty_unresolved_trait_constraints( + unresolved_trait_constriants: &mut [UnresolvedTraitConstraint], +) { + for trait_constraint in unresolved_trait_constriants.iter_mut() { + empty_unresolved_trait_constraint(trait_constraint); + } +} + +fn empty_unresolved_trait_constraint(unresolved_trait_constraint: &mut UnresolvedTraitConstraint) { + empty_unresolved_type(&mut unresolved_trait_constraint.typ); +} + +fn empty_function_return_type(function_return_type: &mut FunctionReturnType) { + match function_return_type { + FunctionReturnType::Ty(unresolved_type) => empty_unresolved_type(unresolved_type), + FunctionReturnType::Default(_) => (), + } +} + +fn empty_ident(ident: &mut Ident) { + ident.0.set_span(Default::default()); +} + +fn empty_path(path: &mut Path) { + path.span = Default::default(); + for segment in path.segments.iter_mut() { + empty_path_segment(segment); + } +} + +fn empty_path_segment(segment: &mut PathSegment) { + segment.span = Default::default(); + empty_ident(&mut segment.ident); +} + +fn empty_literal(literal: &mut Literal) { + match literal { + Literal::Array(array_literal) => empty_array_literal(array_literal), + Literal::Slice(array_literal) => empty_array_literal(array_literal), + Literal::Bool(_) + | Literal::Integer(_, _) + | Literal::Str(_) + | Literal::RawStr(_, _) + | Literal::FmtStr(_) + | Literal::Unit => (), + } +} + +fn empty_array_literal(array_literal: &mut ArrayLiteral) { + match array_literal { + ArrayLiteral::Standard(expressions) => { + empty_expressions(expressions); + } + ArrayLiteral::Repeated { repeated_element, length } => { + empty_expression(repeated_element); + empty_expression(length); + } + } +} + +fn empty_prefix_expression(prefix_expression: &mut PrefixExpression) { + empty_expression(&mut prefix_expression.rhs); +} + +fn empty_index_expression(index_expression: &mut IndexExpression) { + empty_expression(&mut index_expression.collection); + empty_expression(&mut index_expression.index); +} + +fn empty_call_expression(call_expression: &mut CallExpression) { + empty_expression(&mut call_expression.func); + empty_expressions(&mut call_expression.arguments); +} + +fn empty_method_call_expression(method_call_expression: &mut MethodCallExpression) { + empty_expression(&mut method_call_expression.object); + empty_ident(&mut 
method_call_expression.method_name); + if let Some(generics) = &mut method_call_expression.generics { + empty_unresolved_types(generics); + } + empty_expressions(&mut method_call_expression.arguments); +} + +fn empty_constructor_expression(constructor_expression: &mut ConstructorExpression) { + empty_path(&mut constructor_expression.type_name); + for (name, expression) in constructor_expression.fields.iter_mut() { + empty_ident(name); + empty_expression(expression); + } +} + +fn empty_member_access_expression(member_access_expression: &mut MemberAccessExpression) { + empty_expression(&mut member_access_expression.lhs); + empty_ident(&mut member_access_expression.rhs); +} + +fn empty_cast_expression(cast_expression: &mut CastExpression) { + empty_expression(&mut cast_expression.lhs); + empty_unresolved_type(&mut cast_expression.r#type); +} + +fn empty_infix_expression(infix_expression: &mut InfixExpression) { + empty_expression(&mut infix_expression.lhs); + empty_expression(&mut infix_expression.rhs); +} + +fn empty_if_expression(if_expression: &mut IfExpression) { + empty_expression(&mut if_expression.condition); + empty_expression(&mut if_expression.consequence); + if let Some(alternative) = &mut if_expression.alternative { + empty_expression(alternative); + } +} + +fn empty_lambda(lambda: &mut Lambda) { + for (name, typ) in lambda.parameters.iter_mut() { + empty_pattern(name); + empty_unresolved_type(typ); + } + empty_unresolved_type(&mut lambda.return_type); + empty_expression(&mut lambda.body); +} + +fn empty_lvalue(lvalue: &mut LValue) { + match lvalue { + LValue::Ident(ident) => empty_ident(ident), + LValue::MemberAccess { ref mut object, ref mut field_name, span: _ } => { + empty_lvalue(object); + empty_ident(field_name); + } + LValue::Index { ref mut array, ref mut index, span: _ } => { + empty_lvalue(array); + empty_expression(index); + } + LValue::Dereference(lvalue, _) => empty_lvalue(lvalue), + } +} + +fn empty_for_range(for_range: &mut ForRange) { + match for_range { + ForRange::Range(from, to) => { + empty_expression(from); + empty_expression(to); + } + ForRange::Array(expression) => empty_expression(expression), + } +} + +fn empty_unresolved_type_expression(unresolved_type_expression: &mut UnresolvedTypeExpression) { + match unresolved_type_expression { + UnresolvedTypeExpression::Variable(path) => empty_path(path), + UnresolvedTypeExpression::BinaryOperation(lhs, _, rhs, _) => { + empty_unresolved_type_expression(lhs); + empty_unresolved_type_expression(rhs); + } + UnresolvedTypeExpression::Constant(_, _) => (), + } +} diff --git a/noir/noir-repo/compiler/fm/src/file_map.rs b/noir/noir-repo/compiler/fm/src/file_map.rs index 50412d352ec..ba552fe5156 100644 --- a/noir/noir-repo/compiler/fm/src/file_map.rs +++ b/noir/noir-repo/compiler/fm/src/file_map.rs @@ -34,6 +34,7 @@ impl From<&PathBuf> for PathString { pub struct FileMap { files: SimpleFiles, name_to_id: HashMap, + current_dir: Option, } // XXX: Note that we derive Default here due to ModuleOrigin requiring us to set a FileId @@ -82,7 +83,11 @@ impl FileMap { } impl Default for FileMap { fn default() -> Self { - FileMap { files: SimpleFiles::new(), name_to_id: HashMap::new() } + FileMap { + files: SimpleFiles::new(), + name_to_id: HashMap::new(), + current_dir: std::env::current_dir().ok(), + } } } @@ -92,7 +97,16 @@ impl<'a> Files<'a> for FileMap { type Source = &'a str; fn name(&self, file_id: Self::FileId) -> Result { - Ok(self.files.get(file_id.as_usize())?.name().clone()) + let name = 
self.files.get(file_id.as_usize())?.name().clone(); + + // See if we can make the file name a bit shorter/easier to read if it starts with the current directory + if let Some(current_dir) = &self.current_dir { + if let Ok(name_without_prefix) = name.0.strip_prefix(current_dir) { + return Ok(PathString::from_path(name_without_prefix.to_path_buf())); + } + } + + Ok(name) } fn source(&'a self, file_id: Self::FileId) -> Result { diff --git a/noir/noir-repo/compiler/integration-tests/circuits/recursion/src/main.nr b/noir/noir-repo/compiler/integration-tests/circuits/recursion/src/main.nr index 94cae14daa7..41f94baff4e 100644 --- a/noir/noir-repo/compiler/integration-tests/circuits/recursion/src/main.nr +++ b/noir/noir-repo/compiler/integration-tests/circuits/recursion/src/main.nr @@ -4,10 +4,5 @@ fn main( public_inputs: [Field; 1], key_hash: Field ) { - std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash - ) + std::verify_proof(verification_key, proof, public_inputs, key_hash) } diff --git a/noir/noir-repo/compiler/integration-tests/test/node/prove_and_verify.test.ts b/noir/noir-repo/compiler/integration-tests/test/node/prove_and_verify.test.ts index 699dcf5e918..babc8ca5bb8 100644 --- a/noir/noir-repo/compiler/integration-tests/test/node/prove_and_verify.test.ts +++ b/noir/noir-repo/compiler/integration-tests/test/node/prove_and_verify.test.ts @@ -2,7 +2,12 @@ import { expect } from 'chai'; import assert_lt_json from '../../circuits/assert_lt/target/assert_lt.json' assert { type: 'json' }; import fold_fibonacci_json from '../../circuits/fold_fibonacci/target/fold_fibonacci.json' assert { type: 'json' }; import { Noir } from '@noir-lang/noir_js'; -import { BarretenbergBackend as Backend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg'; +import { + BarretenbergBackend as Backend, + BarretenbergVerifier as Verifier, + UltraHonkBackend, + UltraHonkVerifier, +} from '@noir-lang/backend_barretenberg'; import { CompiledCircuit } from '@noir-lang/types'; const assert_lt_program = assert_lt_json as CompiledCircuit; @@ -150,3 +155,138 @@ it('end-to-end proof creation and verification for multiple ACIR circuits (inner const isValid = await backend.verifyProof(proof); expect(isValid).to.be.true; }); + +const honkBackend = new UltraHonkBackend(assert_lt_program); + +it('UltraHonk end-to-end proof creation and verification (outer)', async () => { + // Noir.Js part + const inputs = { + x: '2', + y: '3', + }; + + const program = new Noir(assert_lt_program); + + const { witness } = await program.execute(inputs); + + // bb.js part + // + // Proof creation + const proof = await honkBackend.generateProof(witness); + + // Proof verification + const isValid = await honkBackend.verifyProof(proof); + expect(isValid).to.be.true; +}); + +it('UltraHonk end-to-end proof creation and verification (outer) -- Verifier API', async () => { + // Noir.Js part + const inputs = { + x: '2', + y: '3', + }; + + // Execute program + const program = new Noir(assert_lt_program); + const { witness } = await program.execute(inputs); + + // Generate proof + const proof = await honkBackend.generateProof(witness); + + const verificationKey = await honkBackend.getVerificationKey(); + + // Proof verification + const verifier = new UltraHonkVerifier(); + const isValid = await verifier.verifyProof(proof, verificationKey); + expect(isValid).to.be.true; +}); + +it('UltraHonk end-to-end proof creation and verification (inner)', async () => { + // Noir.Js part + 
const inputs = { + x: '2', + y: '3', + }; + + const program = new Noir(assert_lt_program); + + const { witness } = await program.execute(inputs); + + // bb.js part + // + // Proof creation + const proof = await honkBackend.generateProof(witness); + + // Proof verification + const isValid = await honkBackend.verifyProof(proof); + expect(isValid).to.be.true; +}); + +it('UltraHonk end-to-end proving and verification with different instances', async () => { + // Noir.Js part + const inputs = { + x: '2', + y: '3', + }; + + const program = new Noir(assert_lt_program); + + const { witness } = await program.execute(inputs); + + // bb.js part + const proof = await honkBackend.generateProof(witness); + + const verifier = new UltraHonkBackend(assert_lt_program); + const proof_is_valid = await verifier.verifyProof(proof); + expect(proof_is_valid).to.be.true; +}); + +it('[BUG] -- UltraHonk bb.js null function or function signature mismatch (outer-inner) ', async () => { + // Noir.Js part + const inputs = { + x: '2', + y: '3', + }; + + const program = new Noir(assert_lt_program); + + const { witness } = await program.execute(inputs); + + // bb.js part + // + // Proof creation + // + // Create a proof using both proving systems, the majority of the time + // one would only use outer proofs. + const proofOuter = await honkBackend.generateProof(witness); + const _proofInner = await honkBackend.generateProof(witness); + + // Proof verification + // + const isValidOuter = await honkBackend.verifyProof(proofOuter); + expect(isValidOuter).to.be.true; + // We can also try verifying an inner proof and it will fail. + const isValidInner = await honkBackend.verifyProof(_proofInner); + expect(isValidInner).to.be.true; +}); + +it('UltraHonk end-to-end proof creation and verification for multiple ACIR circuits (inner)', async () => { + // Noir.Js part + const inputs = { + x: '10', + }; + + const program = new Noir(fold_fibonacci_program); + + const { witness } = await program.execute(inputs); + + // bb.js part + // + // Proof creation + const honkBackend = new UltraHonkBackend(fold_fibonacci_program); + const proof = await honkBackend.generateProof(witness); + + // Proof verification + const isValid = await honkBackend.verifyProof(proof); + expect(isValid).to.be.true; +}); diff --git a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs index d0b33945f40..87181b285de 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs @@ -100,6 +100,7 @@ pub(super) fn abi_type_from_hir_type(context: &Context, typ: &Type) -> AbiType { Type::Error | Type::Unit | Type::Constant(_) + | Type::InfixExpr(..) | Type::TraitAsType(..) | Type::TypeVariable(_, _) | Type::NamedGeneric(..) 
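// Note on the added arm above: Type::InfixExpr appears to be the type variant behind the experimental
// arithmetic-generics support (see the `arithmetic_generics` flag introduced in CompileOptions below);
// like Type::Constant and Type::NamedGeneric it is grouped with the types that never surface in a
// user-facing ABI, so abi_type_from_hir_type handles it in the same arm as those types.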
@@ -116,7 +117,7 @@ fn to_abi_visibility(value: Visibility) -> AbiVisibility { match value { Visibility::Public => AbiVisibility::Public, Visibility::Private => AbiVisibility::Private, - Visibility::DataBus => AbiVisibility::DataBus, + Visibility::CallData(_) | Visibility::ReturnData => AbiVisibility::DataBus, } } diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index dd774a1eeec..2e185c69461 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -56,6 +56,12 @@ pub struct CompileOptions { #[arg(long, value_parser = parse_expression_width)] pub expression_width: Option, + /// Generate ACIR with the target backend expression width. + /// The default is to generate ACIR without a bound and split expressions after code generation. + /// Activating this flag can sometimes provide optimizations for certain programs. + #[arg(long, default_value = "false")] + pub bounded_codegen: bool, + /// Force a full recompilation. #[arg(long = "force")] pub force_compile: bool, @@ -107,6 +113,10 @@ pub struct CompileOptions { /// Outputs the paths to any modified artifacts #[arg(long, hide = true)] pub show_artifact_paths: bool, + + /// Temporary flag to enable the experimental arithmetic generics feature + #[arg(long, hide = true)] + pub arithmetic_generics: bool, } pub fn parse_expression_width(input: &str) -> Result { @@ -256,21 +266,28 @@ pub fn add_dep( pub fn check_crate( context: &mut Context, crate_id: CrateId, - deny_warnings: bool, - disable_macros: bool, - debug_comptime_in_file: Option<&str>, + options: &CompileOptions, ) -> CompilationResult<()> { - let macros: &[&dyn MacroProcessor] = - if disable_macros { &[] } else { &[&aztec_macros::AztecMacro as &dyn MacroProcessor] }; + let macros: &[&dyn MacroProcessor] = if options.disable_macros { + &[] + } else { + &[&aztec_macros::AztecMacro as &dyn MacroProcessor] + }; let mut errors = vec![]; - let diagnostics = CrateDefMap::collect_defs(crate_id, context, debug_comptime_in_file, macros); + let diagnostics = CrateDefMap::collect_defs( + crate_id, + context, + options.debug_comptime_in_file.as_deref(), + options.arithmetic_generics, + macros, + ); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { let diagnostic = CustomDiagnostic::from(&error); diagnostic.in_file(file_id) })); - if has_errors(&errors, deny_warnings) { + if has_errors(&errors, options.deny_warnings) { Err(errors) } else { Ok(((), errors)) @@ -296,13 +313,7 @@ pub fn compile_main( options: &CompileOptions, cached_program: Option, ) -> CompilationResult { - let (_, mut warnings) = check_crate( - context, - crate_id, - options.deny_warnings, - options.disable_macros, - options.debug_comptime_in_file.as_deref(), - )?; + let (_, mut warnings) = check_crate(context, crate_id, options)?; let main = context.get_main_function(&crate_id).ok_or_else(|| { // TODO(#2155): This error might be a better to exist in Nargo @@ -337,13 +348,7 @@ pub fn compile_contract( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult { - let (_, warnings) = check_crate( - context, - crate_id, - options.deny_warnings, - options.disable_macros, - options.debug_comptime_in_file.as_deref(), - )?; + let (_, warnings) = check_crate(context, crate_id, options)?; // TODO: We probably want to error if contracts is empty let contracts = context.get_all_contracts(&crate_id); @@ -512,6 +517,12 @@ fn compile_contract_inner( } } +/// Default expression width used for Noir 
compilation. +/// The ACVM native type `ExpressionWidth` has its own default which should always be unbounded, +/// while we can sometimes expect the compilation target width to change. +/// Thus, we set it separately here rather than trying to alter the default derivation of the type. +pub const DEFAULT_EXPRESSION_WIDTH: ExpressionWidth = ExpressionWidth::Bounded { width: 4 }; + /// Compile the current crate using `main_function` as the entrypoint. /// /// This function assumes [`check_crate`] is called beforehand. @@ -550,6 +561,11 @@ pub fn compile_no_check( enable_brillig_logging: options.show_brillig, force_brillig_output: options.force_brillig, print_codegen_timings: options.benchmark_codegen, + expression_width: if options.bounded_codegen { + options.expression_width.unwrap_or(DEFAULT_EXPRESSION_WIDTH) + } else { + ExpressionWidth::default() + }, }; let SsaProgramArtifact { program, debug, warnings, names, error_types, .. } = diff --git a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs index d2474444d13..e290842480d 100644 --- a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -25,7 +25,7 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> let root_crate_id = prepare_crate(&mut context, file_name); let ((), warnings) = - noirc_driver::check_crate(&mut context, root_crate_id, false, false, None)?; + noirc_driver::check_crate(&mut context, root_crate_id, &Default::default())?; assert_eq!(warnings, Vec::new(), "stdlib is producing {} warnings", warnings.len()); diff --git a/noir/noir-repo/compiler/noirc_errors/src/position.rs b/noir/noir-repo/compiler/noirc_errors/src/position.rs index 9f9879e1d1b..02b242e8b4d 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/position.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/position.rs @@ -43,6 +43,10 @@ impl Spanned { pub fn span(&self) -> Span { self.span } + + pub fn set_span(&mut self, span: Span) { + self.span = span; + } } impl std::borrow::Borrow for Spanned { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 81327cec013..41dbf3b7272 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -42,6 +42,22 @@ pub mod ir; mod opt; pub mod ssa_gen; +pub struct SsaEvaluatorOptions { + /// Emit debug information for the intermediate SSA IR + pub enable_ssa_logging: bool, + + pub enable_brillig_logging: bool, + + /// Force Brillig output (for step debugging) + pub force_brillig_output: bool, + + /// Pretty print benchmark times of each code generation pass + pub print_codegen_timings: bool, + + /// Width of expressions to be used for ACIR + pub expression_width: ExpressionWidth, +} + pub(crate) struct ArtifactsAndWarnings(Artifacts, Vec); /// Optimize the given program by converting it into SSA @@ -99,7 +115,9 @@ pub(crate) fn optimize_into_acir( drop(ssa_gen_span_guard); - let artifacts = time("SSA to ACIR", options.print_codegen_timings, || ssa.into_acir(&brillig))?; + let artifacts = time("SSA to ACIR", options.print_codegen_timings, || { + ssa.into_acir(&brillig, options.expression_width) + })?; Ok(ArtifactsAndWarnings(artifacts, ssa_level_warnings)) } @@ -160,19 +178,6 @@ impl SsaProgramArtifact { } } -pub struct SsaEvaluatorOptions { - /// Emit debug information for the intermediate SSA IR - pub 
enable_ssa_logging: bool, - - pub enable_brillig_logging: bool, - - /// Force Brillig output (for step debugging) - pub force_brillig_output: bool, - - /// Pretty print benchmark times of each code generation pass - pub print_codegen_timings: bool, -} - /// Compiles the [`Program`] into [`ACIR``][acvm::acir::circuit::Program]. /// /// The output ACIR is backend-agnostic and so must go through a transformation pass before usage in proof generation. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 629cc491ba6..fdad06a520b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -9,7 +9,7 @@ use crate::ssa::ir::types::Type as SsaType; use crate::ssa::ir::{instruction::Endian, types::NumericType}; use acvm::acir::circuit::brillig::{BrilligInputs, BrilligOutputs}; use acvm::acir::circuit::opcodes::{BlockId, BlockType, MemOp}; -use acvm::acir::circuit::{AssertionPayload, ExpressionOrMemory, Opcode}; +use acvm::acir::circuit::{AssertionPayload, ExpressionOrMemory, ExpressionWidth, Opcode}; use acvm::blackbox_solver; use acvm::brillig_vm::{MemoryValue, VMStatus, VM}; use acvm::{ @@ -24,6 +24,7 @@ use acvm::{ use fxhash::FxHashMap as HashMap; use iter_extended::{try_vecmap, vecmap}; use num_bigint::BigUint; +use std::cmp::Ordering; use std::{borrow::Cow, hash::Hash}; #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -124,9 +125,15 @@ pub(crate) struct AcirContext { /// The BigIntContext, used to generate identifiers for BigIntegers big_int_ctx: BigIntContext, + + expression_width: ExpressionWidth, } impl AcirContext { + pub(crate) fn set_expression_width(&mut self, expression_width: ExpressionWidth) { + self.expression_width = expression_width; + } + pub(crate) fn current_witness_index(&self) -> Witness { self.acir_ir.current_witness_index() } @@ -584,6 +591,7 @@ impl AcirContext { pub(crate) fn mul_var(&mut self, lhs: AcirVar, rhs: AcirVar) -> Result { let lhs_data = self.vars[&lhs].clone(); let rhs_data = self.vars[&rhs].clone(); + let result = match (lhs_data, rhs_data) { // (x * 1) == (1 * x) == x (AcirVarData::Const(constant), _) if constant.is_one() => rhs, @@ -655,6 +663,7 @@ impl AcirContext { self.mul_var(lhs, rhs)? 
} }; + Ok(result) } @@ -670,9 +679,62 @@ impl AcirContext { pub(crate) fn add_var(&mut self, lhs: AcirVar, rhs: AcirVar) -> Result { let lhs_expr = self.var_to_expression(lhs)?; let rhs_expr = self.var_to_expression(rhs)?; + let sum_expr = &lhs_expr + &rhs_expr; + if fits_in_one_identity(&sum_expr, self.expression_width) { + let sum_var = self.add_data(AcirVarData::from(sum_expr)); + + return Ok(sum_var); + } + + let sum_expr = match lhs_expr.width().cmp(&rhs_expr.width()) { + Ordering::Greater => { + let lhs_witness_var = self.get_or_create_witness_var(lhs)?; + let lhs_witness_expr = self.var_to_expression(lhs_witness_var)?; + + let new_sum_expr = &lhs_witness_expr + &rhs_expr; + if fits_in_one_identity(&new_sum_expr, self.expression_width) { + new_sum_expr + } else { + let rhs_witness_var = self.get_or_create_witness_var(rhs)?; + let rhs_witness_expr = self.var_to_expression(rhs_witness_var)?; + + &lhs_expr + &rhs_witness_expr + } + } + Ordering::Less => { + let rhs_witness_var = self.get_or_create_witness_var(rhs)?; + let rhs_witness_expr = self.var_to_expression(rhs_witness_var)?; + + let new_sum_expr = &lhs_expr + &rhs_witness_expr; + if fits_in_one_identity(&new_sum_expr, self.expression_width) { + new_sum_expr + } else { + let lhs_witness_var = self.get_or_create_witness_var(lhs)?; + let lhs_witness_expr = self.var_to_expression(lhs_witness_var)?; - Ok(self.add_data(AcirVarData::from(sum_expr))) + &lhs_witness_expr + &rhs_expr + } + } + Ordering::Equal => { + let lhs_witness_var = self.get_or_create_witness_var(lhs)?; + let lhs_witness_expr = self.var_to_expression(lhs_witness_var)?; + + let new_sum_expr = &lhs_witness_expr + &rhs_expr; + if fits_in_one_identity(&new_sum_expr, self.expression_width) { + new_sum_expr + } else { + let rhs_witness_var = self.get_or_create_witness_var(rhs)?; + let rhs_witness_expr = self.var_to_expression(rhs_witness_var)?; + + &lhs_witness_expr + &rhs_witness_expr + } + } + }; + + let sum_var = self.add_data(AcirVarData::from(sum_expr)); + + Ok(sum_var) } /// Adds a new Variable to context whose value will @@ -1990,6 +2052,23 @@ impl From> for AcirVarData { } } +/// Checks if this expression can fit into one arithmetic identity +fn fits_in_one_identity(expr: &Expression, width: ExpressionWidth) -> bool { + let width = match &width { + ExpressionWidth::Unbounded => { + return true; + } + ExpressionWidth::Bounded { width } => *width, + }; + + // A Polynomial with more than one mul term cannot fit into one opcode + if expr.mul_terms.len() > 1 { + return false; + }; + + expr.width() <= width +} + /// A Reference to an `AcirVarData` #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub(crate) struct AcirVar(usize); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index a75aabe6a03..0e4bbbf759c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -33,7 +33,7 @@ use acvm::acir::circuit::opcodes::BlockType; use noirc_frontend::monomorphization::ast::InlineType; use acvm::acir::circuit::brillig::BrilligBytecode; -use acvm::acir::circuit::{AssertionPayload, ErrorSelector, OpcodeLocation}; +use acvm::acir::circuit::{AssertionPayload, ErrorSelector, ExpressionWidth, OpcodeLocation}; use acvm::acir::native_types::Witness; use acvm::acir::BlackBoxFunc; use acvm::{acir::circuit::opcodes::BlockId, acir::AcirField, FieldElement}; @@ -282,12 +282,16 @@ pub(crate) type Artifacts = ( 
impl Ssa { #[tracing::instrument(level = "trace", skip_all)] - pub(crate) fn into_acir(self, brillig: &Brillig) -> Result { + pub(crate) fn into_acir( + self, + brillig: &Brillig, + expression_width: ExpressionWidth, + ) -> Result { let mut acirs = Vec::new(); - // TODO: can we parallelise this? + // TODO: can we parallelize this? let mut shared_context = SharedContext::default(); for function in self.functions.values() { - let context = Context::new(&mut shared_context); + let context = Context::new(&mut shared_context, expression_width); if let Some(mut generated_acir) = context.convert_ssa_function(&self, function, brillig)? { @@ -334,8 +338,12 @@ impl Ssa { } impl<'a> Context<'a> { - fn new(shared_context: &'a mut SharedContext) -> Context<'a> { + fn new( + shared_context: &'a mut SharedContext, + expression_width: ExpressionWidth, + ) -> Context<'a> { let mut acir_context = AcirContext::default(); + acir_context.set_expression_width(expression_width); let current_side_effects_enabled_var = acir_context.add_constant(FieldElement::one()); Context { @@ -422,6 +430,12 @@ impl<'a> Context<'a> { let (return_vars, return_warnings) = self.convert_ssa_return(entry_block.unwrap_terminator(), dfg)?; + let call_data_arrays: Vec = + self.data_bus.call_data.iter().map(|cd| cd.array_id).collect(); + for call_data_array in call_data_arrays { + self.ensure_array_is_initialized(call_data_array, dfg)?; + } + // TODO: This is a naive method of assigning the return values to their witnesses as // we're likely to get a number of constraints which are asserting one witness to be equal to another. // @@ -1255,20 +1269,23 @@ impl<'a> Context<'a> { let res_typ = dfg.type_of_value(results[0]); // Get operations to call-data parameters are replaced by a get to the call-data-bus array - if let Some(call_data) = self.data_bus.call_data { - if self.data_bus.call_data_map.contains_key(&array) { - // TODO: the block_id of call-data must be notified to the backend - // TODO: should we do the same for return-data? 
- let type_size = res_typ.flattened_size(); - let type_size = - self.acir_context.add_constant(FieldElement::from(type_size as i128)); - let offset = self.acir_context.mul_var(var_index, type_size)?; - let bus_index = self - .acir_context - .add_constant(FieldElement::from(self.data_bus.call_data_map[&array] as i128)); - let new_index = self.acir_context.add_var(offset, bus_index)?; - return self.array_get(instruction, call_data, new_index, dfg, index_side_effect); - } + if let Some(call_data) = + self.data_bus.call_data.iter().find(|cd| cd.index_map.contains_key(&array)) + { + let type_size = res_typ.flattened_size(); + let type_size = self.acir_context.add_constant(FieldElement::from(type_size as i128)); + let offset = self.acir_context.mul_var(var_index, type_size)?; + let bus_index = self + .acir_context + .add_constant(FieldElement::from(call_data.index_map[&array] as i128)); + let new_index = self.acir_context.add_var(offset, bus_index)?; + return self.array_get( + instruction, + call_data.array_id, + new_index, + dfg, + index_side_effect, + ); } // Compiler sanity check @@ -1288,6 +1305,7 @@ impl<'a> Context<'a> { index_side_effect = false; } } + // Fallback to multiplication if the index side_effects have not already been handled if index_side_effect { // Set the value to 0 if current_side_effects is 0, to ensure it fits in any value type @@ -1698,17 +1716,20 @@ impl<'a> Context<'a> { len: usize, value: Option, ) -> Result<(), InternalError> { - let databus = if self.data_bus.call_data.is_some() - && self.block_id(&self.data_bus.call_data.unwrap()) == array - { - BlockType::CallData - } else if self.data_bus.return_data.is_some() + let mut databus = BlockType::Memory; + if self.data_bus.return_data.is_some() && self.block_id(&self.data_bus.return_data.unwrap()) == array { - BlockType::ReturnData - } else { - BlockType::Memory - }; + databus = BlockType::ReturnData; + } + for array_id in self.data_bus.call_data_array() { + if self.block_id(&array_id) == array { + assert!(databus == BlockType::Memory); + databus = BlockType::CallData; + break; + } + } + self.acir_context.initialize_array(array, len, value, databus)?; self.initialized_arrays.insert(array); Ok(()) @@ -2820,7 +2841,7 @@ mod test { use acvm::{ acir::{ - circuit::{Opcode, OpcodeLocation}, + circuit::{ExpressionWidth, Opcode, OpcodeLocation}, native_types::Witness, }, FieldElement, @@ -2917,7 +2938,7 @@ mod test { let ssa = builder.finish(); let (acir_functions, _, _) = ssa - .into_acir(&Brillig::default()) + .into_acir(&Brillig::default(), ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); // Expected result: // main f0 @@ -3012,7 +3033,7 @@ mod test { let ssa = builder.finish(); let (acir_functions, _, _) = ssa - .into_acir(&Brillig::default()) + .into_acir(&Brillig::default(), ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); // The expected result should look very similar to the above test expect that the input witnesses of the `Call` // opcodes will be different. The changes can discerned from the checks below. 
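// Note on the test updates in this module: they are mechanical. into_acir now also takes an expression
// width, and the tests pass ExpressionWidth::default(), which is unbounded per the comment on
// DEFAULT_EXPRESSION_WIDTH in noirc_driver above; with an unbounded width fits_in_one_identity always
// returns true, so the ACIR these tests assert on should be unchanged.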
@@ -3102,7 +3123,7 @@ mod test { let ssa = builder.finish(); let (acir_functions, _, _) = ssa - .into_acir(&Brillig::default()) + .into_acir(&Brillig::default(), ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); assert_eq!(acir_functions.len(), 3, "Should have three ACIR functions"); @@ -3215,8 +3236,9 @@ mod test { let ssa = builder.finish(); let brillig = ssa.to_brillig(false); - let (acir_functions, brillig_functions, _) = - ssa.into_acir(&brillig).expect("Should compile manually written SSA into ACIR"); + let (acir_functions, brillig_functions, _) = ssa + .into_acir(&brillig, ExpressionWidth::default()) + .expect("Should compile manually written SSA into ACIR"); assert_eq!(acir_functions.len(), 1, "Should only have a `main` ACIR function"); assert_eq!(brillig_functions.len(), 2, "Should only have generated two Brillig functions"); @@ -3272,7 +3294,7 @@ mod test { // The Brillig bytecode we insert for the stdlib is hardcoded so we do not need to provide any // Brillig artifacts to the ACIR gen pass. let (acir_functions, brillig_functions, _) = ssa - .into_acir(&Brillig::default()) + .into_acir(&Brillig::default(), ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); assert_eq!(acir_functions.len(), 1, "Should only have a `main` ACIR function"); @@ -3343,8 +3365,9 @@ mod test { let brillig = ssa.to_brillig(false); println!("{}", ssa); - let (acir_functions, brillig_functions, _) = - ssa.into_acir(&brillig).expect("Should compile manually written SSA into ACIR"); + let (acir_functions, brillig_functions, _) = ssa + .into_acir(&brillig, ExpressionWidth::default()) + .expect("Should compile manually written SSA into ACIR"); assert_eq!(acir_functions.len(), 1, "Should only have a `main` ACIR function"); // We expect 3 brillig functions: @@ -3431,8 +3454,9 @@ mod test { let brillig = ssa.to_brillig(false); println!("{}", ssa); - let (acir_functions, brillig_functions, _) = - ssa.into_acir(&brillig).expect("Should compile manually written SSA into ACIR"); + let (acir_functions, brillig_functions, _) = ssa + .into_acir(&brillig, ExpressionWidth::default()) + .expect("Should compile manually written SSA into ACIR"); assert_eq!(acir_functions.len(), 2, "Should only have two ACIR functions"); // We expect 3 brillig functions: diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index 5f0660f5a79..50964e9161b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeMap; use std::rc::Rc; use crate::ssa::ir::{types::Type, value::ValueId}; @@ -8,6 +9,12 @@ use noirc_frontend::hir_def::function::FunctionSignature; use super::FunctionBuilder; +#[derive(Clone)] +pub(crate) enum DatabusVisibility { + None, + CallData(u32), + ReturnData, +} /// Used to create a data bus, which is an array of private inputs /// replacing public inputs pub(crate) struct DataBusBuilder { @@ -27,15 +34,16 @@ impl DataBusBuilder { } } - /// Generates a boolean vector telling which (ssa) parameter from the given function signature + /// Generates a vector telling which (ssa) parameters from the given function signature /// are tagged with databus visibility - pub(crate) fn is_databus(main_signature: &FunctionSignature) -> Vec { + pub(crate) fn is_databus(main_signature: &FunctionSignature) 
-> Vec { let mut params_is_databus = Vec::new(); for param in &main_signature.0 { let is_databus = match param.2 { - ast::Visibility::Public | ast::Visibility::Private => false, - ast::Visibility::DataBus => true, + ast::Visibility::Public | ast::Visibility::Private => DatabusVisibility::None, + ast::Visibility::CallData(id) => DatabusVisibility::CallData(id), + ast::Visibility::ReturnData => DatabusVisibility::ReturnData, }; let len = param.1.field_count() as usize; params_is_databus.extend(vec![is_databus; len]); @@ -44,34 +52,51 @@ impl DataBusBuilder { } } +#[derive(Clone, Debug)] +pub(crate) struct CallData { + pub(crate) array_id: ValueId, + pub(crate) index_map: HashMap, +} + #[derive(Clone, Default, Debug)] pub(crate) struct DataBus { - pub(crate) call_data: Option, - pub(crate) call_data_map: HashMap, + pub(crate) call_data: Vec, pub(crate) return_data: Option, } impl DataBus { /// Updates the databus values with the provided function pub(crate) fn map_values(&self, mut f: impl FnMut(ValueId) -> ValueId) -> DataBus { - let mut call_data_map = HashMap::default(); - for (k, v) in self.call_data_map.iter() { - call_data_map.insert(f(*k), *v); - } - DataBus { - call_data: self.call_data.map(&mut f), - call_data_map, - return_data: self.return_data.map(&mut f), - } + let call_data = self + .call_data + .iter() + .map(|cd| { + let mut call_data_map = HashMap::default(); + for (k, v) in cd.index_map.iter() { + call_data_map.insert(f(*k), *v); + } + CallData { array_id: f(cd.array_id), index_map: call_data_map } + }) + .collect(); + DataBus { call_data, return_data: self.return_data.map(&mut f) } } + pub(crate) fn call_data_array(&self) -> Vec { + self.call_data.iter().map(|cd| cd.array_id).collect() + } /// Construct a databus from call_data and return_data data bus builders - pub(crate) fn get_data_bus(call_data: DataBusBuilder, return_data: DataBusBuilder) -> DataBus { - DataBus { - call_data: call_data.databus, - call_data_map: call_data.map, - return_data: return_data.databus, + pub(crate) fn get_data_bus( + call_data: Vec, + return_data: DataBusBuilder, + ) -> DataBus { + let mut call_data_args = Vec::new(); + for call_data_item in call_data { + if let Some(array_id) = call_data_item.databus { + call_data_args.push(CallData { array_id, index_map: call_data_item.map }); + } } + + DataBus { call_data: call_data_args, return_data: return_data.databus } } } @@ -129,19 +154,36 @@ impl FunctionBuilder { } /// Generate the data bus for call-data, based on the parameters of the entry block - /// and a boolean vector telling which ones are call-data - pub(crate) fn call_data_bus(&mut self, is_params_databus: Vec) -> DataBusBuilder { + /// and a vector telling which ones are call-data + pub(crate) fn call_data_bus( + &mut self, + is_params_databus: Vec, + ) -> Vec { //filter parameters of the first block that have call-data visibility let first_block = self.current_function.entry_block(); let params = self.current_function.dfg[first_block].parameters(); - let mut databus_param = Vec::new(); - for (param, is_databus) in params.iter().zip(is_params_databus) { - if is_databus { - databus_param.push(param.to_owned()); + let mut databus_param: BTreeMap> = BTreeMap::new(); + for (param, databus_attribute) in params.iter().zip(is_params_databus) { + match databus_attribute { + DatabusVisibility::None | DatabusVisibility::ReturnData => continue, + DatabusVisibility::CallData(call_data_id) => { + if let std::collections::btree_map::Entry::Vacant(e) = + databus_param.entry(call_data_id) + { + 
e.insert(vec![param.to_owned()]); + } else { + databus_param.get_mut(&call_data_id).unwrap().push(param.to_owned()); + } + } } } - // create the call-data-bus from the filtered list - let call_data = DataBusBuilder::new(); - self.initialize_data_bus(&databus_param, call_data) + // create the call-data-bus from the filtered lists + let mut result = Vec::new(); + for id in databus_param.keys() { + let builder = DataBusBuilder::new(); + let call_databus = self.initialize_data_bus(&databus_param[id], builder); + result.push(call_databus); + } + result } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs index 42383680f44..24519d530ee 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -34,8 +34,8 @@ impl Ssa { /// of its instructions are needed elsewhere. fn dead_instruction_elimination(function: &mut Function) { let mut context = Context::default(); - if let Some(call_data) = function.dfg.data_bus.call_data { - context.mark_used_instruction_results(&function.dfg, call_data); + for call_data in &function.dfg.data_bus.call_data { + context.mark_used_instruction_results(&function.dfg, call_data.array_id); } let blocks = PostOrder::with_function(function); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index abd251b008f..468a8573307 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -44,7 +44,7 @@ pub(crate) fn generate_ssa( // see which parameter has call_data/return_data attribute let is_databus = DataBusBuilder::is_databus(&program.main_function_signature); - let is_return_data = matches!(program.return_visibility, Visibility::DataBus); + let is_return_data = matches!(program.return_visibility, Visibility::ReturnData); let return_location = program.return_location; let context = SharedContext::new(program); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 057daa2bdde..7a324eb2600 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -7,7 +7,7 @@ use crate::ast::{ }; use crate::hir::def_collector::errors::DefCollectorErrorKind; use crate::macros_api::StructId; -use crate::node_interner::ExprId; +use crate::node_interner::{ExprId, QuotedTypeId}; use crate::token::{Attributes, Token, Tokens}; use crate::{Kind, Type}; use acvm::{acir::AcirField, FieldElement}; @@ -29,9 +29,7 @@ pub enum ExpressionKind { Cast(Box), Infix(Box), If(Box), - // The optional vec here is the optional list of generics - // provided by the turbofish operator, if used - Variable(Path, Option>), + Variable(Path), Tuple(Vec), Lambda(Box), Parenthesized(Box), @@ -53,7 +51,16 @@ pub type UnresolvedGenerics = Vec; #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum UnresolvedGeneric { Variable(Ident), - Numeric { ident: Ident, typ: UnresolvedType }, + Numeric { + ident: Ident, + typ: UnresolvedType, + }, + + /// Already-resolved generics can be parsed as generics when a macro + /// splices existing types into a generic list. In this case we have + /// to validate the type refers to a named generic and treat that + /// as a ResolvedGeneric when this is resolved. 
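
The call_data_bus change above groups the entry block's parameters by their CallData(id) tag, so each distinct id becomes its own call-data array. A simplified sketch of that grouping, using entry().or_default() in place of the explicit Entry::Vacant match and plain u32s in place of ValueIds (a toy model, not the compiler's types):

    use std::collections::BTreeMap;

    // Illustrative stand-in for the SSA value id of a parameter.
    type ValueId = u32;

    // Simplified copy of the visibility tag attached to each flattened parameter.
    #[allow(dead_code)]
    #[derive(Clone)]
    enum DatabusVisibility {
        None,
        CallData(u32),
        ReturnData,
    }

    /// Group parameters by their call-data id; each group later becomes its own
    /// call-data array via a separate DataBusBuilder.
    fn group_call_data(
        params: &[ValueId],
        visibilities: &[DatabusVisibility],
    ) -> BTreeMap<u32, Vec<ValueId>> {
        let mut groups: BTreeMap<u32, Vec<ValueId>> = BTreeMap::new();
        for (param, visibility) in params.iter().zip(visibilities) {
            match visibility {
                DatabusVisibility::None | DatabusVisibility::ReturnData => continue,
                DatabusVisibility::CallData(id) => groups.entry(*id).or_default().push(*param),
            }
        }
        groups
    }

    fn main() {
        let params = [10, 11, 12, 13];
        let vis = [
            DatabusVisibility::CallData(0),
            DatabusVisibility::None,
            DatabusVisibility::CallData(1),
            DatabusVisibility::CallData(0),
        ];
        let groups = group_call_data(&params, &vis);
        assert_eq!(groups[&0], vec![10, 13]);
        assert_eq!(groups[&1], vec![12]);
    }

Keying the groups with a BTreeMap keeps the resulting call-data arrays in a deterministic order across compilations.
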
+ Resolved(QuotedTypeId, Span), } impl UnresolvedGeneric { @@ -63,6 +70,7 @@ impl UnresolvedGeneric { UnresolvedGeneric::Numeric { ident, typ } => { ident.0.span().merge(typ.span.unwrap_or_default()) } + UnresolvedGeneric::Resolved(_, span) => *span, } } @@ -73,6 +81,9 @@ impl UnresolvedGeneric { let typ = self.resolve_numeric_kind_type(typ)?; Ok(Kind::Numeric(Box::new(typ))) } + UnresolvedGeneric::Resolved(..) => { + panic!("Don't know the kind of a resolved generic here") + } } } @@ -96,6 +107,7 @@ impl UnresolvedGeneric { pub(crate) fn ident(&self) -> &Ident { match self { UnresolvedGeneric::Variable(ident) | UnresolvedGeneric::Numeric { ident, .. } => ident, + UnresolvedGeneric::Resolved(..) => panic!("UnresolvedGeneric::Resolved no ident"), } } } @@ -105,6 +117,7 @@ impl Display for UnresolvedGeneric { match self { UnresolvedGeneric::Variable(ident) => write!(f, "{ident}"), UnresolvedGeneric::Numeric { ident, typ } => write!(f, "let {ident}: {typ}"), + UnresolvedGeneric::Resolved(..) => write!(f, "(resolved)"), } } } @@ -118,7 +131,7 @@ impl From for UnresolvedGeneric { impl ExpressionKind { pub fn into_path(self) -> Option { match self { - ExpressionKind::Variable(path, _) => Some(path), + ExpressionKind::Variable(path) => Some(path), _ => None, } } @@ -265,29 +278,9 @@ impl Expression { arguments: Vec, span: Span, ) -> Expression { - // Need to check if lhs is an if expression since users can sequence if expressions - // with tuples without calling them. E.g. `if c { t } else { e }(a, b)` is interpreted - // as a sequence of { if, tuple } rather than a function call. This behavior matches rust. - let kind = if matches!(&lhs.kind, ExpressionKind::If(..)) { - ExpressionKind::Block(BlockExpression { - statements: vec![ - Statement { kind: StatementKind::Expression(lhs), span }, - Statement { - kind: StatementKind::Expression(Expression::new( - ExpressionKind::Tuple(arguments), - span, - )), - span, - }, - ], - }) - } else { - ExpressionKind::Call(Box::new(CallExpression { - func: Box::new(lhs), - is_macro_call, - arguments, - })) - }; + let func = Box::new(lhs); + let kind = + ExpressionKind::Call(Box::new(CallExpression { func, is_macro_call, arguments })); Expression::new(kind, span) } } @@ -583,14 +576,7 @@ impl Display for ExpressionKind { Cast(cast) => cast.fmt(f), Infix(infix) => infix.fmt(f), If(if_expr) => if_expr.fmt(f), - Variable(path, generics) => { - if let Some(generics) = generics { - let generics = vecmap(generics, ToString::to_string); - write!(f, "{path}::<{}>", generics.join(", ")) - } else { - path.fmt(f) - } - } + Variable(path) => path.fmt(f), Constructor(constructor) => constructor.fmt(f), MemberAccess(access) => access.fmt(f), Tuple(elements) => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index 038a13529d7..f59d316950c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -39,6 +39,18 @@ pub enum IntegerBitSize { SixtyFour, } +impl IntegerBitSize { + pub fn bit_size(&self) -> u8 { + match self { + IntegerBitSize::One => 1, + IntegerBitSize::Eight => 8, + IntegerBitSize::Sixteen => 16, + IntegerBitSize::ThirtyTwo => 32, + IntegerBitSize::SixtyFour => 64, + } + } +} + impl IntegerBitSize { pub fn allowed_sizes() -> Vec { vec![Self::One, Self::Eight, Self::ThirtyTwo, Self::SixtyFour] @@ -291,12 +303,21 @@ impl UnresolvedTypeData { } } -#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] +#[derive(Debug, 
PartialEq, Eq, Copy, Clone, Hash, PartialOrd, Ord)] pub enum Signedness { Unsigned, Signed, } +impl Signedness { + pub fn is_signed(&self) -> bool { + match self { + Signedness::Unsigned => false, + Signedness::Signed => true, + } + } +} + impl UnresolvedTypeExpression { // This large error size is justified because it improves parsing speeds by around 40% in // release mode. See `ParserError` definition for further explanation. @@ -324,7 +345,7 @@ impl UnresolvedTypeExpression { Some(int) => Ok(UnresolvedTypeExpression::Constant(int, expr.span)), None => Err(expr), }, - ExpressionKind::Variable(path, _) => Ok(UnresolvedTypeExpression::Variable(path)), + ExpressionKind::Variable(path) => Ok(UnresolvedTypeExpression::Variable(path)), ExpressionKind::Prefix(prefix) if prefix.operator == UnaryOp::Minus => { let lhs = Box::new(UnresolvedTypeExpression::Constant(0, expr.span)); let rhs = Box::new(UnresolvedTypeExpression::from_expr_helper(prefix.rhs)?); @@ -390,7 +411,9 @@ pub enum Visibility { Private, /// DataBus is public input handled as private input. We use the fact that return values are properly computed by the program to avoid having them as public inputs /// it is useful for recursion and is handled by the proving system. - DataBus, + /// The u32 value is used to group inputs having the same value. + CallData(u32), + ReturnData, } impl std::fmt::Display for Visibility { @@ -398,7 +421,8 @@ impl std::fmt::Display for Visibility { match self { Self::Public => write!(f, "pub"), Self::Private => write!(f, "priv"), - Self::DataBus => write!(f, "databus"), + Self::CallData(id) => write!(f, "calldata{id}"), + Self::ReturnData => write!(f, "returndata"), } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs index b41efebc905..8ce2e1a41c0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/statement.rs @@ -236,10 +236,11 @@ impl From for Expression { fn from(i: Ident) -> Expression { Expression { span: i.0.span(), - kind: ExpressionKind::Variable( - Path { span: i.span(), segments: vec![i], kind: PathKind::Plain }, - None, - ), + kind: ExpressionKind::Variable(Path { + span: i.span(), + segments: vec![PathSegment::from(i)], + kind: PathKind::Plain, + }), } } } @@ -362,18 +363,18 @@ impl UseTree { // it would most likely cause further errors during name resolution #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub struct Path { - pub segments: Vec, + pub segments: Vec, pub kind: PathKind, pub span: Span, } impl Path { - pub fn pop(&mut self) -> Ident { + pub fn pop(&mut self) -> PathSegment { self.segments.pop().unwrap() } fn join(mut self, ident: Ident) -> Path { - self.segments.push(ident); + self.segments.push(PathSegment::from(ident)); self } @@ -384,18 +385,37 @@ impl Path { } pub fn from_ident(name: Ident) -> Path { - Path { span: name.span(), segments: vec![name], kind: PathKind::Plain } + Path { span: name.span(), segments: vec![PathSegment::from(name)], kind: PathKind::Plain } } pub fn span(&self) -> Span { self.span } - pub fn last_segment(&self) -> Ident { + pub fn first_segment(&self) -> PathSegment { + assert!(!self.segments.is_empty()); + self.segments.first().unwrap().clone() + } + + pub fn last_segment(&self) -> PathSegment { assert!(!self.segments.is_empty()); self.segments.last().unwrap().clone() } + pub fn last_ident(&self) -> Ident { + self.last_segment().ident + } + + pub fn first_name(&self) -> &str { + 
assert!(!self.segments.is_empty()); + &self.segments.first().unwrap().ident.0.contents + } + + pub fn last_name(&self) -> &str { + assert!(!self.segments.is_empty()); + &self.segments.last().unwrap().ident.0.contents + } + pub fn is_ident(&self) -> bool { self.segments.len() == 1 && self.kind == PathKind::Plain } @@ -404,14 +424,14 @@ impl Path { if !self.is_ident() { return None; } - self.segments.first() + self.segments.first().map(|segment| &segment.ident) } pub fn to_ident(&self) -> Option { if !self.is_ident() { return None; } - self.segments.first().cloned() + self.segments.first().cloned().map(|segment| segment.ident) } pub fn as_string(&self) -> String { @@ -421,19 +441,58 @@ impl Path { match segments.next() { None => panic!("empty segment"), Some(seg) => { - string.push_str(&seg.0.contents); + string.push_str(&seg.ident.0.contents); } } for segment in segments { string.push_str("::"); - string.push_str(&segment.0.contents); + string.push_str(&segment.ident.0.contents); } string } } +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +pub struct PathSegment { + pub ident: Ident, + pub generics: Option>, + pub span: Span, +} + +impl PathSegment { + /// Returns the span where turbofish happen. For example: + /// + /// foo:: + /// ~^^^^ + /// + /// Returns an empty span at the end of `foo` if there's no turbofish. + pub fn turbofish_span(&self) -> Span { + Span::from(self.ident.span().end()..self.span.end()) + } +} + +impl From for PathSegment { + fn from(ident: Ident) -> PathSegment { + let span = ident.span(); + PathSegment { ident, generics: None, span } + } +} + +impl Display for PathSegment { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.ident.fmt(f)?; + + if let Some(generics) = &self.generics { + let generics = vecmap(generics, ToString::to_string); + write!(f, "::<{}>", generics.join(", "))?; + } + + Ok(()) + } +} + #[derive(Debug, PartialEq, Eq, Clone)] pub struct LetStatement { pub pattern: Pattern, @@ -517,7 +576,7 @@ impl Recoverable for Pattern { impl LValue { fn as_expression(&self) -> Expression { let kind = match self { - LValue::Ident(ident) => ExpressionKind::Variable(Path::from_ident(ident.clone()), None), + LValue::Ident(ident) => ExpressionKind::Variable(Path::from_ident(ident.clone())), LValue::MemberAccess { object, field_name, span: _ } => { ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { lhs: object.as_expression(), @@ -606,11 +665,12 @@ impl ForRange { }; // array.len() - let segments = vec![array_ident]; - let array_ident = ExpressionKind::Variable( - Path { segments, kind: PathKind::Plain, span: array_span }, - None, - ); + let segments = vec![PathSegment::from(array_ident)]; + let array_ident = ExpressionKind::Variable(Path { + segments, + kind: PathKind::Plain, + span: array_span, + }); let end_range = ExpressionKind::MethodCall(Box::new(MethodCallExpression { object: Expression::new(array_ident.clone(), array_span), @@ -626,11 +686,12 @@ impl ForRange { let fresh_identifier = Ident::new(index_name.clone(), array_span); // array[i] - let segments = vec![Ident::new(index_name, array_span)]; - let index_ident = ExpressionKind::Variable( - Path { segments, kind: PathKind::Plain, span: array_span }, - None, - ); + let segments = vec![PathSegment::from(Ident::new(index_name, array_span))]; + let index_ident = ExpressionKind::Variable(Path { + segments, + kind: PathKind::Plain, + span: array_span, + }); let loop_element = ExpressionKind::Index(Box::new(IndexExpression { collection: Expression::new(array_ident, 
array_span), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs index 112747e09fb..732cbee9232 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs @@ -14,7 +14,6 @@ pub struct NoirStruct { pub generics: UnresolvedGenerics, pub fields: Vec<(Ident, UnresolvedType)>, pub span: Span, - pub is_comptime: bool, } impl Display for NoirStruct { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs index b23fbaede61..f8f8ef667b4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs @@ -53,7 +53,6 @@ pub struct TypeImpl { pub generics: UnresolvedGenerics, pub where_clause: Vec, pub methods: Vec<(NoirFunction, Span)>, - pub is_comptime: bool, } /// Ast node for an implementation of a trait for a particular type @@ -70,8 +69,6 @@ pub struct NoirTraitImpl { pub where_clause: Vec, pub items: Vec, - - pub is_comptime: bool, } /// Represents a simple trait constraint such as `where Foo: TraitY` diff --git a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs index 443267380b5..598ffed1433 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs @@ -1,3 +1,4 @@ +use crate::ast::PathSegment; use crate::parser::{parse_program, ParsedModule}; use crate::{ ast, @@ -171,14 +172,11 @@ impl DebugInstrumenter { let last_stmt = if has_ret_expr { ast::Statement { kind: ast::StatementKind::Expression(ast::Expression { - kind: ast::ExpressionKind::Variable( - ast::Path { - segments: vec![ident("__debug_expr", span)], - kind: PathKind::Plain, - span, - }, - None, - ), + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![PathSegment::from(ident("__debug_expr", span))], + kind: PathKind::Plain, + span, + }), span, }), span, @@ -571,14 +569,11 @@ fn build_assign_var_stmt(var_id: SourceVarId, expr: ast::Expression) -> ast::Sta let span = expr.span; let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { func: Box::new(ast::Expression { - kind: ast::ExpressionKind::Variable( - ast::Path { - segments: vec![ident("__debug_var_assign", span)], - kind: PathKind::Plain, - span, - }, - None, - ), + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![PathSegment::from(ident("__debug_var_assign", span))], + kind: PathKind::Plain, + span, + }), span, }), is_macro_call: false, @@ -590,14 +585,11 @@ fn build_assign_var_stmt(var_id: SourceVarId, expr: ast::Expression) -> ast::Sta fn build_drop_var_stmt(var_id: SourceVarId, span: Span) -> ast::Statement { let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { func: Box::new(ast::Expression { - kind: ast::ExpressionKind::Variable( - ast::Path { - segments: vec![ident("__debug_var_drop", span)], - kind: PathKind::Plain, - span, - }, - None, - ), + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![PathSegment::from(ident("__debug_var_drop", span))], + kind: PathKind::Plain, + span, + }), span, }), is_macro_call: false, @@ -618,14 +610,14 @@ fn build_assign_member_stmt( let span = expr.span; let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { func: Box::new(ast::Expression { - kind: ast::ExpressionKind::Variable( - ast::Path { - segments: 
vec![ident(&format!["__debug_member_assign_{arity}"], span)], - kind: PathKind::Plain, + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![PathSegment::from(ident( + &format!["__debug_member_assign_{arity}"], span, - }, - None, - ), + ))], + kind: PathKind::Plain, + span, + }), span, }), is_macro_call: false, @@ -642,14 +634,11 @@ fn build_assign_member_stmt( fn build_debug_call_stmt(fname: &str, fn_id: DebugFnId, span: Span) -> ast::Statement { let kind = ast::ExpressionKind::Call(Box::new(ast::CallExpression { func: Box::new(ast::Expression { - kind: ast::ExpressionKind::Variable( - ast::Path { - segments: vec![ident(&format!["__debug_fn_{fname}"], span)], - kind: PathKind::Plain, - span, - }, - None, - ), + kind: ast::ExpressionKind::Variable(ast::Path { + segments: vec![PathSegment::from(ident(&format!["__debug_fn_{fname}"], span))], + kind: PathKind::Plain, + span, + }), span, }), is_macro_call: false, @@ -712,10 +701,11 @@ fn ident(s: &str, span: Span) -> ast::Ident { fn id_expr(id: &ast::Ident) -> ast::Expression { ast::Expression { - kind: ast::ExpressionKind::Variable( - Path { segments: vec![id.clone()], kind: PathKind::Plain, span: id.span() }, - None, - ), + kind: ast::ExpressionKind::Variable(Path { + segments: vec![PathSegment::from(id.clone())], + kind: PathKind::Plain, + span: id.span(), + }), span: id.span(), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs index 0cbd2db55da..afa2e7fa7a8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -1,9 +1,30 @@ -use std::mem::replace; +use std::{collections::BTreeMap, fmt::Display}; + +use chumsky::Parser; +use fm::FileId; +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; use crate::{ + hir::{ + comptime::{Interpreter, InterpreterError, Value}, + def_collector::{ + dc_crate::{ + CollectedItems, CompilationError, UnresolvedFunctions, UnresolvedStruct, + UnresolvedTrait, UnresolvedTraitImpl, + }, + dc_mod, + }, + resolution::errors::ResolverError, + }, hir_def::expr::HirIdent, - macros_api::Expression, - node_interner::{DependencyId, ExprId, FuncId}, + lexer::Lexer, + macros_api::{ + Expression, ExpressionKind, HirExpression, NodeInterner, SecondaryAttribute, StructId, + }, + node_interner::{DefinitionKind, DependencyId, FuncId, TraitId}, + parser::{self, TopLevelStatement}, + Type, TypeBindings, }; use super::{Elaborator, FunctionContext, ResolverMeta}; @@ -12,50 +33,47 @@ impl<'context> Elaborator<'context> { /// Elaborate an expression from the middle of a comptime scope. /// When this happens we require additional information to know /// what variables should be in scope. 
- pub fn elaborate_expression_from_comptime( - &mut self, - expr: Expression, - function: Option, - ) -> ExprId { - self.function_context.push(FunctionContext::default()); - let old_scope = self.scopes.end_function(); - self.scopes.start_function(); - let function_id = function.map(DependencyId::Function); - let old_item = replace(&mut self.current_item, function_id); - - // Note: recover_generics isn't good enough here because any existing generics - // should not be in scope of this new function - let old_generics = std::mem::take(&mut self.generics); - - let old_crate_and_module = function.map(|function| { - let meta = self.interner.function_meta(&function); - let old_crate = replace(&mut self.crate_id, meta.source_crate); - let old_module = replace(&mut self.local_module, meta.source_module); - self.introduce_generics_into_scope(meta.all_generics.clone()); - (old_crate, old_module) - }); - - self.populate_scope_from_comptime_scopes(); - let expr = self.elaborate_expression(expr).0; - - if let Some((old_crate, old_module)) = old_crate_and_module { - self.crate_id = old_crate; - self.local_module = old_module; + pub fn elaborate_item_from_comptime<'a, T>( + &'a mut self, + current_function: Option, + f: impl FnOnce(&mut Elaborator<'a>) -> T, + ) -> T { + // Create a fresh elaborator to ensure no state is changed from + // this elaborator + let mut elaborator = Elaborator::new( + self.interner, + self.def_maps, + self.crate_id, + self.debug_comptime_in_file, + self.enable_arithmetic_generics, + ); + + elaborator.function_context.push(FunctionContext::default()); + elaborator.scopes.start_function(); + + if let Some(function) = current_function { + let meta = elaborator.interner.function_meta(&function); + elaborator.current_item = Some(DependencyId::Function(function)); + elaborator.crate_id = meta.source_crate; + elaborator.local_module = meta.source_module; + elaborator.file = meta.source_file; + elaborator.introduce_generics_into_scope(meta.all_generics.clone()); } - self.generics = old_generics; - self.current_item = old_item; - self.scopes.end_function(); - self.scopes.0.push(old_scope); - self.check_and_pop_function_context(); - expr + elaborator.populate_scope_from_comptime_scopes(); + + let result = f(&mut elaborator); + elaborator.check_and_pop_function_context(); + + self.errors.append(&mut elaborator.errors); + result } fn populate_scope_from_comptime_scopes(&mut self) { // Take the comptime scope to be our runtime scope. // Iterate from global scope to the most local scope so that the // later definitions will naturally shadow the former. 
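
The comment above is the only ordering requirement in populate_scope_from_comptime_scopes: each definition is simply inserted into the runtime scope in turn, so walking from the global scope to the most local one lets later bindings overwrite earlier ones. A toy illustration of that shadowing by insertion order, with strings and integers standing in for definition ids and values:

    use std::collections::HashMap;

    /// Flatten a stack of scopes (global first, most local last) into one table.
    /// Later insertions win, so local definitions shadow global ones.
    fn flatten_scopes(scopes: &[HashMap<String, i32>]) -> HashMap<String, i32> {
        let mut flat = HashMap::new();
        for scope in scopes {
            for (name, value) in scope {
                flat.insert(name.clone(), *value);
            }
        }
        flat
    }

    fn main() {
        let global = HashMap::from([("x".to_string(), 1), ("y".to_string(), 2)]);
        let local = HashMap::from([("x".to_string(), 10)]);
        let flat = flatten_scopes(&[global, local]);
        assert_eq!(flat["x"], 10); // the local `x` shadows the global one
        assert_eq!(flat["y"], 2);
    }
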
- for scope in &self.comptime_scopes { + for scope in &self.interner.comptime_scopes { for definition_id in scope.keys() { let definition = self.interner.definition(*definition_id); let name = definition.name.clone(); @@ -68,4 +86,326 @@ impl<'context> Elaborator<'context> { } } } + + pub(super) fn run_comptime_attributes_on_item( + &mut self, + attributes: &[SecondaryAttribute], + item: Value, + span: Span, + generated_items: &mut CollectedItems, + ) { + for attribute in attributes { + if let SecondaryAttribute::Custom(name) = attribute { + if let Err(error) = + self.run_comptime_attribute_on_item(name, item.clone(), span, generated_items) + { + self.errors.push(error); + } + } + } + } + + fn run_comptime_attribute_on_item( + &mut self, + attribute: &str, + item: Value, + span: Span, + generated_items: &mut CollectedItems, + ) -> Result<(), (CompilationError, FileId)> { + let location = Location::new(span, self.file); + let Some((function, arguments)) = Self::parse_attribute(attribute, self.file)? else { + // Do not issue an error if the attribute is unknown + return Ok(()); + }; + + // Elaborate the function, rolling back any errors generated in case it is unknown + let error_count = self.errors.len(); + let function = self.elaborate_expression(function).0; + self.errors.truncate(error_count); + + let definition_id = match self.interner.expression(&function) { + HirExpression::Ident(ident, _) => ident.id, + _ => return Ok(()), + }; + + let Some(definition) = self.interner.try_definition(definition_id) else { + // If there's no such function, don't return an error. + // This preserves backwards compatibility in allowing custom attributes that + // do not refer to comptime functions. + return Ok(()); + }; + + let DefinitionKind::Function(function) = definition.kind else { + return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); + }; + + let mut interpreter = self.setup_interpreter(); + let mut arguments = + Self::handle_attribute_arguments(&mut interpreter, function, arguments, location) + .map_err(|error| { + let file = error.get_location().file; + (error.into(), file) + })?; + + arguments.insert(0, (item, location)); + + let value = interpreter + .call_function(function, arguments, TypeBindings::new(), location) + .map_err(|error| error.into_compilation_error_pair())?; + + if value != Value::Unit { + let items = value + .into_top_level_items(location, self.interner) + .map_err(|error| error.into_compilation_error_pair())?; + + self.add_items(items, generated_items, location); + } + + Ok(()) + } + + /// Parses an attribute in the form of a function call (e.g. `#[foo(a b, c d)]`) into + /// the function and quoted arguments called (e.g. 
`("foo", vec![(a b, location), (c d, location)])`) + #[allow(clippy::type_complexity)] + fn parse_attribute( + annotation: &str, + file: FileId, + ) -> Result)>, (CompilationError, FileId)> { + let (tokens, mut lexing_errors) = Lexer::lex(annotation); + if !lexing_errors.is_empty() { + return Err((lexing_errors.swap_remove(0).into(), file)); + } + + let expression = parser::expression() + .parse(tokens) + .map_err(|mut errors| (errors.swap_remove(0).into(), file))?; + + Ok(match expression.kind { + ExpressionKind::Call(call) => Some((*call.func, call.arguments)), + ExpressionKind::Variable(_) => Some((expression, Vec::new())), + _ => None, + }) + } + + fn handle_attribute_arguments( + interpreter: &mut Interpreter, + function: FuncId, + arguments: Vec, + location: Location, + ) -> Result, InterpreterError> { + let meta = interpreter.elaborator.interner.function_meta(&function); + let mut parameters = vecmap(&meta.parameters.0, |(_, typ, _)| typ.clone()); + + // Remove the initial parameter for the comptime item since that is not included + // in `arguments` at this point. + parameters.remove(0); + + // If the function is varargs, push the type of the last slice element N times + // to account for N extra arguments. + let modifiers = interpreter.elaborator.interner.function_modifiers(&function); + let is_varargs = modifiers.attributes.is_varargs(); + let varargs_type = if is_varargs { parameters.pop() } else { None }; + + let varargs_elem_type = varargs_type.as_ref().and_then(|t| t.slice_element_type()); + + let mut new_arguments = Vec::with_capacity(arguments.len()); + let mut varargs = im::Vector::new(); + + for (i, arg) in arguments.into_iter().enumerate() { + let param_type = parameters.get(i).or(varargs_elem_type).unwrap_or(&Type::Error); + + let mut push_arg = |arg| { + if i >= parameters.len() { + varargs.push_back(arg); + } else { + new_arguments.push((arg, location)); + } + }; + + if *param_type == Type::Quoted(crate::QuotedType::TraitDefinition) { + let trait_id = match arg.kind { + ExpressionKind::Variable(path) => interpreter + .elaborator + .resolve_trait_by_path(path) + .ok_or(InterpreterError::FailedToResolveTraitDefinition { location }), + _ => Err(InterpreterError::TraitDefinitionMustBeAPath { location }), + }?; + push_arg(Value::TraitDefinition(trait_id)); + } else { + let expr_id = interpreter.elaborator.elaborate_expression(arg).0; + push_arg(interpreter.evaluate(expr_id)?); + } + } + + if is_varargs { + let typ = varargs_type.unwrap_or(Type::Error); + new_arguments.push((Value::Slice(varargs, typ), location)); + } + + Ok(new_arguments) + } + + fn add_items( + &mut self, + items: Vec, + generated_items: &mut CollectedItems, + location: Location, + ) { + for item in items { + self.add_item(item, generated_items, location); + } + } + + fn add_item( + &mut self, + item: TopLevelStatement, + generated_items: &mut CollectedItems, + location: Location, + ) { + match item { + TopLevelStatement::Function(function) => { + let id = self.interner.push_empty_fn(); + let module = self.module_id(); + self.interner.push_function(id, &function.def, module, location); + let functions = vec![(self.local_module, id, function)]; + generated_items.functions.push(UnresolvedFunctions { + file_id: self.file, + functions, + trait_id: None, + self_type: None, + }); + } + TopLevelStatement::TraitImpl(mut trait_impl) => { + let methods = dc_mod::collect_trait_impl_functions( + self.interner, + &mut trait_impl, + self.crate_id, + self.file, + self.local_module, + ); + + 
generated_items.trait_impls.push(UnresolvedTraitImpl { + file_id: self.file, + module_id: self.local_module, + trait_generics: trait_impl.trait_generics, + trait_path: trait_impl.trait_name, + object_type: trait_impl.object_type, + methods, + generics: trait_impl.impl_generics, + where_clause: trait_impl.where_clause, + + // These last fields are filled in later + trait_id: None, + impl_id: None, + resolved_object_type: None, + resolved_generics: Vec::new(), + resolved_trait_generics: Vec::new(), + }); + } + TopLevelStatement::Global(global) => { + let (global, error) = dc_mod::collect_global( + self.interner, + self.def_maps.get_mut(&self.crate_id).unwrap(), + global, + self.file, + self.local_module, + self.crate_id, + ); + + generated_items.globals.push(global); + if let Some(error) = error { + self.errors.push(error); + } + } + // Assume that an error has already been issued + TopLevelStatement::Error => (), + + TopLevelStatement::Module(_) + | TopLevelStatement::Import(_) + | TopLevelStatement::Struct(_) + | TopLevelStatement::Trait(_) + | TopLevelStatement::Impl(_) + | TopLevelStatement::TypeAlias(_) + | TopLevelStatement::SubModule(_) => { + let item = item.to_string(); + let error = InterpreterError::UnsupportedTopLevelItemUnquote { item, location }; + self.errors.push(error.into_compilation_error_pair()); + } + } + } + + pub fn setup_interpreter<'local>(&'local mut self) -> Interpreter<'local, 'context> { + let current_function = match self.current_item { + Some(DependencyId::Function(function)) => Some(function), + _ => None, + }; + Interpreter::new(self, self.crate_id, current_function) + } + + pub(super) fn debug_comptime T>( + &mut self, + location: Location, + mut expr_f: F, + ) { + if Some(location.file) == self.debug_comptime_in_file { + let displayed_expr = expr_f(self.interner); + self.errors.push(( + InterpreterError::debug_evaluate_comptime(displayed_expr, location).into(), + location.file, + )); + } + } + + /// Run all the attributes on each item. The ordering is unspecified to users but currently + /// we run trait attributes first to (e.g.) register derive handlers before derive is + /// called on structs. + /// Returns any new items generated by attributes. 
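
parse_attribute above accepts either a bare attribute (#[foo]) or a call-shaped one (#[foo(a, b)]) and yields the callee expression plus its unelaborated argument expressions. The real implementation lexes and parses the attribute text with Noir's own parser; the helper below is only a rough, string-level approximation of the same splitting:

    /// Split an attribute body such as "foo(a, b)" into its callee name and raw
    /// argument strings; a bare "foo" yields no arguments. This is a toy string
    /// version of what the compiler does by lexing and parsing the attribute.
    fn split_attribute(attr: &str) -> Option<(&str, Vec<&str>)> {
        match attr.find('(') {
            None => Some((attr.trim(), Vec::new())),
            Some(open) => {
                let close = attr.rfind(')')?;
                let name = attr[..open].trim();
                let args = attr[open + 1..close]
                    .split(',')
                    .map(str::trim)
                    .filter(|arg| !arg.is_empty())
                    .collect();
                Some((name, args))
            }
        }
    }

    fn main() {
        assert_eq!(
            split_attribute("derive(Eq, Default)"),
            Some(("derive", vec!["Eq", "Default"]))
        );
        assert_eq!(split_attribute("my_attr"), Some(("my_attr", vec![])));
    }
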
+ pub(super) fn run_attributes( + &mut self, + traits: &BTreeMap, + types: &BTreeMap, + functions: &[UnresolvedFunctions], + ) -> CollectedItems { + let mut generated_items = CollectedItems::default(); + + for (trait_id, trait_) in traits { + let attributes = &trait_.trait_def.attributes; + let item = Value::TraitDefinition(*trait_id); + let span = trait_.trait_def.span; + self.local_module = trait_.module_id; + self.file = trait_.file_id; + self.run_comptime_attributes_on_item(attributes, item, span, &mut generated_items); + } + + for (struct_id, struct_def) in types { + let attributes = &struct_def.struct_def.attributes; + let item = Value::StructDefinition(*struct_id); + let span = struct_def.struct_def.span; + self.local_module = struct_def.module_id; + self.file = struct_def.file_id; + self.run_comptime_attributes_on_item(attributes, item, span, &mut generated_items); + } + + self.run_attributes_on_functions(functions, &mut generated_items); + generated_items + } + + fn run_attributes_on_functions( + &mut self, + function_sets: &[UnresolvedFunctions], + generated_items: &mut CollectedItems, + ) { + for function_set in function_sets { + self.file = function_set.file_id; + self.self_type = function_set.self_type.clone(); + + for (local_module, function_id, function) in &function_set.functions { + self.local_module = *local_module; + let attributes = function.secondary_attributes(); + let item = Value::FunctionDefinition(*function_id); + let span = function.span(); + self.run_comptime_attributes_on_item(attributes, item, span, generated_items); + } + } + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index 853098ce931..6e2756f0301 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -35,11 +35,11 @@ use crate::{ use super::{Elaborator, LambdaContext}; impl<'context> Elaborator<'context> { - pub(super) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { + pub(crate) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { let (hir_expr, typ) = match expr.kind { ExpressionKind::Literal(literal) => self.elaborate_literal(literal, expr.span), ExpressionKind::Block(block) => self.elaborate_block(block), - ExpressionKind::Prefix(prefix) => return self.elaborate_prefix(*prefix), + ExpressionKind::Prefix(prefix) => return self.elaborate_prefix(*prefix, expr.span), ExpressionKind::Index(index) => self.elaborate_index(*index), ExpressionKind::Call(call) => self.elaborate_call(*call, expr.span), ExpressionKind::MethodCall(call) => self.elaborate_method_call(*call, expr.span), @@ -50,9 +50,7 @@ impl<'context> Elaborator<'context> { ExpressionKind::Cast(cast) => self.elaborate_cast(*cast, expr.span), ExpressionKind::Infix(infix) => return self.elaborate_infix(*infix, expr.span), ExpressionKind::If(if_) => self.elaborate_if(*if_), - ExpressionKind::Variable(variable, generics) => { - return self.elaborate_variable(variable, generics) - } + ExpressionKind::Variable(variable) => return self.elaborate_variable(variable), ExpressionKind::Tuple(tuple) => self.elaborate_tuple(tuple), ExpressionKind::Lambda(lambda) => self.elaborate_lambda(*lambda), ExpressionKind::Parenthesized(expr) => return self.elaborate_expression(*expr), @@ -227,8 +225,7 @@ impl<'context> Elaborator<'context> { (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) } - 
fn elaborate_prefix(&mut self, prefix: PrefixExpression) -> (ExprId, Type) { - let span = prefix.rhs.span; + fn elaborate_prefix(&mut self, prefix: PrefixExpression, span: Span) -> (ExprId, Type) { let (rhs, rhs_type) = self.elaborate_expression(prefix.rhs); let trait_id = self.interner.get_prefix_operator_trait_method(&prefix.operator); @@ -352,7 +349,7 @@ impl<'context> Elaborator<'context> { &mut object, ); - self.resolve_turbofish_generics(&func_id, method_call.generics, span) + self.resolve_function_turbofish_generics(&func_id, method_call.generics, span) } else { None }; @@ -410,9 +407,12 @@ impl<'context> Elaborator<'context> { &mut self, constructor: ConstructorExpression, ) -> (HirExpression, Type) { + let exclude_last_segment = true; + self.check_unsupported_turbofish_usage(&constructor.type_name, exclude_last_segment); + let span = constructor.type_name.span(); let last_segment = constructor.type_name.last_segment(); - let is_self_type = last_segment.is_self_type_name(); + let is_self_type = last_segment.ident.is_self_type_name(); let (r#type, struct_generics) = if let Some(struct_id) = constructor.struct_type { let typ = self.interner.get_struct(struct_id); @@ -429,6 +429,15 @@ impl<'context> Elaborator<'context> { } }; + let turbofish_span = last_segment.turbofish_span(); + + let struct_generics = self.resolve_struct_turbofish_generics( + &r#type.borrow(), + struct_generics, + last_segment.generics, + turbofish_span, + ); + let struct_type = r#type.clone(); let generics = struct_generics.clone(); @@ -443,7 +452,7 @@ impl<'context> Elaborator<'context> { }); let struct_id = struct_type.borrow().id; - let reference_location = Location::new(last_segment.span(), self.file); + let reference_location = Location::new(last_segment.ident.span(), self.file); self.interner.add_struct_reference(struct_id, reference_location, is_self_type); (expr, Type::Struct(struct_type, generics)) @@ -544,7 +553,7 @@ impl<'context> Elaborator<'context> { fn elaborate_cast(&mut self, cast: CastExpression, span: Span) -> (HirExpression, Type) { let (lhs, lhs_type) = self.elaborate_expression(cast.lhs); let r#type = self.resolve_type(cast.r#type); - let result = self.check_cast(lhs_type, &r#type, span); + let result = self.check_cast(&lhs_type, &r#type, span); let expr = HirExpression::Cast(HirCastExpression { lhs, r#type }); (expr, result) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index e0affad1fbf..873da5a0c5e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -1,20 +1,16 @@ use std::{ collections::{BTreeMap, BTreeSet}, - fmt::Display, rc::Rc, }; use crate::{ ast::{FunctionKind, UnresolvedTraitConstraint}, hir::{ - comptime::{Interpreter, InterpreterError, Value}, - def_collector::{ - dc_crate::{ - filter_literal_globals, CompilationError, ImplMap, UnresolvedGlobal, - UnresolvedStruct, UnresolvedTypeAlias, - }, - dc_mod, + def_collector::dc_crate::{ + filter_literal_globals, CompilationError, ImplMap, UnresolvedGlobal, UnresolvedStruct, + UnresolvedTypeAlias, }, + def_map::DefMaps, resolution::{errors::ResolverError, path_resolver::PathResolver}, scope::ScopeForest as GenericScopeForest, type_check::TypeCheckError, @@ -25,18 +21,14 @@ use crate::{ traits::TraitConstraint, types::{Generics, Kind, ResolvedGeneric}, }, - lexer::Lexer, macros_api::{ BlockExpression, Ident, NodeInterner, NoirFunction, NoirStruct, Pattern, 
SecondaryAttribute, StructId, }, node_interner::{ - DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, ReferenceId, TraitId, - TypeAliasId, + DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, ReferenceId, TraitId, TypeAliasId, }, - parser::TopLevelStatement, - token::Tokens, - Shared, Type, TypeBindings, TypeVariable, + Shared, Type, TypeVariable, }; use crate::{ ast::{TraitBound, UnresolvedGeneric, UnresolvedGenerics}, @@ -54,7 +46,7 @@ use crate::{ use crate::{ hir::{ def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}, - def_map::{CrateDefMap, ModuleData}, + def_map::ModuleData, }, hir_def::traits::TraitImpl, macros_api::ItemVisibility, @@ -74,7 +66,6 @@ mod unquote; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{Location, Span}; -use rustc_hash::FxHashMap as HashMap; use self::traits::check_trait_impl_method_matches_declaration; @@ -102,7 +93,7 @@ pub struct Elaborator<'context> { pub(crate) interner: &'context mut NodeInterner, - def_maps: &'context mut BTreeMap, + def_maps: &'context mut DefMaps, file: FileId, @@ -130,8 +121,6 @@ pub struct Elaborator<'context> { /// to the corresponding trait impl ID. current_trait_impl: Option, - trait_id: Option, - /// In-resolution names /// /// This needs to be a set because we can have multiple in-resolution @@ -165,11 +154,6 @@ pub struct Elaborator<'context> { crate_id: CrateId, - /// Each value currently in scope in the comptime interpreter. - /// Each element of the Vec represents a scope with every scope together making - /// up all currently visible definitions. The first scope is always the global scope. - pub(crate) comptime_scopes: Vec>, - /// The scope of --debug-comptime, or None if unset debug_comptime_in_file: Option, @@ -177,6 +161,9 @@ pub struct Elaborator<'context> { /// This map is used to lazily evaluate these globals if they're encountered before /// they are elaborated (e.g. in a function's type or another global's RHS). 
unresolved_globals: BTreeMap, + + /// Temporary flag to enable the experimental arithmetic generics feature + enable_arithmetic_generics: bool, } #[derive(Default)] @@ -195,41 +182,65 @@ struct FunctionContext { impl<'context> Elaborator<'context> { pub fn new( - context: &'context mut Context, + interner: &'context mut NodeInterner, + def_maps: &'context mut DefMaps, crate_id: CrateId, debug_comptime_in_file: Option, + enable_arithmetic_generics: bool, ) -> Self { Self { scopes: ScopeForest::default(), errors: Vec::new(), - interner: &mut context.def_interner, - def_maps: &mut context.def_maps, + interner, + def_maps, file: FileId::dummy(), nested_loops: 0, generics: Vec::new(), lambda_stack: Vec::new(), self_type: None, current_item: None, - trait_id: None, local_module: LocalModuleId::dummy_id(), crate_id, resolving_ids: BTreeSet::new(), trait_bounds: Vec::new(), function_context: vec![FunctionContext::default()], current_trait_impl: None, - comptime_scopes: vec![HashMap::default()], debug_comptime_in_file, unresolved_globals: BTreeMap::new(), + enable_arithmetic_generics, } } + pub fn from_context( + context: &'context mut Context, + crate_id: CrateId, + debug_comptime_in_file: Option, + enable_arithmetic_generics: bool, + ) -> Self { + Self::new( + &mut context.def_interner, + &mut context.def_maps, + crate_id, + debug_comptime_in_file, + enable_arithmetic_generics, + ) + } + pub fn elaborate( context: &'context mut Context, crate_id: CrateId, items: CollectedItems, debug_comptime_in_file: Option, + enable_arithmetic_generics: bool, ) -> Vec<(CompilationError, FileId)> { - Self::elaborate_and_return_self(context, crate_id, items, debug_comptime_in_file).errors + Self::elaborate_and_return_self( + context, + crate_id, + items, + debug_comptime_in_file, + enable_arithmetic_generics, + ) + .errors } pub fn elaborate_and_return_self( @@ -237,17 +248,16 @@ impl<'context> Elaborator<'context> { crate_id: CrateId, items: CollectedItems, debug_comptime_in_file: Option, + enable_arithmetic_generics: bool, ) -> Self { - let mut this = Self::new(context, crate_id, debug_comptime_in_file); - - // Filter out comptime items to execute their functions first if needed. - // This step is why comptime items can only refer to other comptime items - // in the same crate, but can refer to any item in dependencies. Trying to - // run these at the same time as other items would lead to them seeing empty - // function bodies from functions that have yet to be elaborated. - let (comptime_items, runtime_items) = Self::filter_comptime_items(items); - this.elaborate_items(comptime_items); - this.elaborate_items(runtime_items); + let mut this = Self::from_context( + context, + crate_id, + debug_comptime_in_file, + enable_arithmetic_generics, + ); + this.elaborate_items(items); + this.check_and_pop_function_context(); this } @@ -271,11 +281,11 @@ impl<'context> Elaborator<'context> { } // Must resolve structs before we resolve globals. - let mut generated_items = self.collect_struct_definitions(items.types); + self.collect_struct_definitions(&items.types); self.define_function_metas(&mut items.functions, &mut items.impls, &mut items.trait_impls); - self.collect_traits(items.traits, &mut generated_items); + self.collect_traits(&items.traits); // Before we resolve any function symbols we must go through our impls and // re-collect the methods within into their proper module. 
This cannot be @@ -299,7 +309,7 @@ impl<'context> Elaborator<'context> { // We have to run any comptime attributes on functions before the function is elaborated // since the generated items are checked beforehand as well. - self.run_attributes_on_functions(&items.functions, &mut generated_items); + let generated_items = self.run_attributes(&items.traits, &items.types, &items.functions); // After everything is collected, we can elaborate our generated items. // It may be better to inline these within `items` entirely since elaborating them @@ -336,17 +346,12 @@ impl<'context> Elaborator<'context> { } fn elaborate_functions(&mut self, functions: UnresolvedFunctions) { - self.file = functions.file_id; - self.trait_id = functions.trait_id; // TODO: Resolve? - self.self_type = functions.self_type; - - for (local_module, id, _) in functions.functions { - self.local_module = local_module; - self.recover_generics(|this| this.elaborate_function(id)); + for (_, id, _) in functions.functions { + self.elaborate_function(id); } + self.generics.clear(); self.self_type = None; - self.trait_id = None; } fn introduce_generics_into_scope(&mut self, all_generics: Vec) { @@ -364,7 +369,7 @@ impl<'context> Elaborator<'context> { self.generics = all_generics; } - fn elaborate_function(&mut self, id: FuncId) { + pub(crate) fn elaborate_function(&mut self, id: FuncId) { let func_meta = self.interner.func_meta.get_mut(&id); let func_meta = func_meta.expect("FuncMetas should be declared before a function is elaborated"); @@ -377,11 +382,21 @@ impl<'context> Elaborator<'context> { FunctionBody::Resolving => return, }; + let func_meta = func_meta.clone(); + + assert_eq!( + self.crate_id, func_meta.source_crate, + "Functions in other crates should be already elaborated" + ); + + self.local_module = func_meta.source_module; + self.file = func_meta.source_file; + self.self_type = func_meta.self_type.clone(); + self.current_trait_impl = func_meta.trait_impl; + self.scopes.start_function(); let old_item = std::mem::replace(&mut self.current_item, Some(DependencyId::Function(id))); - let func_meta = func_meta.clone(); - self.trait_bounds = func_meta.trait_constraints.clone(); self.function_context.push(FunctionContext::default()); @@ -508,35 +523,72 @@ impl<'context> Elaborator<'context> { /// Each generic will have a fresh Shared associated with it. 
pub fn add_generics(&mut self, generics: &UnresolvedGenerics) -> Generics { vecmap(generics, |generic| { - // Map the generic to a fresh type variable - let id = self.interner.next_type_variable_id(); - let typevar = TypeVariable::unbound(id); - let ident = generic.ident(); - let span = ident.0.span(); + let mut is_error = false; + let (type_var, name, kind) = match self.resolve_generic(generic) { + Ok(values) => values, + Err(error) => { + self.push_err(error); + is_error = true; + let id = self.interner.next_type_variable_id(); + (TypeVariable::unbound(id), Rc::new("(error)".into()), Kind::Normal) + } + }; - // Resolve the generic's kind - let kind = self.resolve_generic_kind(generic); + let span = generic.span(); + let name_owned = name.as_ref().clone(); + let resolved_generic = ResolvedGeneric { name, type_var, kind, span }; // Check for name collisions of this generic - let name = Rc::new(ident.0.contents.clone()); - - let resolved_generic = - ResolvedGeneric { name: name.clone(), type_var: typevar.clone(), kind, span }; - - if let Some(generic) = self.find_generic(&name) { - self.push_err(ResolverError::DuplicateDefinition { - name: ident.0.contents.clone(), - first_span: generic.span, - second_span: span, - }); - } else { - self.generics.push(resolved_generic.clone()); + // Checking `is_error` here prevents DuplicateDefinition errors when + // we have multiple generics from macros which fail to resolve and + // are all given the same default name "(error)". + if !is_error { + if let Some(generic) = self.find_generic(&name_owned) { + self.push_err(ResolverError::DuplicateDefinition { + name: name_owned, + first_span: generic.span, + second_span: span, + }); + } else { + self.generics.push(resolved_generic.clone()); + } } resolved_generic }) } + fn resolve_generic( + &mut self, + generic: &UnresolvedGeneric, + ) -> Result<(TypeVariable, Rc, Kind), ResolverError> { + // Map the generic to a fresh type variable + match generic { + UnresolvedGeneric::Variable(_) | UnresolvedGeneric::Numeric { .. } => { + let id = self.interner.next_type_variable_id(); + let typevar = TypeVariable::unbound(id); + let ident = generic.ident(); + + let kind = self.resolve_generic_kind(generic); + let name = Rc::new(ident.0.contents.clone()); + Ok((typevar, name, kind)) + } + // An already-resolved generic is only possible if it is the result of a + // previous macro call being inserted into a generics list. + UnresolvedGeneric::Resolved(id, span) => { + match self.interner.get_quoted_type(*id).follow_bindings() { + Type::NamedGeneric(type_variable, name, kind) => { + Ok((type_variable, name, kind)) + } + other => Err(ResolverError::MacroResultInGenericsListNotAGeneric { + span: *span, + typ: other.clone(), + }), + } + } + } + } + /// Return the kind of an unresolved generic. /// If a numeric generic has been specified, resolve the annotated type to make /// sure only primitive numeric types are being used. 
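
add_generics above now recovers from generics that fail to resolve (for example, a macro-spliced UnresolvedGeneric::Resolved that does not refer to a named generic): it records the error, substitutes a placeholder, and skips the duplicate-name check for placeholders so that several failures do not also trigger spurious duplicate-definition errors. A condensed sketch of that recovery strategy, with a toy Generic type and resolver standing in for the elaborator's machinery:

    #[derive(Debug)]
    struct Generic {
        name: String,
    }

    fn add_generics(
        names: &[&str],
        resolve: impl Fn(&str) -> Result<Generic, String>,
        errors: &mut Vec<String>,
    ) -> Vec<Generic> {
        let mut seen: Vec<String> = Vec::new();
        names
            .iter()
            .map(|&name| {
                let (generic, is_error) = match resolve(name) {
                    Ok(generic) => (generic, false),
                    Err(error) => {
                        errors.push(error);
                        (Generic { name: "(error)".to_string() }, true)
                    }
                };
                // Skipping the duplicate check for placeholders avoids a cascade of
                // "duplicate definition" errors when several generics fail at once.
                if !is_error {
                    if seen.contains(&generic.name) {
                        errors.push(format!("duplicate generic `{}`", generic.name));
                    } else {
                        seen.push(generic.name.clone());
                    }
                }
                generic
            })
            .collect()
    }

    fn main() {
        let mut errors = Vec::new();
        let resolve = |name: &str| {
            if name.starts_with('$') {
                Err(format!("could not resolve `{name}`"))
            } else {
                Ok(Generic { name: name.to_string() })
            }
        };
        // Two unresolvable generics produce two resolution errors, but no extra
        // "duplicate `(error)`" error on top of them.
        let generics = add_generics(&["T", "$a", "$b"], resolve, &mut errors);
        assert_eq!(generics.len(), 3);
        assert_eq!(errors.len(), 2);
    }
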
@@ -608,7 +660,11 @@ impl<'context> Elaborator<'context> { self.resolve_trait_bound(&constraint.trait_bound, typ) } - fn resolve_trait_bound(&mut self, bound: &TraitBound, typ: Type) -> Option { + pub fn resolve_trait_bound( + &mut self, + bound: &TraitBound, + typ: Type, + ) -> Option { let the_trait = self.lookup_trait_or_error(bound.trait_path.clone())?; let resolved_generics = &the_trait.generics.clone(); @@ -774,6 +830,8 @@ impl<'context> Elaborator<'context> { source_crate: self.crate_id, source_module: self.local_module, function_body: FunctionBody::Unresolved(func.kind, body, func.def.span), + self_type: self.self_type.clone(), + source_file: self.file, }; self.interner.push_fn_meta(meta, func_id); @@ -953,7 +1011,16 @@ impl<'context> Elaborator<'context> { self.collect_trait_impl_methods(trait_id, trait_impl, &where_clause); - let span = trait_impl.object_type.span.expect("All trait self types should have spans"); + let span = trait_impl.object_type.span; + + let span = if let Some(span) = span { + span + } else if self.interner.is_in_lsp_mode() { + // A span might not be set if this was generated by a macro + Default::default() + } else { + span.expect("All trait self types should have spans") + }; self.declare_methods_on_struct(true, &mut trait_impl.methods, span); let methods = trait_impl.methods.function_ids(); @@ -964,7 +1031,7 @@ impl<'context> Elaborator<'context> { let trait_generics = trait_impl.resolved_trait_generics.clone(); let resolved_trait_impl = Shared::new(TraitImpl { - ident: trait_impl.trait_path.last_segment().clone(), + ident: trait_impl.trait_path.last_ident(), typ: self_type.clone(), trait_id, trait_generics: trait_generics.clone(), @@ -1002,10 +1069,7 @@ impl<'context> Elaborator<'context> { self.self_type = None; } - fn get_module_mut( - def_maps: &mut BTreeMap, - module: ModuleId, - ) -> &mut ModuleData { + fn get_module_mut(def_maps: &mut DefMaps, module: ModuleId) -> &mut ModuleData { let message = "A crate should always be present for a given crate id"; &mut def_maps.get_mut(&module.krate).expect(message).modules[module.local_id.0] } @@ -1102,30 +1166,20 @@ impl<'context> Elaborator<'context> { self.generics.clear(); } - fn collect_struct_definitions( - &mut self, - structs: BTreeMap, - ) -> CollectedItems { + fn collect_struct_definitions(&mut self, structs: &BTreeMap) { // This is necessary to avoid cloning the entire struct map // when adding checks after each struct field is resolved. let struct_ids = structs.keys().copied().collect::>(); - // This will contain any additional top-level items that are generated at compile-time - // via macros. This often includes derived trait impls. - let mut generated_items = CollectedItems::default(); - // Resolve each field in each struct. // Each struct should already be present in the NodeInterner after def collection. 
- for (type_id, mut typ) in structs { + for (type_id, typ) in structs { self.file = typ.file_id; self.local_module = typ.module_id; - let attributes = std::mem::take(&mut typ.struct_def.attributes); - let span = typ.struct_def.span; - - let fields = self.resolve_struct_fields(typ.struct_def, type_id); + let fields = self.resolve_struct_fields(&typ.struct_def, *type_id); let fields_len = fields.len(); - self.interner.update_struct(type_id, |struct_def| { + self.interner.update_struct(*type_id, |struct_def| { struct_def.set_fields(fields); // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this with implicit numeric generics @@ -1152,12 +1206,11 @@ impl<'context> Elaborator<'context> { }); for field_index in 0..fields_len { - self.interner - .add_definition_location(ReferenceId::StructMember(type_id, field_index), None); + self.interner.add_definition_location( + ReferenceId::StructMember(*type_id, field_index), + None, + ); } - - let item = Value::StructDefinition(type_id); - self.run_comptime_attributes_on_item(&attributes, item, span, &mut generated_items); } // Check whether the struct fields have nested slices @@ -1179,125 +1232,11 @@ impl<'context> Elaborator<'context> { } } } - - generated_items - } - - fn run_comptime_attributes_on_item( - &mut self, - attributes: &[SecondaryAttribute], - item: Value, - span: Span, - generated_items: &mut CollectedItems, - ) { - for attribute in attributes { - if let SecondaryAttribute::Custom(name) = attribute { - if let Err(error) = - self.run_comptime_attribute_on_item(name, item.clone(), span, generated_items) - { - self.errors.push(error); - } - } - } - } - - fn run_comptime_attribute_on_item( - &mut self, - attribute: &str, - item: Value, - span: Span, - generated_items: &mut CollectedItems, - ) -> Result<(), (CompilationError, FileId)> { - let location = Location::new(span, self.file); - let (function_name, mut arguments) = Self::parse_attribute(attribute, location) - .unwrap_or_else(|| (attribute.to_string(), Vec::new())); - - let Ok(id) = self.lookup_global(Path::from_single(function_name, span)) else { - // Do not issue an error if the attribute is unknown - return Ok(()); - }; - - let definition = self.interner.definition(id); - let DefinitionKind::Function(function) = definition.kind else { - return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); - }; - - self.handle_varargs_attribute(function, &mut arguments, location); - arguments.insert(0, (item, location)); - - let mut interpreter = self.setup_interpreter(); - - let value = interpreter - .call_function(function, arguments, TypeBindings::new(), location) - .map_err(|error| error.into_compilation_error_pair())?; - - if value != Value::Unit { - let items = value - .into_top_level_items(location) - .map_err(|error| error.into_compilation_error_pair())?; - - self.add_items(items, generated_items, location); - } - - Ok(()) - } - - /// Parses an attribute in the form of a function call (e.g. `#[foo(a b, c d)]`) into - /// the function and quoted arguments called (e.g. 
`("foo", vec![(a b, location), (c d, location)])`) - fn parse_attribute( - annotation: &str, - location: Location, - ) -> Option<(String, Vec<(Value, Location)>)> { - let (tokens, errors) = Lexer::lex(annotation); - if !errors.is_empty() { - return None; - } - - let mut tokens = tokens.0; - if tokens.len() >= 4 { - // Remove the outer `ident ( )` wrapping the function arguments - let first = tokens.remove(0).into_token(); - let second = tokens.remove(0).into_token(); - - // Last token is always an EndOfInput - let _ = tokens.pop().unwrap().into_token(); - let last = tokens.pop().unwrap().into_token(); - - use crate::lexer::token::Token::*; - if let (Ident(name), LeftParen, RightParen) = (first, second, last) { - let args = tokens.split(|token| *token.token() == Comma); - let args = - vecmap(args, |arg| (Value::Code(Rc::new(Tokens(arg.to_vec()))), location)); - return Some((name, args)); - } - } - - None - } - - /// Checks if the given attribute function is a varargs function. - /// If so, we should pass its arguments in one slice rather than as separate arguments. - fn handle_varargs_attribute( - &mut self, - function: FuncId, - arguments: &mut Vec<(Value, Location)>, - location: Location, - ) { - let meta = self.interner.function_meta(&function); - let parameters = &meta.parameters.0; - - // If the last parameter is a slice, this is a varargs function. - if parameters.last().map_or(false, |(_, typ, _)| matches!(typ, Type::Slice(_))) { - let typ = Type::Slice(Box::new(Type::Quoted(crate::QuotedType::Quoted))); - let slice_elements = arguments.drain(..).map(|(value, _)| value); - let slice = Value::Slice(slice_elements.collect(), typ); - arguments.push((slice, location)); - } } pub fn resolve_struct_fields( &mut self, - unresolved: NoirStruct, + unresolved: &NoirStruct, struct_id: StructId, ) -> Vec<(Ident, Type)> { self.recover_generics(|this| { @@ -1308,7 +1247,9 @@ impl<'context> Elaborator<'context> { let struct_def = this.interner.get_struct(struct_id); this.add_existing_generics(&unresolved.generics, &struct_def.borrow().generics); - let fields = vecmap(unresolved.fields, |(ident, typ)| (ident, this.resolve_type(typ))); + let fields = vecmap(&unresolved.fields, |(ident, typ)| { + (ident.clone(), this.resolve_type(typ.clone())) + }); this.resolving_ids.remove(&struct_id); @@ -1417,6 +1358,11 @@ impl<'context> Elaborator<'context> { self.add_generics(&trait_impl.generics); trait_impl.resolved_generics = self.generics.clone(); + for (_, _, method) in trait_impl.methods.functions.iter_mut() { + // Attach any trait constraints on the impl to the function + method.def.where_clause.append(&mut trait_impl.where_clause.clone()); + } + // Fetch trait constraints here let trait_generics = trait_impl .trait_id @@ -1442,7 +1388,7 @@ impl<'context> Elaborator<'context> { self.generics.clear(); if let Some(trait_id) = trait_id { - let trait_name = trait_impl.trait_path.last_segment(); + let trait_name = trait_impl.trait_path.last_ident(); self.interner.add_trait_reference( trait_id, Location::new(trait_name.span(), trait_impl.file_id), @@ -1486,191 +1432,4 @@ impl<'context> Elaborator<'context> { _ => true, }) } - - /// Filters out comptime items from non-comptime items. 
- /// Returns a pair of (comptime items, non-comptime items) - fn filter_comptime_items(mut items: CollectedItems) -> (CollectedItems, CollectedItems) { - let mut function_sets = Vec::with_capacity(items.functions.len()); - let mut comptime_function_sets = Vec::new(); - - for function_set in items.functions { - let mut functions = Vec::with_capacity(function_set.functions.len()); - let mut comptime_functions = Vec::new(); - - for function in function_set.functions { - if function.2.def.is_comptime { - comptime_functions.push(function); - } else { - functions.push(function); - } - } - - let file_id = function_set.file_id; - let self_type = function_set.self_type; - let trait_id = function_set.trait_id; - - if !comptime_functions.is_empty() { - comptime_function_sets.push(UnresolvedFunctions { - functions: comptime_functions, - file_id, - trait_id, - self_type: self_type.clone(), - }); - } - - function_sets.push(UnresolvedFunctions { functions, file_id, trait_id, self_type }); - } - - let (comptime_trait_impls, trait_impls) = - items.trait_impls.into_iter().partition(|trait_impl| trait_impl.is_comptime); - - let (comptime_structs, structs) = - items.types.into_iter().partition(|typ| typ.1.struct_def.is_comptime); - - let comptime = CollectedItems { - functions: comptime_function_sets, - types: comptime_structs, - type_aliases: BTreeMap::new(), - traits: BTreeMap::new(), - trait_impls: comptime_trait_impls, - globals: Vec::new(), - impls: rustc_hash::FxHashMap::default(), - }; - - items.functions = function_sets; - items.trait_impls = trait_impls; - items.types = structs; - (comptime, items) - } - - fn add_items( - &mut self, - items: Vec, - generated_items: &mut CollectedItems, - location: Location, - ) { - for item in items { - self.add_item(item, generated_items, location); - } - } - - fn add_item( - &mut self, - item: TopLevelStatement, - generated_items: &mut CollectedItems, - location: Location, - ) { - match item { - TopLevelStatement::Function(function) => { - let id = self.interner.push_empty_fn(); - let module = self.module_id(); - self.interner.push_function(id, &function.def, module, location); - let functions = vec![(self.local_module, id, function)]; - generated_items.functions.push(UnresolvedFunctions { - file_id: self.file, - functions, - trait_id: None, - self_type: None, - }); - } - TopLevelStatement::TraitImpl(mut trait_impl) => { - let methods = dc_mod::collect_trait_impl_functions( - self.interner, - &mut trait_impl, - self.crate_id, - self.file, - self.local_module, - ); - - generated_items.trait_impls.push(UnresolvedTraitImpl { - file_id: self.file, - module_id: self.local_module, - trait_generics: trait_impl.trait_generics, - trait_path: trait_impl.trait_name, - object_type: trait_impl.object_type, - methods, - generics: trait_impl.impl_generics, - where_clause: trait_impl.where_clause, - is_comptime: trait_impl.is_comptime, - - // These last fields are filled in later - trait_id: None, - impl_id: None, - resolved_object_type: None, - resolved_generics: Vec::new(), - resolved_trait_generics: Vec::new(), - }); - } - TopLevelStatement::Global(global) => { - let (global, error) = dc_mod::collect_global( - self.interner, - self.def_maps.get_mut(&self.crate_id).unwrap(), - global, - self.file, - self.local_module, - self.crate_id, - ); - - generated_items.globals.push(global); - if let Some(error) = error { - self.errors.push(error); - } - } - // Assume that an error has already been issued - TopLevelStatement::Error => (), - - TopLevelStatement::Module(_) - | 
TopLevelStatement::Import(_) - | TopLevelStatement::Struct(_) - | TopLevelStatement::Trait(_) - | TopLevelStatement::Impl(_) - | TopLevelStatement::TypeAlias(_) - | TopLevelStatement::SubModule(_) => { - let item = item.to_string(); - let error = InterpreterError::UnsupportedTopLevelItemUnquote { item, location }; - self.errors.push(error.into_compilation_error_pair()); - } - } - } - - pub fn setup_interpreter<'local>(&'local mut self) -> Interpreter<'local, 'context> { - let current_function = match self.current_item { - Some(DependencyId::Function(function)) => Some(function), - _ => None, - }; - Interpreter::new(self, self.crate_id, current_function) - } - - fn debug_comptime T>( - &mut self, - location: Location, - mut expr_f: F, - ) { - if Some(location.file) == self.debug_comptime_in_file { - let displayed_expr = expr_f(self.interner); - self.errors.push(( - InterpreterError::debug_evaluate_comptime(displayed_expr, location).into(), - location.file, - )); - } - } - - fn run_attributes_on_functions( - &mut self, - function_sets: &[UnresolvedFunctions], - generated_items: &mut CollectedItems, - ) { - for function_set in function_sets { - self.file = function_set.file_id; - self.self_type = function_set.self_type.clone(); - - for (local_module, function_id, function) in &function_set.functions { - self.local_module = *local_module; - let attributes = function.secondary_attributes(); - let item = Value::FunctionDefinition(*function_id); - let span = function.span(); - self.run_comptime_attributes_on_item(attributes, item, span, generated_items); - } - } - } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs index e24b6a3a067..ade5420bce4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -15,7 +15,7 @@ use crate::{ }, macros_api::{HirExpression, Ident, Path, Pattern}, node_interner::{DefinitionId, DefinitionKind, ExprId, FuncId, GlobalId, TraitImplKind}, - Shared, StructType, Type, TypeBindings, + ResolvedGeneric, Shared, StructType, Type, TypeBindings, }; use super::{Elaborator, ResolverMeta}; @@ -157,8 +157,12 @@ impl<'context> Elaborator<'context> { mutable: Option, new_definitions: &mut Vec, ) -> HirPattern { - let name_span = name.last_segment().span(); - let is_self_type = name.last_segment().is_self_type_name(); + let exclude_last_segment = true; + self.check_unsupported_turbofish_usage(&name, exclude_last_segment); + + let last_segment = name.last_segment(); + let name_span = last_segment.ident.span(); + let is_self_type = last_segment.ident.is_self_type_name(); let error_identifier = |this: &mut Self| { // Must create a name here to return a HirPattern::Identifier. 
Allowing @@ -178,6 +182,15 @@ impl<'context> Elaborator<'context> { } }; + let turbofish_span = last_segment.turbofish_span(); + + let generics = self.resolve_struct_turbofish_generics( + &struct_type.borrow(), + generics, + last_segment.generics, + turbofish_span, + ); + let actual_type = Type::Struct(struct_type.clone(), generics); let location = Location::new(span, self.file); @@ -404,7 +417,7 @@ impl<'context> Elaborator<'context> { } /// Resolve generics using the expected kinds of the function we are calling - pub(super) fn resolve_turbofish_generics( + pub(super) fn resolve_function_turbofish_generics( &mut self, func_id: &FuncId, unresolved_turbofish: Option>, @@ -412,28 +425,61 @@ impl<'context> Elaborator<'context> { ) -> Option> { let direct_generics = self.interner.function_meta(func_id).direct_generics.clone(); - unresolved_turbofish.map(|option_inner| { - if option_inner.len() != direct_generics.len() { + unresolved_turbofish.map(|unresolved_turbofish| { + if unresolved_turbofish.len() != direct_generics.len() { let type_check_err = TypeCheckError::IncorrectTurbofishGenericCount { expected_count: direct_generics.len(), - actual_count: option_inner.len(), + actual_count: unresolved_turbofish.len(), span, }; self.push_err(type_check_err); } - let generics_with_types = direct_generics.iter().zip(option_inner); - vecmap(generics_with_types, |(generic, unresolved_type)| { - self.resolve_type_inner(unresolved_type, &generic.kind) - }) + self.resolve_turbofish_generics(&direct_generics, unresolved_turbofish) }) } - pub(super) fn elaborate_variable( + pub(super) fn resolve_struct_turbofish_generics( &mut self, - variable: Path, + struct_type: &StructType, + generics: Vec, unresolved_turbofish: Option>, - ) -> (ExprId, Type) { + span: Span, + ) -> Vec { + let Some(turbofish_generics) = unresolved_turbofish else { + return generics; + }; + + if turbofish_generics.len() != generics.len() { + self.push_err(TypeCheckError::GenericCountMismatch { + item: format!("struct {}", struct_type.name), + expected: generics.len(), + found: turbofish_generics.len(), + span, + }); + return generics; + } + + self.resolve_turbofish_generics(&struct_type.generics, turbofish_generics) + } + + pub(super) fn resolve_turbofish_generics( + &mut self, + generics: &[ResolvedGeneric], + turbofish_generics: Vec, + ) -> Vec { + let generics_with_types = generics.iter().zip(turbofish_generics); + vecmap(generics_with_types, |(generic, unresolved_type)| { + self.resolve_type_inner(unresolved_type, &generic.kind) + }) + } + + pub(super) fn elaborate_variable(&mut self, variable: Path) -> (ExprId, Type) { + let exclude_last_segment = true; + self.check_unsupported_turbofish_usage(&variable, exclude_last_segment); + + let unresolved_turbofish = variable.segments.last().unwrap().generics.clone(); + let span = variable.span; let expr = self.resolve_variable(variable); let definition_id = expr.id; @@ -445,7 +491,7 @@ impl<'context> Elaborator<'context> { // and if the turbofish operator was used. 
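The turbofish support above is now split between `resolve_function_turbofish_generics` and `resolve_struct_turbofish_generics`, which validate the argument count against the expected generics (the struct path falls back to its existing generics on a mismatch) and then delegate to the shared `resolve_turbofish_generics`, which pairs each expected generic with the supplied type and resolves it under that generic's kind. A simplified, standalone sketch of that count-check-then-zip step follows; the types and helpers are hypothetical stand-ins, not the elaborator's real ones.

// Illustrative sketch only (not part of the patch): check the turbofish argument count,
// then resolve each argument against the matching expected generic's kind.
enum Kind {
    Normal,
    Numeric,
}

struct ExpectedGeneric {
    kind: Kind,
}

#[derive(Debug, PartialEq)]
enum ResolvedArg {
    Type(String),
    Constant(u32),
}

fn resolve_with_kind(arg: &str, kind: &Kind) -> ResolvedArg {
    match kind {
        // A numeric generic expects a constant; anything else resolves as a type here.
        Kind::Numeric => ResolvedArg::Constant(arg.parse().unwrap_or(0)),
        Kind::Normal => ResolvedArg::Type(arg.to_string()),
    }
}

fn resolve_turbofish_generics(
    expected: &[ExpectedGeneric],
    turbofish: Vec<&str>,
    errors: &mut Vec<String>,
) -> Vec<ResolvedArg> {
    if turbofish.len() != expected.len() {
        errors.push(format!(
            "expected {} generic argument(s), found {}",
            expected.len(),
            turbofish.len()
        ));
    }
    // Zip truncates to the shorter side, so a wrong count still yields a usable result
    // after the error above has been reported.
    expected
        .iter()
        .zip(turbofish)
        .map(|(generic, arg)| resolve_with_kind(arg, &generic.kind))
        .collect()
}

fn main() {
    let expected = [
        ExpectedGeneric { kind: Kind::Normal },
        ExpectedGeneric { kind: Kind::Numeric },
    ];
    let mut errors = Vec::new();
    let resolved = resolve_turbofish_generics(&expected, vec!["Field", "3"], &mut errors);
    assert!(errors.is_empty());
    assert_eq!(resolved, vec![ResolvedArg::Type("Field".into()), ResolvedArg::Constant(3)]);
}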
let generics = definition_kind.and_then(|definition_kind| match &definition_kind { DefinitionKind::Function(function) => { - self.resolve_turbofish_generics(function, unresolved_turbofish, span) + self.resolve_function_turbofish_generics(function, unresolved_turbofish, span) } _ => None, }); @@ -648,7 +694,7 @@ impl<'context> Elaborator<'context> { } pub fn get_ident_from_path(&mut self, path: Path) -> (HirIdent, usize) { - let location = Location::new(path.last_segment().span(), self.file); + let location = Location::new(path.last_ident().span(), self.file); let error = match path.as_ident().map(|ident| self.use_variable(ident)) { Some(Ok(found)) => return found, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs index 23638b03cf5..b2367e0cf0e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/scope.rs @@ -1,6 +1,6 @@ use noirc_errors::{Location, Spanned}; -use crate::ast::ERROR_IDENT; +use crate::ast::{PathKind, ERROR_IDENT}; use crate::hir::def_map::{LocalModuleId, ModuleId}; use crate::hir::resolution::path_resolver::{PathResolver, StandardPathResolver}; use crate::hir::scope::{Scope as GenericScope, ScopeTree as GenericScopeTree}; @@ -43,11 +43,38 @@ impl<'context> Elaborator<'context> { } pub(super) fn resolve_path(&mut self, path: Path) -> Result { - let resolver = StandardPathResolver::new(self.module_id()); + let mut module_id = self.module_id(); + let mut path = path; + + if path.kind == PathKind::Plain && path.first_name() == SELF_TYPE_NAME { + if let Some(Type::Struct(struct_type, _)) = &self.self_type { + let struct_type = struct_type.borrow(); + if path.segments.len() == 1 { + return Ok(ModuleDefId::TypeId(struct_type.id)); + } + + module_id = struct_type.id.module_id(); + path = Path { + segments: path.segments[1..].to_vec(), + kind: PathKind::Plain, + span: path.span(), + }; + } + } + + self.resolve_path_in_module(path, module_id) + } + + fn resolve_path_in_module( + &mut self, + path: Path, + module_id: ModuleId, + ) -> Result { + let resolver = StandardPathResolver::new(module_id); let path_resolution; - if self.interner.track_references { - let last_segment = path.last_segment(); + if self.interner.lsp_mode { + let last_segment = path.last_ident(); let location = Location::new(last_segment.span(), self.file); let is_self_type_name = last_segment.is_self_type_name(); @@ -55,14 +82,14 @@ impl<'context> Elaborator<'context> { path_resolution = resolver.resolve(self.def_maps, path.clone(), &mut Some(&mut references))?; - for (referenced, ident) in references.iter().zip(path.segments) { + for (referenced, segment) in references.iter().zip(path.segments) { let Some(referenced) = referenced else { continue; }; self.interner.add_reference( *referenced, - Location::new(ident.span(), self.file), - ident.is_self_type_name(), + Location::new(segment.ident.span(), self.file), + segment.ident.is_self_type_name(), ); } @@ -144,12 +171,12 @@ impl<'context> Elaborator<'context> { pub fn push_scope(&mut self) { self.scopes.start_scope(); - self.comptime_scopes.push(Default::default()); + self.interner.comptime_scopes.push(Default::default()); } pub fn pop_scope(&mut self) { let scope = self.scopes.end_scope(); - self.comptime_scopes.pop(); + self.interner.comptime_scopes.pop(); self.check_for_unused_variables_in_scope_tree(scope.into()); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs index a00e770218e..1e48fdd07e7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs @@ -8,9 +8,7 @@ use crate::{ FunctionKind, TraitItem, UnresolvedGeneric, UnresolvedGenerics, UnresolvedTraitConstraint, }, hir::{ - def_collector::dc_crate::{ - CollectedItems, CompilationError, UnresolvedTrait, UnresolvedTraitImpl, - }, + def_collector::dc_crate::{CompilationError, UnresolvedTrait, UnresolvedTraitImpl}, type_check::TypeCheckError, }, hir_def::{ @@ -29,14 +27,10 @@ use crate::{ use super::Elaborator; impl<'context> Elaborator<'context> { - pub fn collect_traits( - &mut self, - traits: BTreeMap, - generated_items: &mut CollectedItems, - ) { + pub fn collect_traits(&mut self, traits: &BTreeMap) { for (trait_id, unresolved_trait) in traits { self.recover_generics(|this| { - let resolved_generics = this.interner.get_trait(trait_id).generics.clone(); + let resolved_generics = this.interner.get_trait(*trait_id).generics.clone(); this.add_existing_generics( &unresolved_trait.trait_def.generics, &resolved_generics, @@ -44,28 +38,23 @@ impl<'context> Elaborator<'context> { // Resolve order // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) - let _ = this.resolve_trait_types(&unresolved_trait); + let _ = this.resolve_trait_types(unresolved_trait); // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) - let _ = this.resolve_trait_constants(&unresolved_trait); + let _ = this.resolve_trait_constants(unresolved_trait); // 3. Trait Methods - let methods = this.resolve_trait_methods(trait_id, &unresolved_trait); + let methods = this.resolve_trait_methods(*trait_id, unresolved_trait); - this.interner.update_trait(trait_id, |trait_def| { + this.interner.update_trait(*trait_id, |trait_def| { trait_def.set_methods(methods); }); - - let attributes = &unresolved_trait.trait_def.attributes; - let item = crate::hir::comptime::Value::TraitDefinition(trait_id); - let span = unresolved_trait.trait_def.span; - this.run_comptime_attributes_on_item(attributes, item, span, generated_items); }); // This check needs to be after the trait's methods are set since // the interner may set `interner.ordering_type` based on the result type // of the Cmp trait, if this is it. 
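`collect_traits` above now borrows the traits map rather than consuming it, and no longer threads a `generated_items` collection through, since the comptime-attribute run was removed from this pass; as a consequence the `Copy` key is dereferenced (`*trait_id`) wherever an owned id is needed. A trivial standalone sketch of that borrowed-iteration shape, with stand-in types only:

// Illustrative sketch only (not part of the patch): iterating a borrowed map with a
// `Copy` key, as `collect_traits` now does. `TraitId` here is a hypothetical stand-in.
use std::collections::BTreeMap;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct TraitId(u32);

fn process(trait_id: TraitId, name: &str) {
    println!("resolving trait {trait_id:?}: {name}");
}

fn collect_traits(traits: &BTreeMap<TraitId, String>) {
    // Borrowing the map yields `(&TraitId, &String)`, so the `Copy` key is dereferenced
    // (`*trait_id`) wherever an owned id is needed, mirroring the diff above.
    for (trait_id, unresolved) in traits {
        process(*trait_id, unresolved);
    }
}

fn main() {
    let mut traits = BTreeMap::new();
    traits.insert(TraitId(1), "Eq".to_string());
    traits.insert(TraitId(2), "Ord".to_string());
    collect_traits(&traits);
    // The map is still usable afterwards because it was only borrowed.
    assert_eq!(traits.len(), 2);
}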
if self.crate_id.is_stdlib() { - self.interner.try_add_infix_operator_trait(trait_id); - self.interner.try_add_prefix_operator_trait(trait_id); + self.interner.try_add_infix_operator_trait(*trait_id); + self.interner.try_add_prefix_operator_trait(*trait_id); } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index d5dbb170843..7448ccaa42b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -41,7 +41,7 @@ pub const WILDCARD_TYPE: &str = "_"; impl<'context> Elaborator<'context> { /// Translates an UnresolvedType to a Type with a `TypeKind::Normal` - pub(super) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { + pub(crate) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { let span = typ.span; let resolved_type = self.resolve_type_inner(typ, &Kind::Normal); if resolved_type.is_nested_slice() { @@ -61,8 +61,8 @@ impl<'context> Elaborator<'context> { let (named_path_span, is_self_type_name, is_synthetic) = if let Named(ref named_path, _, synthetic) = typ.typ { ( - Some(named_path.last_segment().span()), - named_path.last_segment().is_self_type_name(), + Some(named_path.last_ident().span()), + named_path.last_ident().is_self_type_name(), synthetic, ) } else { @@ -221,7 +221,7 @@ impl<'context> Elaborator<'context> { // Check if the path is a type variable first. We currently disallow generics on type // variables since we do not support higher-kinded types. if path.segments.len() == 1 { - let name = &path.last_segment().0.contents; + let name = path.last_name(); if name == SELF_TYPE_NAME { if let Some(self_type) = self.self_type.clone() { @@ -239,6 +239,7 @@ impl<'context> Elaborator<'context> { if let Some(type_alias) = self.lookup_type_alias(path.clone()) { let type_alias = type_alias.borrow(); + let actual_generic_count = args.len(); let expected_generic_count = type_alias.generics.len(); let type_alias_string = type_alias.to_string(); let id = type_alias.id; @@ -247,9 +248,13 @@ impl<'context> Elaborator<'context> { self.resolve_type_inner(arg, &generic.kind) }); - self.verify_generics_count(expected_generic_count, &mut args, span, || { - type_alias_string - }); + self.verify_generics_count( + expected_generic_count, + actual_generic_count, + &mut args, + span, + || type_alias_string, + ); if let Some(item) = self.current_item { self.interner.add_type_alias_dependency(item, id); @@ -279,6 +284,8 @@ impl<'context> Elaborator<'context> { } let expected_generic_count = struct_type.borrow().generics.len(); + let actual_generic_count = args.len(); + if !self.in_contract() && self .interner @@ -296,9 +303,13 @@ impl<'context> Elaborator<'context> { self.resolve_type_inner(arg, &generic.kind) }); - self.verify_generics_count(expected_generic_count, &mut args, span, || { - struct_type.borrow().to_string() - }); + self.verify_generics_count( + expected_generic_count, + actual_generic_count, + &mut args, + span, + || struct_type.borrow().to_string(), + ); if let Some(current_item) = self.current_item { let dependency_id = struct_type.borrow().id; @@ -333,15 +344,16 @@ impl<'context> Elaborator<'context> { fn verify_generics_count( &mut self, expected_count: usize, + actual_count: usize, args: &mut Vec, span: Span, type_name: impl FnOnce() -> String, ) { - if args.len() != expected_count { + if actual_count != expected_count { self.push_err(ResolverError::IncorrectGenericCount { span, item_name: 
type_name(), - actual: args.len(), + actual: actual_count, expected: expected_count, }); @@ -352,7 +364,7 @@ impl<'context> Elaborator<'context> { pub fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { if path.segments.len() == 1 { - let name = &path.last_segment().0.contents; + let name = path.last_name(); if let Some(generic) = self.find_generic(name) { let generic = generic.clone(); return Some(Type::NamedGeneric(generic.type_var, generic.name, generic.kind)); @@ -391,13 +403,16 @@ impl<'context> Elaborator<'context> { match (lhs, rhs) { (Type::Constant(lhs), Type::Constant(rhs)) => { - Type::Constant(op.function()(lhs, rhs)) + Type::Constant(op.function(lhs, rhs)) } - (lhs, _) => { - let span = - if !matches!(lhs, Type::Constant(_)) { lhs_span } else { rhs_span }; - self.push_err(ResolverError::InvalidArrayLengthExpr { span }); - Type::Constant(0) + (lhs, rhs) => { + if !self.enable_arithmetic_generics { + let span = + if !matches!(lhs, Type::Constant(_)) { lhs_span } else { rhs_span }; + self.push_err(ResolverError::InvalidArrayLengthExpr { span }); + } + + Type::InfixExpr(Box::new(lhs), op, Box::new(rhs)).canonicalize() } } } @@ -412,11 +427,12 @@ impl<'context> Elaborator<'context> { &mut self, path: &Path, ) -> Option<(TraitMethodId, TraitConstraint, bool)> { - let trait_id = self.trait_id?; + let trait_impl = self.current_trait_impl?; + let trait_id = self.interner.try_get_trait_implementation(trait_impl)?.borrow().trait_id; if path.kind == PathKind::Plain && path.segments.len() == 2 { - let name = &path.segments[0].0.contents; - let method = &path.segments[1]; + let name = &path.segments[0].ident.0.contents; + let method = &path.segments[1].ident; if name == SELF_TYPE_NAME { let the_trait = self.interner.get_trait(trait_id); @@ -449,7 +465,7 @@ impl<'context> Elaborator<'context> { let meta = self.interner.function_meta(&func_id); let trait_id = meta.trait_id?; let the_trait = self.interner.get_trait(trait_id); - let method = the_trait.find_method(&path.last_segment().0.contents)?; + let method = the_trait.find_method(path.last_name())?; let constraint = TraitConstraint { typ: Type::TypeVariable(the_trait.self_type_typevar.clone(), TypeVariableKind::Normal), trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { @@ -477,14 +493,12 @@ impl<'context> Elaborator<'context> { for constraint in self.trait_bounds.clone() { if let Type::NamedGeneric(_, name, _) = &constraint.typ { // if `path` is `T::method_name`, we're looking for constraint of the form `T: SomeTrait` - if path.segments[0].0.contents != name.as_str() { + if path.segments[0].ident.0.contents != name.as_str() { continue; } let the_trait = self.interner.get_trait(constraint.trait_id); - if let Some(method) = - the_trait.find_method(path.segments.last().unwrap().0.contents.as_str()) - { + if let Some(method) = the_trait.find_method(path.last_name()) { return Some((method, constraint, true)); } } @@ -768,7 +782,7 @@ impl<'context> Elaborator<'context> { } } - pub(super) fn check_cast(&mut self, from: Type, to: &Type, span: Span) -> Type { + pub(super) fn check_cast(&mut self, from: &Type, to: &Type, span: Span) -> Type { match from.follow_bindings() { Type::Integer(..) 
| Type::FieldElement @@ -777,8 +791,13 @@ impl<'context> Elaborator<'context> { | Type::Bool => (), Type::TypeVariable(_, _) => { - self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); - return Type::Error; + // NOTE: in reality the expected type can also include bool, but for the compiler's simplicity + // we only allow integer types. If a bool is in `from` it will need an explicit type annotation. + let expected = Type::polymorphic_integer_or_field(self.interner); + self.unify(from, &expected, || TypeCheckError::InvalidCast { + from: from.clone(), + span, + }); } Type::Error => return Type::Error, from => { @@ -1211,37 +1230,7 @@ impl<'context> Elaborator<'context> { None } Type::NamedGeneric(_, _, _) => { - let func_id = match self.current_item { - Some(DependencyId::Function(id)) => id, - _ => panic!("unexpected method outside a function"), - }; - let func_meta = self.interner.function_meta(&func_id); - - for constraint in &func_meta.trait_constraints { - if *object_type == constraint.typ { - if let Some(the_trait) = self.interner.try_get_trait(constraint.trait_id) { - for (method_index, method) in the_trait.methods.iter().enumerate() { - if method.name.0.contents == method_name { - let trait_method = TraitMethodId { - trait_id: constraint.trait_id, - method_index, - }; - return Some(HirMethodReference::TraitMethodId( - trait_method, - constraint.trait_generics.clone(), - )); - } - } - } - } - } - - self.push_err(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span, - }); - None + self.lookup_method_in_trait_constraints(object_type, method_name, span) } // Mutable references to another type should resolve to methods of their element type. // This may be a struct or a primitive type. @@ -1264,17 +1253,53 @@ impl<'context> Elaborator<'context> { other => match self.interner.lookup_primitive_method(&other, method_name) { Some(method_id) => Some(HirMethodReference::FuncId(method_id)), None => { - self.push_err(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span, - }); - None + // It could be that this type is a composite type that is bound to a trait, + // for example `x: (T, U) ... 
where (T, U): SomeTrait` + // (so this case is a generalization of the NamedGeneric case) + self.lookup_method_in_trait_constraints(object_type, method_name, span) } }, } } + fn lookup_method_in_trait_constraints( + &mut self, + object_type: &Type, + method_name: &str, + span: Span, + ) -> Option { + let func_id = match self.current_item { + Some(DependencyId::Function(id)) => id, + _ => panic!("unexpected method outside a function"), + }; + let func_meta = self.interner.function_meta(&func_id); + + for constraint in &func_meta.trait_constraints { + if *object_type == constraint.typ { + if let Some(the_trait) = self.interner.try_get_trait(constraint.trait_id) { + for (method_index, method) in the_trait.methods.iter().enumerate() { + if method.name.0.contents == method_name { + let trait_method = + TraitMethodId { trait_id: constraint.trait_id, method_index }; + return Some(HirMethodReference::TraitMethodId( + trait_method, + constraint.trait_generics.clone(), + )); + } + } + } + } + } + + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + + None + } + pub(super) fn type_check_call( &mut self, call: &HirCallExpression, @@ -1592,6 +1617,10 @@ impl<'context> Elaborator<'context> { } Self::find_numeric_generics_in_type(fields, found); } + Type::InfixExpr(lhs, _op, rhs) => { + Self::find_numeric_generics_in_type(lhs, found); + Self::find_numeric_generics_in_type(rhs, found); + } } } @@ -1610,6 +1639,19 @@ impl<'context> Elaborator<'context> { let context = context.expect("The function_context stack should always be non-empty"); context.trait_constraints.push((constraint, expr_id)); } + + pub fn check_unsupported_turbofish_usage(&mut self, path: &Path, exclude_last_segment: bool) { + for (index, segment) in path.segments.iter().enumerate() { + if exclude_last_segment && index == path.segments.len() - 1 { + continue; + } + + if segment.generics.is_some() { + let span = segment.turbofish_span(); + self.push_err(TypeCheckError::UnsupportedTurbofishUsage { span }); + } + } + } } /// Gives an error if a user tries to create a mutable reference diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/unquote.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/unquote.rs index ed12ba21398..fd7e02df905 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/unquote.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/unquote.rs @@ -27,7 +27,7 @@ impl<'a> Elaborator<'a> { // Don't want the leading `$` anymore new_tokens.pop(); let path = Path::from_single(name, span); - let (expr_id, _) = self.elaborate_variable(path, None); + let (expr_id, _) = self.elaborate_variable(path); new_tokens.push(SpannedToken::new(Token::UnquoteMarker(expr_id), span)); } other_next => new_tokens.push(SpannedToken::new(other_next, span)), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs index b52201146dd..7898f13945f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -2,9 +2,10 @@ use std::fmt::Display; use std::rc::Rc; use crate::{ + ast::TraitBound, hir::{def_collector::dc_crate::CompilationError, type_check::NoMatchingImplFoundError}, parser::ParserError, - token::Tokens, + token::Token, Type, }; use acvm::{acir::AcirField, BlackBoxResolutionError, FieldElement}; @@ -12,52 +13,183 @@ use 
fm::FileId; use iter_extended::vecmap; use noirc_errors::{CustomDiagnostic, Location}; -use super::value::Value; - /// The possible errors that can halt the interpreter. #[derive(Debug, Clone, PartialEq, Eq)] pub enum InterpreterError { - ArgumentCountMismatch { expected: usize, actual: usize, location: Location }, - TypeMismatch { expected: Type, value: Value, location: Location }, - NonComptimeVarReferenced { name: String, location: Location }, - VariableNotInScope { location: Location }, - IntegerOutOfRangeForType { value: FieldElement, typ: Type, location: Location }, - ErrorNodeEncountered { location: Location }, - NonFunctionCalled { value: Value, location: Location }, - NonBoolUsedInIf { value: Value, location: Location }, - NonBoolUsedInConstrain { value: Value, location: Location }, - FailingConstraint { message: Option, location: Location }, - NoMethodFound { name: String, typ: Type, location: Location }, - NonIntegerUsedInLoop { value: Value, location: Location }, - NonPointerDereferenced { value: Value, location: Location }, - NonTupleOrStructInMemberAccess { value: Value, location: Location }, - NonArrayIndexed { value: Value, location: Location }, - NonIntegerUsedAsIndex { value: Value, location: Location }, - NonIntegerIntegerLiteral { typ: Type, location: Location }, - NonIntegerArrayLength { typ: Type, location: Location }, - NonNumericCasted { value: Value, location: Location }, - IndexOutOfBounds { index: usize, length: usize, location: Location }, - ExpectedStructToHaveField { value: Value, field_name: String, location: Location }, - TypeUnsupported { typ: Type, location: Location }, - InvalidValueForUnary { value: Value, operator: &'static str, location: Location }, - InvalidValuesForBinary { lhs: Value, rhs: Value, operator: &'static str, location: Location }, - CastToNonNumericType { typ: Type, location: Location }, - QuoteInRuntimeCode { location: Location }, - NonStructInConstructor { typ: Type, location: Location }, - CannotInlineMacro { value: Value, location: Location }, - UnquoteFoundDuringEvaluation { location: Location }, - DebugEvaluateComptime { diagnostic: CustomDiagnostic, location: Location }, - FailedToParseMacro { error: ParserError, tokens: Rc, rule: &'static str, file: FileId }, - UnsupportedTopLevelItemUnquote { item: String, location: Location }, - NonComptimeFnCallInSameCrate { function: String, location: Location }, - NoImpl { location: Location }, - NoMatchingImplFound { error: NoMatchingImplFoundError, file: FileId }, - ImplMethodTypeMismatch { expected: Type, actual: Type, location: Location }, - BreakNotInLoop { location: Location }, - ContinueNotInLoop { location: Location }, + ArgumentCountMismatch { + expected: usize, + actual: usize, + location: Location, + }, + TypeMismatch { + expected: Type, + actual: Type, + location: Location, + }, + NonComptimeVarReferenced { + name: String, + location: Location, + }, + VariableNotInScope { + location: Location, + }, + IntegerOutOfRangeForType { + value: FieldElement, + typ: Type, + location: Location, + }, + ErrorNodeEncountered { + location: Location, + }, + NonFunctionCalled { + typ: Type, + location: Location, + }, + NonBoolUsedInIf { + typ: Type, + location: Location, + }, + NonBoolUsedInConstrain { + typ: Type, + location: Location, + }, + FailingConstraint { + message: Option, + location: Location, + }, + NoMethodFound { + name: String, + typ: Type, + location: Location, + }, + NonIntegerUsedInLoop { + typ: Type, + location: Location, + }, + NonPointerDereferenced { + typ: Type, + location: 
Location, + }, + NonTupleOrStructInMemberAccess { + typ: Type, + location: Location, + }, + NonArrayIndexed { + typ: Type, + location: Location, + }, + NonIntegerUsedAsIndex { + typ: Type, + location: Location, + }, + NonIntegerIntegerLiteral { + typ: Type, + location: Location, + }, + NonIntegerArrayLength { + typ: Type, + location: Location, + }, + NonNumericCasted { + typ: Type, + location: Location, + }, + IndexOutOfBounds { + index: usize, + length: usize, + location: Location, + }, + ExpectedStructToHaveField { + typ: Type, + field_name: String, + location: Location, + }, + TypeUnsupported { + typ: Type, + location: Location, + }, + InvalidValueForUnary { + typ: Type, + operator: &'static str, + location: Location, + }, + InvalidValuesForBinary { + lhs: Type, + rhs: Type, + operator: &'static str, + location: Location, + }, + CastToNonNumericType { + typ: Type, + location: Location, + }, + QuoteInRuntimeCode { + location: Location, + }, + NonStructInConstructor { + typ: Type, + location: Location, + }, + CannotInlineMacro { + value: String, + typ: Type, + location: Location, + }, + UnquoteFoundDuringEvaluation { + location: Location, + }, + DebugEvaluateComptime { + diagnostic: CustomDiagnostic, + location: Location, + }, + FailedToParseMacro { + error: ParserError, + tokens: Rc>, + rule: &'static str, + file: FileId, + }, + UnsupportedTopLevelItemUnquote { + item: String, + location: Location, + }, + ComptimeDependencyCycle { + function: String, + location: Location, + }, + NoImpl { + location: Location, + }, + NoMatchingImplFound { + error: NoMatchingImplFoundError, + file: FileId, + }, + ImplMethodTypeMismatch { + expected: Type, + actual: Type, + location: Location, + }, + BreakNotInLoop { + location: Location, + }, + ContinueNotInLoop { + location: Location, + }, BlackBoxError(BlackBoxResolutionError, Location), + FailedToResolveTraitBound { + trait_bound: TraitBound, + location: Location, + }, + TraitDefinitionMustBeAPath { + location: Location, + }, + FailedToResolveTraitDefinition { + location: Location, + }, - Unimplemented { item: String, location: Location }, + Unimplemented { + item: String, + location: Location, + }, // These cases are not errors, they are just used to prevent us from running more code // until the loop can be resumed properly. These cases will never be displayed to users. @@ -112,14 +244,17 @@ impl InterpreterError { | InterpreterError::CannotInlineMacro { location, .. } | InterpreterError::UnquoteFoundDuringEvaluation { location, .. } | InterpreterError::UnsupportedTopLevelItemUnquote { location, .. } - | InterpreterError::NonComptimeFnCallInSameCrate { location, .. } + | InterpreterError::ComptimeDependencyCycle { location, .. } | InterpreterError::Unimplemented { location, .. } | InterpreterError::NoImpl { location, .. } | InterpreterError::ImplMethodTypeMismatch { location, .. } | InterpreterError::DebugEvaluateComptime { location, .. } | InterpreterError::BlackBoxError(_, location) | InterpreterError::BreakNotInLoop { location, .. } - | InterpreterError::ContinueNotInLoop { location, .. } => *location, + | InterpreterError::ContinueNotInLoop { location, .. } + | InterpreterError::TraitDefinitionMustBeAPath { location } + | InterpreterError::FailedToResolveTraitDefinition { location } + | InterpreterError::FailedToResolveTraitBound { location, .. } => *location, InterpreterError::FailedToParseMacro { error, file, .. 
} => { Location::new(error.span(), *file) @@ -163,9 +298,8 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = format!("Too {few_many} arguments"); CustomDiagnostic::simple_error(msg, secondary, location.span) } - InterpreterError::TypeMismatch { expected, value, location } => { - let typ = value.get_type(); - let msg = format!("Expected `{expected}` but a value of type `{typ}` was given"); + InterpreterError::TypeMismatch { expected, actual, location } => { + let msg = format!("Expected `{expected}` but a value of type `{actual}` was given"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } InterpreterError::NonComptimeVarReferenced { name, location } => { @@ -191,23 +325,23 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = "This is a bug, please report this if found!".to_string(); CustomDiagnostic::simple_error(msg, secondary, location.span) } - InterpreterError::NonFunctionCalled { value, location } => { + InterpreterError::NonFunctionCalled { typ, location } => { let msg = "Only functions may be called".to_string(); - let secondary = format!("Expression has type {}", value.get_type()); + let secondary = format!("Expression has type {typ}"); CustomDiagnostic::simple_error(msg, secondary, location.span) } - InterpreterError::NonBoolUsedInIf { value, location } => { - let msg = format!("Expected a `bool` but found `{}`", value.get_type()); + InterpreterError::NonBoolUsedInIf { typ, location } => { + let msg = format!("Expected a `bool` but found `{typ}`"); let secondary = "If conditions must be a boolean value".to_string(); CustomDiagnostic::simple_error(msg, secondary, location.span) } - InterpreterError::NonBoolUsedInConstrain { value, location } => { - let msg = format!("Expected a `bool` but found `{}`", value.get_type()); + InterpreterError::NonBoolUsedInConstrain { typ, location } => { + let msg = format!("Expected a `bool` but found `{typ}`"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } InterpreterError::FailingConstraint { message, location } => { let (primary, secondary) = match message { - Some(msg) => (format!("{msg:?}"), "Assertion failed".into()), + Some(msg) => (msg.clone(), "Assertion failed".into()), None => ("Assertion failed".into(), String::new()), }; CustomDiagnostic::simple_error(primary, secondary, location.span) @@ -216,32 +350,30 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let msg = format!("No method named `{name}` found for type `{typ}`"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } - InterpreterError::NonIntegerUsedInLoop { value, location } => { - let typ = value.get_type(); + InterpreterError::NonIntegerUsedInLoop { typ, location } => { let msg = format!("Non-integer type `{typ}` used in for loop"); - let secondary = if matches!(typ.as_ref(), &Type::FieldElement) { + let secondary = if matches!(typ, Type::FieldElement) { "`field` is not an integer type, try `u32` instead".to_string() } else { String::new() }; CustomDiagnostic::simple_error(msg, secondary, location.span) } - InterpreterError::NonPointerDereferenced { value, location } => { - let typ = value.get_type(); + InterpreterError::NonPointerDereferenced { typ, location } => { let msg = format!("Only references may be dereferenced, but found `{typ}`"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } - InterpreterError::NonTupleOrStructInMemberAccess { value, location } => { - let msg = format!("The type `{}` has no fields to access", 
value.get_type()); + InterpreterError::NonTupleOrStructInMemberAccess { typ, location } => { + let msg = format!("The type `{typ}` has no fields to access"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } - InterpreterError::NonArrayIndexed { value, location } => { - let msg = format!("Expected an array or slice but found a(n) {}", value.get_type()); + InterpreterError::NonArrayIndexed { typ, location } => { + let msg = format!("Expected an array or slice but found a(n) {typ}"); let secondary = "Only arrays or slices may be indexed".into(); CustomDiagnostic::simple_error(msg, secondary, location.span) } - InterpreterError::NonIntegerUsedAsIndex { value, location } => { - let msg = format!("Expected an integer but found a(n) {}", value.get_type()); + InterpreterError::NonIntegerUsedAsIndex { typ, location } => { + let msg = format!("Expected an integer but found a(n) {typ}"); let secondary = "Only integers may be indexed. Note that this excludes `field`s".into(); CustomDiagnostic::simple_error(msg, secondary, location.span) @@ -256,17 +388,16 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = "Array lengths must be integers".into(); CustomDiagnostic::simple_error(msg, secondary, location.span) } - InterpreterError::NonNumericCasted { value, location } => { + InterpreterError::NonNumericCasted { typ, location } => { let msg = "Only numeric types may be casted".into(); - let secondary = format!("`{}` is non-numeric", value.get_type()); + let secondary = format!("`{typ}` is non-numeric"); CustomDiagnostic::simple_error(msg, secondary, location.span) } InterpreterError::IndexOutOfBounds { index, length, location } => { let msg = format!("{index} is out of bounds for the array of length {length}"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } - InterpreterError::ExpectedStructToHaveField { value, field_name, location } => { - let typ = value.get_type(); + InterpreterError::ExpectedStructToHaveField { typ, field_name, location } => { let msg = format!("The type `{typ}` has no field named `{field_name}`"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } @@ -275,13 +406,11 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { format!("The type `{typ}` is currently unsupported in comptime expressions"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } - InterpreterError::InvalidValueForUnary { value, operator, location } => { - let msg = format!("`{}` cannot be used with unary {operator}", value.get_type()); + InterpreterError::InvalidValueForUnary { typ, operator, location } => { + let msg = format!("`{typ}` cannot be used with unary {operator}"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } InterpreterError::InvalidValuesForBinary { lhs, rhs, operator, location } => { - let lhs = lhs.get_type(); - let rhs = rhs.get_type(); let msg = format!("No implementation for `{lhs}` {operator} `{rhs}`",); CustomDiagnostic::simple_error(msg, String::new(), location.span) } @@ -297,10 +426,9 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let msg = format!("`{typ}` is not a struct type"); CustomDiagnostic::simple_error(msg, String::new(), location.span) } - InterpreterError::CannotInlineMacro { value, location } => { - let typ = value.get_type(); + InterpreterError::CannotInlineMacro { value, typ, location } => { let msg = format!("Cannot inline values of type `{typ}` into this position"); - let secondary = format!("Cannot inline value {value:?}"); + let 
secondary = format!("Cannot inline value `{value}`"); CustomDiagnostic::simple_error(msg, secondary, location.span) } InterpreterError::UnquoteFoundDuringEvaluation { location } => { @@ -311,7 +439,7 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { InterpreterError::DebugEvaluateComptime { diagnostic, .. } => diagnostic.clone(), InterpreterError::FailedToParseMacro { error, tokens, rule, file: _ } => { let message = format!("Failed to parse macro's token stream into {rule}"); - let tokens = vecmap(&tokens.0, ToString::to_string).join(" "); + let tokens = vecmap(tokens.iter(), ToString::to_string).join(" "); // 10 is an aribtrary number of tokens here chosen to fit roughly onto one line let token_stream = if tokens.len() > 10 { @@ -342,10 +470,10 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { error.add_note(format!("Unquoted item was:\n{item}")); error } - InterpreterError::NonComptimeFnCallInSameCrate { function, location } => { - let msg = format!("`{function}` cannot be called in a `comptime` context here"); + InterpreterError::ComptimeDependencyCycle { function, location } => { + let msg = format!("Comptime dependency cycle while resolving `{function}`"); let secondary = - "This function must be `comptime` or in a separate crate to be called".into(); + "This function uses comptime code internally which calls into itself".into(); CustomDiagnostic::simple_error(msg, secondary, location.span) } InterpreterError::Unimplemented { item, location } => { @@ -373,9 +501,21 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { InterpreterError::BlackBoxError(error, location) => { CustomDiagnostic::simple_error(error.to_string(), String::new(), location.span) } + InterpreterError::FailedToResolveTraitBound { trait_bound, location } => { + let msg = format!("Failed to resolve trait bound `{trait_bound}`"); + CustomDiagnostic::simple_error(msg, String::new(), location.span) + } InterpreterError::NoMatchingImplFound { error, .. 
} => error.into(), InterpreterError::Break => unreachable!("Uncaught InterpreterError::Break"), InterpreterError::Continue => unreachable!("Uncaught InterpreterError::Continue"), + InterpreterError::TraitDefinitionMustBeAPath { location } => { + let msg = "Trait definition arguments must be a variable or path".to_string(); + CustomDiagnostic::simple_error(msg, String::new(), location.span) + } + InterpreterError::FailedToResolveTraitDefinition { location } => { + let msg = "Failed to resolve to a trait definition".to_string(); + CustomDiagnostic::simple_error(msg, String::new(), location.span) + } } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index 22763c9cb64..bc48b2875c8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -5,8 +5,8 @@ use crate::ast::{ ArrayLiteral, AssignStatement, BlockExpression, CallExpression, CastExpression, ConstrainKind, ConstructorExpression, ExpressionKind, ForLoopStatement, ForRange, Ident, IfExpression, IndexExpression, InfixExpression, LValue, Lambda, LetStatement, Literal, - MemberAccessExpression, MethodCallExpression, Path, Pattern, PrefixExpression, UnresolvedType, - UnresolvedTypeData, UnresolvedTypeExpression, + MemberAccessExpression, MethodCallExpression, Path, PathSegment, Pattern, PrefixExpression, + UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, }; use crate::ast::{ConstrainStatement, Expression, Statement, StatementKind}; use crate::hir_def::expr::{HirArrayLiteral, HirBlockExpression, HirExpression, HirIdent}; @@ -88,13 +88,19 @@ impl HirExpression { pub fn to_display_ast(&self, interner: &NodeInterner, span: Span) -> Expression { let kind = match self { HirExpression::Ident(ident, generics) => { - let path = Path::from_ident(ident.to_display_ast(interner)); - ExpressionKind::Variable( - path, - generics.as_ref().map(|option| { + let ident = ident.to_display_ast(interner); + let segment = PathSegment { + ident, + generics: generics.as_ref().map(|option| { option.iter().map(|generic| generic.to_display_ast()).collect() }), - ) + span, + }; + + let path = + Path { segments: vec![segment], kind: crate::ast::PathKind::Plain, span }; + + ExpressionKind::Variable(path) } HirExpression::Literal(HirLiteral::Array(array)) => { let array = array.to_display_ast(interner, span); @@ -352,6 +358,13 @@ impl Type { Type::Constant(_) => panic!("Type::Constant where a type was expected: {self:?}"), Type::Quoted(quoted_type) => UnresolvedTypeData::Quoted(*quoted_type), Type::Error => UnresolvedTypeData::Error, + Type::InfixExpr(lhs, op, rhs) => { + let lhs = Box::new(lhs.to_type_expression()); + let rhs = Box::new(rhs.to_type_expression()); + let span = Span::default(); + let expr = UnresolvedTypeExpression::BinaryOperation(lhs, *op, rhs, span); + UnresolvedTypeData::Expression(expr) + } }; UnresolvedType { typ, span: None } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 2090310585c..0bb53432b78 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -1,3 +1,4 @@ +use std::collections::VecDeque; use std::{collections::hash_map::Entry, rc::Rc}; use acvm::{acir::AcirField, 
FieldElement}; @@ -10,12 +11,14 @@ use crate::ast::{BinaryOpKind, FunctionKind, IntegerBitSize, Signedness}; use crate::elaborator::Elaborator; use crate::graph::CrateId; use crate::hir_def::expr::ImplKind; +use crate::hir_def::function::FunctionBody; use crate::macros_api::UnaryOp; use crate::monomorphization::{ perform_impl_bindings, perform_instantiation_bindings, resolve_trait_method, undo_instantiation_bindings, }; use crate::token::Tokens; +use crate::TypeVariable; use crate::{ hir_def::{ expr::{ @@ -51,6 +54,12 @@ pub struct Interpreter<'local, 'interner> { in_loop: bool, current_function: Option, + + /// Maps each bound generic to each binding it has in the current callstack. + /// Since the interpreter monomorphizes as it interprets, we can bind over the same generic + /// multiple times. Without this map, when one of these inner functions exits we would + /// unbind the generic completely instead of resetting it to its previous binding. + bound_generics: Vec>, } #[allow(unused)] @@ -60,28 +69,41 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { crate_id: CrateId, current_function: Option, ) -> Self { - Self { elaborator, crate_id, current_function, in_loop: false } + let bound_generics = Vec::new(); + Self { elaborator, crate_id, current_function, bound_generics, in_loop: false } } pub(crate) fn call_function( &mut self, function: FuncId, arguments: Vec<(Value, Location)>, - instantiation_bindings: TypeBindings, + mut instantiation_bindings: TypeBindings, location: Location, ) -> IResult { let trait_method = self.elaborator.interner.get_trait_method_id(function); + // To match the monomorphizer, we need to call follow_bindings on each of + // the instantiation bindings before we unbind the generics from the previous function. + // This is because the instantiation bindings refer to variables from the call site. + for (_, binding) in instantiation_bindings.values_mut() { + *binding = binding.follow_bindings(); + } + + self.unbind_generics_from_previous_function(); perform_instantiation_bindings(&instantiation_bindings); - let impl_bindings = + let mut impl_bindings = perform_impl_bindings(self.elaborator.interner, trait_method, function, location)?; - let old_function = self.current_function.replace(function); + for (_, binding) in impl_bindings.values_mut() { + *binding = binding.follow_bindings(); + } + + self.remember_bindings(&instantiation_bindings, &impl_bindings); let result = self.call_function_inner(function, arguments, location); - self.current_function = old_function; undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(instantiation_bindings); + self.rebind_generics_from_previous_function(); result } @@ -100,19 +122,27 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { }); } - let is_comptime = self.elaborator.interner.function_modifiers(&function).is_comptime; - if !is_comptime && meta.source_crate == self.crate_id { - // Calling non-comptime functions from within the current crate is restricted - // as non-comptime items will have not been elaborated yet. 
- let function = self.elaborator.interner.function_name(&function).to_owned(); - return Err(InterpreterError::NonComptimeFnCallInSameCrate { function, location }); - } - if meta.kind != FunctionKind::Normal { let return_type = meta.return_type().follow_bindings(); - return self.call_builtin(function, arguments, return_type, location); + return self.call_special(function, arguments, return_type, location); } + // Wait until after call_special to set the current function so that builtin functions like + // `.as_type()` still call the resolver in the caller's scope. + let old_function = self.current_function.replace(function); + let result = self.call_user_defined_function(function, arguments, location); + self.current_function = old_function; + result + } + + /// Call a non-builtin function + fn call_user_defined_function( + &mut self, + function: FuncId, + arguments: Vec<(Value, Location)>, + location: Location, + ) -> IResult { + let meta = self.elaborator.interner.function_meta(&function); let parameters = meta.parameters.0.clone(); let previous_state = self.enter_function(); @@ -120,19 +150,47 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.define_pattern(parameter, typ, argument, arg_location)?; } - let function_body = - self.elaborator.interner.function(&function).try_as_expr().ok_or_else(|| { - let function = self.elaborator.interner.function_name(&function).to_owned(); - InterpreterError::NonComptimeFnCallInSameCrate { function, location } - })?; - + let function_body = self.get_function_body(function, location)?; let result = self.evaluate(function_body)?; - self.exit_function(previous_state); Ok(result) } - fn call_builtin( + /// Try to retrieve a function's body. + /// If the function has not yet been resolved this will attempt to lazily resolve it. + /// Afterwards, if the function's body is still not known or the function is still + /// in a Resolving state we issue an error. 
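`get_function_body` above lazily elaborates a function whose body is still `FunctionBody::Unresolved` and reports a dependency cycle if the body is still being resolved when it is requested again. A simplified standalone model of that lazy resolution with cycle detection, using strings as stand-ins for function bodies:

use std::collections::HashMap;

#[derive(Clone)]
enum BodyState {
    Unresolved,       // not elaborated yet; can be elaborated on demand
    Resolving,        // currently being elaborated: asking again means a cycle
    Resolved(String), // stand-in for the elaborated body
}

struct Functions {
    bodies: HashMap<&'static str, BodyState>,
}

impl Functions {
    fn get_body(&mut self, name: &'static str) -> Result<String, String> {
        match self.bodies.get(name).cloned() {
            Some(BodyState::Resolved(body)) => Ok(body),
            Some(BodyState::Unresolved) => {
                // Lazily elaborate, then ask again.
                self.elaborate(name);
                self.get_body(name)
            }
            Some(BodyState::Resolving) | None => {
                Err(format!("dependency cycle while evaluating `{name}` at compile time"))
            }
        }
    }

    fn elaborate(&mut self, name: &'static str) {
        // Mark the function as in progress first; if elaboration ended up requesting
        // `get_body(name)` again, that request would be reported as a cycle.
        self.bodies.insert(name, BodyState::Resolving);
        self.bodies.insert(name, BodyState::Resolved(format!("<body of {name}>")));
    }
}

fn main() {
    let mut functions = Functions { bodies: HashMap::from([("foo", BodyState::Unresolved)]) };
    assert_eq!(functions.get_body("foo"), Ok("<body of foo>".to_string()));
}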
+ fn get_function_body(&mut self, function: FuncId, location: Location) -> IResult { + let meta = self.elaborator.interner.function_meta(&function); + match self.elaborator.interner.function(&function).try_as_expr() { + Some(body) => Ok(body), + None => { + if matches!(&meta.function_body, FunctionBody::Unresolved(..)) { + self.elaborate_item(None, |elaborator| { + elaborator.elaborate_function(function); + }); + + self.get_function_body(function, location) + } else { + let function = self.elaborator.interner.function_name(&function).to_owned(); + Err(InterpreterError::ComptimeDependencyCycle { function, location }) + } + } + } + } + + fn elaborate_item( + &mut self, + function: Option, + f: impl FnOnce(&mut Elaborator) -> T, + ) -> T { + self.unbind_generics_from_previous_function(); + let result = self.elaborator.elaborate_item_from_comptime(function, f); + self.rebind_generics_from_previous_function(); + result + } + + fn call_special( &mut self, function: FuncId, arguments: Vec<(Value, Location)>, @@ -145,19 +203,16 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { if let Some(builtin) = func_attrs.builtin() { let builtin = builtin.clone(); - builtin::call_builtin( - self.elaborator.interner, - &builtin, - arguments, - return_type, - location, - ) + self.call_builtin(&builtin, arguments, return_type, location) } else if let Some(foreign) = func_attrs.foreign() { let foreign = foreign.clone(); foreign::call_foreign(self.elaborator.interner, &foreign, arguments, location) } else if let Some(oracle) = func_attrs.oracle() { if oracle == "print" { self.print_oracle(arguments) + // Ignore debugger functions + } else if oracle.starts_with("__debug") { + Ok(Value::Unit) } else { let item = format!("Comptime evaluation for oracle functions like {oracle}"); Err(InterpreterError::Unimplemented { item, location }) @@ -203,8 +258,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { pub(super) fn enter_function(&mut self) -> (bool, Vec>) { // Drain every scope except the global scope let mut scope = Vec::new(); - if self.elaborator.comptime_scopes.len() > 1 { - scope = self.elaborator.comptime_scopes.drain(1..).collect(); + if self.elaborator.interner.comptime_scopes.len() > 1 { + scope = self.elaborator.interner.comptime_scopes.drain(1..).collect(); } self.push_scope(); (std::mem::take(&mut self.in_loop), scope) @@ -214,21 +269,57 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.in_loop = state.0; // Keep only the global scope - self.elaborator.comptime_scopes.truncate(1); - self.elaborator.comptime_scopes.append(&mut state.1); + self.elaborator.interner.comptime_scopes.truncate(1); + self.elaborator.interner.comptime_scopes.append(&mut state.1); } pub(super) fn push_scope(&mut self) { - self.elaborator.comptime_scopes.push(HashMap::default()); + self.elaborator.interner.comptime_scopes.push(HashMap::default()); } pub(super) fn pop_scope(&mut self) { - self.elaborator.comptime_scopes.pop(); + self.elaborator.interner.comptime_scopes.pop(); } fn current_scope_mut(&mut self) -> &mut HashMap { // the global scope is always at index zero, so this is always Some - self.elaborator.comptime_scopes.last_mut().unwrap() + self.elaborator.interner.comptime_scopes.last_mut().unwrap() + } + + fn unbind_generics_from_previous_function(&mut self) { + if let Some(bindings) = self.bound_generics.last() { + for var in bindings.keys() { + var.unbind(var.id()); + } + } + // Push a new bindings list for the current function + self.bound_generics.push(HashMap::default()); + } + + fn 
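`enter_function` and `exit_function` above hide the caller's local comptime scopes (everything past the global scope at index 0) and restore them on exit. A reduced sketch of that save and restore, with a plain map standing in for a scope of comptime values:

use std::collections::HashMap;

type Scope = HashMap<u32, i64>; // stand-in for a DefinitionId -> Value scope

struct Scopes {
    // Index 0 is always the global scope.
    stack: Vec<Scope>,
}

impl Scopes {
    // On entering a function, hide the caller's local scopes and start the callee
    // with a single fresh scope on top of the global one.
    fn enter_function(&mut self) -> Vec<Scope> {
        let saved =
            if self.stack.len() > 1 { self.stack.drain(1..).collect() } else { Vec::new() };
        self.stack.push(Scope::default());
        saved
    }

    // On exit, drop the callee's scopes and put the caller's back.
    fn exit_function(&mut self, mut saved: Vec<Scope>) {
        self.stack.truncate(1);
        self.stack.append(&mut saved);
    }
}

fn main() {
    let mut scopes = Scopes { stack: vec![Scope::default()] };
    scopes.stack.push(Scope::from([(1, 10)])); // a caller-local binding
    let saved = scopes.enter_function();
    assert_eq!(scopes.stack.len(), 2); // global scope + fresh callee scope
    scopes.exit_function(saved);
    assert_eq!(scopes.stack.len(), 2); // global scope + restored caller scope
}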
rebind_generics_from_previous_function(&mut self) { + // Remove the currently bound generics first. + self.bound_generics.pop(); + + if let Some(bindings) = self.bound_generics.last() { + for (var, binding) in bindings { + var.force_bind(binding.clone()); + } + } + } + + fn remember_bindings(&mut self, main_bindings: &TypeBindings, impl_bindings: &TypeBindings) { + let bound_generics = self + .bound_generics + .last_mut() + .expect("remember_bindings called with no bound_generics on the stack"); + + for (var, binding) in main_bindings.values() { + bound_generics.insert(var.clone(), binding.follow_bindings()); + } + + for (var, binding) in impl_bindings.values() { + bound_generics.insert(var.clone(), binding.follow_bindings()); + } } pub(super) fn define_pattern( @@ -248,21 +339,30 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let argument = Value::Pointer(Shared::new(argument), true); self.define_pattern(pattern, typ, argument, location) } - HirPattern::Tuple(pattern_fields, _) => match (argument, typ) { - (Value::Tuple(fields), Type::Tuple(type_fields)) - if fields.len() == pattern_fields.len() => - { - for ((pattern, typ), argument) in - pattern_fields.iter().zip(type_fields).zip(fields) + HirPattern::Tuple(pattern_fields, _) => { + let typ = &typ.follow_bindings(); + + match (argument, typ) { + (Value::Tuple(fields), Type::Tuple(type_fields)) + if fields.len() == pattern_fields.len() => { - self.define_pattern(pattern, typ, argument, location)?; + for ((pattern, typ), argument) in + pattern_fields.iter().zip(type_fields).zip(fields) + { + self.define_pattern(pattern, typ, argument, location)?; + } + Ok(()) + } + (value, _) => { + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { + expected: typ.clone(), + actual, + location, + }) } - Ok(()) - } - (value, _) => { - Err(InterpreterError::TypeMismatch { expected: typ.clone(), value, location }) } - }, + } HirPattern::Struct(struct_type, pattern_fields, _) => { self.push_scope(); @@ -271,7 +371,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { for (field_name, field_pattern) in pattern_fields { let field = fields.get(&field_name.0.contents).ok_or_else(|| { InterpreterError::ExpectedStructToHaveField { - value: Value::Struct(fields.clone(), struct_type.clone()), + typ: struct_type.clone(), field_name: field_name.0.contents.clone(), location, } @@ -289,7 +389,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } value => Err(InterpreterError::TypeMismatch { expected: typ.clone(), - value, + actual: value.get_type().into_owned(), location, }), }; @@ -311,7 +411,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { return Ok(()); } - for scope in self.elaborator.comptime_scopes.iter_mut().rev() { + for scope in self.elaborator.interner.comptime_scopes.iter_mut().rev() { if let Entry::Occupied(mut entry) = scope.entry(id) { entry.insert(argument); return Ok(()); @@ -325,7 +425,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } pub fn lookup_id(&self, id: DefinitionId, location: Location) -> IResult { - for scope in self.elaborator.comptime_scopes.iter().rev() { + for scope in self.elaborator.interner.comptime_scopes.iter().rev() { if let Some(value) = scope.get(&id) { return Ok(value.clone()); } @@ -449,16 +549,50 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.evaluate_integer(value, is_negative, id) } HirLiteral::Str(string) => Ok(Value::String(Rc::new(string))), - HirLiteral::FmtStr(_, _) => { - let item = "format strings in a comptime 
context".into(); - let location = self.elaborator.interner.expr_location(&id); - Err(InterpreterError::Unimplemented { item, location }) + HirLiteral::FmtStr(string, captures) => { + self.evaluate_format_string(string, captures, id) } HirLiteral::Array(array) => self.evaluate_array(array, id), HirLiteral::Slice(array) => self.evaluate_slice(array, id), } } + fn evaluate_format_string( + &mut self, + string: String, + captures: Vec, + id: ExprId, + ) -> IResult { + let mut result = String::new(); + let mut escaped = false; + let mut consuming = false; + + let mut values: VecDeque<_> = + captures.into_iter().map(|capture| self.evaluate(capture)).collect::>()?; + + for character in string.chars() { + match character { + '\\' => escaped = true, + '{' if !escaped => consuming = true, + '}' if !escaped && consuming => { + consuming = false; + + if let Some(value) = values.pop_front() { + result.push_str(&value.display(self.elaborator.interner).to_string()); + } + } + other if !consuming => { + escaped = false; + result.push(other); + } + _ => (), + } + } + + let typ = self.elaborator.interner.id_type(id); + Ok(Value::FormatString(Rc::new(result), typ)) + } + fn evaluate_integer( &self, value: FieldElement, @@ -644,7 +778,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { value => { let location = self.elaborator.interner.expr_location(&id); let operator = "minus"; - Err(InterpreterError::InvalidValueForUnary { value, location, operator }) + let typ = value.get_type().into_owned(); + Err(InterpreterError::InvalidValueForUnary { typ, location, operator }) } }, UnaryOp::Not => match rhs { @@ -659,7 +794,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Value::U64(value) => Ok(Value::U64(!value)), value => { let location = self.elaborator.interner.expr_location(&id); - Err(InterpreterError::InvalidValueForUnary { value, location, operator: "not" }) + let typ = value.get_type().into_owned(); + Err(InterpreterError::InvalidValueForUnary { typ, location, operator: "not" }) } }, UnaryOp::MutableReference => { @@ -675,7 +811,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Value::Pointer(element, _) => Ok(element.borrow().clone()), value => { let location = self.elaborator.interner.expr_location(&id); - Err(InterpreterError::NonPointerDereferenced { value, location }) + let typ = value.get_type().into_owned(); + Err(InterpreterError::NonPointerDereferenced { typ, location }) } }, } @@ -689,6 +826,13 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { return self.evaluate_overloaded_infix(infix, lhs, rhs, id); } + let make_error = |this: &mut Self, lhs: Value, rhs: Value, operator| { + let location = this.elaborator.interner.expr_location(&id); + let lhs = lhs.get_type().into_owned(); + let rhs = rhs.get_type().into_owned(); + Err(InvalidValuesForBinary { lhs, rhs, location, operator }) + }; + use InterpreterError::InvalidValuesForBinary; match infix.operator.kind { BinaryOpKind::Add => match (lhs, rhs) { @@ -701,10 +845,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs + rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs + rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs + rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "+" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "+"), }, BinaryOpKind::Subtract => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => 
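`evaluate_format_string` above walks the literal character by character, treating `\` as an escape and replacing each `{...}` section with the display form of the next captured value. The same loop as a standalone sketch, with plain strings standing in for the evaluated captures:

use std::collections::VecDeque;

fn interpolate(template: &str, captures: Vec<String>) -> String {
    let mut result = String::new();
    let mut escaped = false;
    let mut consuming = false;
    let mut values: VecDeque<String> = captures.into();

    for character in template.chars() {
        match character {
            '\\' => escaped = true,
            '{' if !escaped => consuming = true,
            '}' if !escaped && consuming => {
                consuming = false;
                if let Some(value) = values.pop_front() {
                    result.push_str(&value);
                }
            }
            other if !consuming => {
                escaped = false;
                result.push(other);
            }
            // Characters between `{` and `}` are skipped; only the capture is printed.
            _ => (),
        }
    }
    result
}

fn main() {
    assert_eq!(interpolate("x = {x}", vec!["3".to_string()]), "x = 3");
}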
Ok(Value::Field(lhs - rhs)), @@ -716,10 +857,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs - rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs - rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs - rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "-" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "-"), }, BinaryOpKind::Multiply => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs * rhs)), @@ -731,10 +869,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs * rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs * rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs * rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "*" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "*"), }, BinaryOpKind::Divide => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Field(lhs / rhs)), @@ -746,10 +881,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs / rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs / rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs / rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "/" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "/"), }, BinaryOpKind::Equal => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs == rhs)), @@ -761,10 +893,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs == rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs == rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "==" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "=="), }, BinaryOpKind::NotEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs != rhs)), @@ -776,10 +905,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs != rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs != rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "!=" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "!="), }, BinaryOpKind::Less => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs < rhs)), @@ -791,10 +917,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs < rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs < rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "<" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "<"), }, BinaryOpKind::LessEqual => match (lhs, rhs) { (Value::Field(lhs), 
Value::Field(rhs)) => Ok(Value::Bool(lhs <= rhs)), @@ -806,10 +929,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs <= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs <= rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "<=" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "<="), }, BinaryOpKind::Greater => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs > rhs)), @@ -821,10 +941,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs > rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs > rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: ">" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, ">"), }, BinaryOpKind::GreaterEqual => match (lhs, rhs) { (Value::Field(lhs), Value::Field(rhs)) => Ok(Value::Bool(lhs >= rhs)), @@ -836,10 +953,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::Bool(lhs >= rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::Bool(lhs >= rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: ">=" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, ">="), }, BinaryOpKind::And => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs & rhs)), @@ -851,10 +965,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs & rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs & rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs & rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "&" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "&"), }, BinaryOpKind::Or => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs | rhs)), @@ -866,10 +977,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs | rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs | rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs | rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "|" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "|"), }, BinaryOpKind::Xor => match (lhs, rhs) { (Value::Bool(lhs), Value::Bool(rhs)) => Ok(Value::Bool(lhs ^ rhs)), @@ -881,10 +989,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs ^ rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs ^ rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs ^ rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "^" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "^"), }, BinaryOpKind::ShiftRight => match (lhs, rhs) { (Value::I8(lhs), 
Value::I8(rhs)) => Ok(Value::I8(lhs >> rhs)), @@ -895,10 +1000,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs >> rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs >> rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs >> rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: ">>" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, ">>"), }, BinaryOpKind::ShiftLeft => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs << rhs)), @@ -909,10 +1011,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs << rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs << rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs << rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "<<" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "<<"), }, BinaryOpKind::Modulo => match (lhs, rhs) { (Value::I8(lhs), Value::I8(rhs)) => Ok(Value::I8(lhs % rhs)), @@ -923,10 +1022,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { (Value::U16(lhs), Value::U16(rhs)) => Ok(Value::U16(lhs % rhs)), (Value::U32(lhs), Value::U32(rhs)) => Ok(Value::U32(lhs % rhs)), (Value::U64(lhs), Value::U64(rhs)) => Ok(Value::U64(lhs % rhs)), - (lhs, rhs) => { - let location = self.elaborator.interner.expr_location(&id); - Err(InvalidValuesForBinary { lhs, rhs, location, operator: "%" }) - } + (lhs, rhs) => make_error(self, lhs, rhs, "%"), }, } } @@ -1030,7 +1126,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Value::Array(array, _) => array, Value::Slice(array, _) => array, value => { - return Err(InterpreterError::NonArrayIndexed { value, location }); + let typ = value.get_type().into_owned(); + return Err(InterpreterError::NonArrayIndexed { typ, location }); } }; @@ -1050,7 +1147,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Value::U32(value) => value as usize, Value::U64(value) => value as usize, value => { - return Err(InterpreterError::NonIntegerUsedAsIndex { value, location }); + let typ = value.get_type().into_owned(); + return Err(InterpreterError::NonIntegerUsedAsIndex { typ, location }); } }; @@ -1097,7 +1195,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } value => { let location = self.elaborator.interner.expr_location(&id); - return Err(InterpreterError::NonTupleOrStructInMemberAccess { value, location }); + let typ = value.get_type().into_owned(); + return Err(InterpreterError::NonTupleOrStructInMemberAccess { typ, location }); } }; @@ -1105,7 +1204,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let location = self.elaborator.interner.expr_location(&id); let value = Value::Struct(fields, struct_type); let field_name = access.rhs.0.contents; - InterpreterError::ExpectedStructToHaveField { value, field_name, location } + let typ = value.get_type().into_owned(); + InterpreterError::ExpectedStructToHaveField { typ, field_name, location } }) } @@ -1122,15 +1222,18 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let mut result = self.call_function(function_id, arguments, bindings, location)?; if call.is_macro_call { let expr = result.into_expression(self.elaborator.interner, location)?; - let expr = self - .elaborator - .elaborate_expression_from_comptime(expr, 
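Each of the arithmetic and comparison matches above now routes its failure case through a single `make_error` closure that records only the operand types for the diagnostic. A reduced sketch of that pattern over a two-variant value enum:

#[derive(Debug, Clone, Copy, PartialEq)]
enum Value {
    Bool(bool),
    U32(u32),
}

#[derive(Debug, PartialEq)]
struct InvalidValuesForBinary {
    lhs: &'static str,
    rhs: &'static str,
    operator: &'static str,
}

fn type_name(value: Value) -> &'static str {
    match value {
        Value::Bool(_) => "bool",
        Value::U32(_) => "u32",
    }
}

fn evaluate_infix(
    lhs: Value,
    rhs: Value,
    operator: &'static str,
) -> Result<Value, InvalidValuesForBinary> {
    // One shared error constructor replaces a hand-written `Err(...)` block per arm.
    let make_error = |lhs: Value, rhs: Value| -> Result<Value, InvalidValuesForBinary> {
        Err(InvalidValuesForBinary { lhs: type_name(lhs), rhs: type_name(rhs), operator })
    };

    match operator {
        "+" => match (lhs, rhs) {
            (Value::U32(a), Value::U32(b)) => Ok(Value::U32(a + b)),
            (lhs, rhs) => make_error(lhs, rhs),
        },
        "==" => match (lhs, rhs) {
            (Value::U32(a), Value::U32(b)) => Ok(Value::Bool(a == b)),
            (Value::Bool(a), Value::Bool(b)) => Ok(Value::Bool(a == b)),
            (lhs, rhs) => make_error(lhs, rhs),
        },
        _ => make_error(lhs, rhs),
    }
}

fn main() {
    assert_eq!(evaluate_infix(Value::U32(2), Value::U32(3), "+"), Ok(Value::U32(5)));
    assert_eq!(
        evaluate_infix(Value::U32(2), Value::Bool(true), "+"),
        Err(InvalidValuesForBinary { lhs: "u32", rhs: "bool", operator: "+" })
    );
}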
self.current_function); + let expr = self.elaborate_item(self.current_function, |elaborator| { + elaborator.elaborate_expression(expr).0 + }); result = self.evaluate(expr)?; } Ok(result) } Value::Closure(closure, env, _) => self.call_closure(closure, env, arguments, location), - value => Err(InterpreterError::NonFunctionCalled { value, location }), + value => { + let typ = value.get_type().into_owned(); + Err(InterpreterError::NonFunctionCalled { typ, location }) + } } } @@ -1206,7 +1309,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } value => { let location = interner.expr_location(&id); - return Err(InterpreterError::NonNumericCasted { value, location }); + let typ = value.get_type().into_owned(); + return Err(InterpreterError::NonNumericCasted { typ, location }); } }; @@ -1271,7 +1375,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Value::Bool(value) => value, value => { let location = self.elaborator.interner.expr_location(&id); - return Err(InterpreterError::NonBoolUsedInIf { value, location }); + let typ = value.get_type().into_owned(); + return Err(InterpreterError::NonBoolUsedInIf { typ, location }); } }; @@ -1311,8 +1416,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { fn evaluate_quote(&mut self, mut tokens: Tokens, expr_id: ExprId) -> IResult { let location = self.elaborator.interner.expr_location(&expr_id); - tokens = self.substitute_unquoted_values_into_tokens(tokens, location)?; - Ok(Value::Code(Rc::new(tokens))) + let tokens = self.substitute_unquoted_values_into_tokens(tokens, location)?; + Ok(Value::Quoted(Rc::new(tokens))) } pub fn evaluate_statement(&mut self, statement: StmtId) -> IResult { @@ -1349,11 +1454,14 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Value::Bool(false) => { let location = self.elaborator.interner.expr_location(&constrain.0); let message = constrain.2.and_then(|expr| self.evaluate(expr).ok()); + let message = + message.map(|value| value.display(self.elaborator.interner).to_string()); Err(InterpreterError::FailingConstraint { location, message }) } value => { let location = self.elaborator.interner.expr_location(&constrain.0); - Err(InterpreterError::NonBoolUsedInConstrain { value, location }) + let typ = value.get_type().into_owned(); + Err(InterpreterError::NonBoolUsedInConstrain { typ, location }) } } } @@ -1373,7 +1481,10 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { *value.borrow_mut() = rhs; Ok(()) } - value => Err(InterpreterError::NonPointerDereferenced { value, location }), + value => { + let typ = value.get_type().into_owned(); + Err(InterpreterError::NonPointerDereferenced { typ, location }) + } } } HirLValue::MemberAccess { object, field_name, field_index, typ: _, location } => { @@ -1382,7 +1493,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let index = field_index.ok_or_else(|| { let value = object_value.clone(); let field_name = field_name.to_string(); - InterpreterError::ExpectedStructToHaveField { value, field_name, location } + let typ = value.get_type().into_owned(); + InterpreterError::ExpectedStructToHaveField { typ, field_name, location } })?; match object_value { @@ -1395,7 +1507,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.store_lvalue(*object, Value::Struct(fields, typ.follow_bindings())) } value => { - Err(InterpreterError::NonTupleOrStructInMemberAccess { value, location }) + let typ = value.get_type().into_owned(); + Err(InterpreterError::NonTupleOrStructInMemberAccess { typ, location }) } } } @@ -1427,7 +1540,8 @@ 
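The constrain handling above now renders the optional failure message to a string before constructing the error, so the error no longer carries a full `Value`. A minimal stand-in sketch, with `Debug` formatting in place of the interner-backed display:

#[derive(Debug, PartialEq)]
enum Value {
    Bool(bool),
    U32(u32),
}

#[derive(Debug, PartialEq)]
enum InterpreterError {
    FailingConstraint { message: Option<String> },
    NonBoolUsedInConstrain { typ: &'static str },
}

fn evaluate_constrain(condition: Value, message: Option<Value>) -> Result<(), InterpreterError> {
    match condition {
        Value::Bool(true) => Ok(()),
        Value::Bool(false) => {
            // Render the message eagerly so the error only stores a plain string.
            let message = message.map(|value| format!("{value:?}"));
            Err(InterpreterError::FailingConstraint { message })
        }
        Value::U32(_) => Err(InterpreterError::NonBoolUsedInConstrain { typ: "u32" }),
    }
}

fn main() {
    assert_eq!(evaluate_constrain(Value::Bool(true), None), Ok(()));
    let failure = evaluate_constrain(Value::Bool(false), Some(Value::U32(3)));
    let expected = InterpreterError::FailingConstraint { message: Some("U32(3)".to_string()) };
    assert_eq!(failure, Err(expected));
}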
impl<'local, 'interner> Interpreter<'local, 'interner> { match self.evaluate_lvalue(lvalue)? { Value::Pointer(value, _) => Ok(value.borrow().clone()), value => { - Err(InterpreterError::NonPointerDereferenced { value, location: *location }) + let typ = value.get_type().into_owned(); + Err(InterpreterError::NonPointerDereferenced { typ, location: *location }) } } } @@ -1438,14 +1552,15 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let value = object_value.clone(); let field_name = field_name.to_string(); let location = *location; - InterpreterError::ExpectedStructToHaveField { value, field_name, location } + let typ = value.get_type().into_owned(); + InterpreterError::ExpectedStructToHaveField { typ, field_name, location } })?; match object_value { Value::Tuple(mut values) => Ok(values.swap_remove(index)), Value::Struct(fields, _) => Ok(fields[&field_name.0.contents].clone()), value => Err(InterpreterError::NonTupleOrStructInMemberAccess { - value, + typ: value.get_type().into_owned(), location: *location, }), } @@ -1473,7 +1588,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { Value::U64(value) => Ok((value as i128, |i| Value::U64(i as u64))), value => { let location = this.elaborator.interner.expr_location(&expr); - Err(InterpreterError::NonIntegerUsedInLoop { value, location }) + let typ = value.get_type().into_owned(); + Err(InterpreterError::NonIntegerUsedInLoop { typ, location }) } } }; @@ -1527,9 +1643,9 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let print_newline = arguments[0].0 == Value::Bool(true); if print_newline { - println!("{}", arguments[1].0); + println!("{}", arguments[1].0.display(self.elaborator.interner)); } else { - print!("{}", arguments[1].0); + print!("{}", arguments[1].0.display(self.elaborator.interner)); } Ok(Value::Unit) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index 02c45165ee3..ebdbddb1c41 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -6,54 +6,71 @@ use std::{ use acvm::{AcirField, FieldElement}; use chumsky::Parser; use iter_extended::{try_vecmap, vecmap}; -use noirc_errors::{Location, Span}; +use noirc_errors::Location; use rustc_hash::FxHashMap as HashMap; use crate::{ - ast::{IntegerBitSize, TraitBound}, - hir::comptime::{errors::IResult, InterpreterError, Value}, - macros_api::{NodeInterner, Path, Signedness, UnresolvedTypeData}, + ast::IntegerBitSize, + hir::comptime::{errors::IResult, value::add_token_spans, InterpreterError, Value}, + macros_api::{NodeInterner, Signedness}, node_interner::TraitId, parser, - token::{SpannedToken, Token, Tokens}, + token::Token, QuotedType, Shared, Type, }; -pub(super) fn call_builtin( - interner: &mut NodeInterner, - name: &str, - arguments: Vec<(Value, Location)>, - return_type: Type, - location: Location, -) -> IResult { - match name { - "array_len" => array_len(interner, arguments, location), - "as_slice" => as_slice(interner, arguments, location), - "is_unconstrained" => Ok(Value::Bool(true)), - "modulus_be_bits" => modulus_be_bits(interner, arguments, location), - "modulus_be_bytes" => modulus_be_bytes(interner, arguments, location), - "modulus_le_bits" => modulus_le_bits(interner, arguments, location), - "modulus_le_bytes" => modulus_le_bytes(interner, arguments, location), - "modulus_num_bits" => 
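The `for` loop bounds above are normalized to an `i128` plus a constructor function, so a single numeric loop can cover every integer width and still rebuild index values of the original type. A standalone sketch of that idea:

#[derive(Debug, Clone, Copy, PartialEq)]
enum Value {
    I8(i8),
    U32(u32),
}

// Normalize a bound to i128 plus a constructor that rebuilds the original type.
fn as_loop_bound(value: Value) -> Result<(i128, fn(i128) -> Value), &'static str> {
    match value {
        Value::I8(v) => Ok((v as i128, |i| Value::I8(i as i8))),
        Value::U32(v) => Ok((v as i128, |i| Value::U32(i as u32))),
    }
}

fn run_for_loop(start: Value, end: Value) -> Result<Vec<Value>, &'static str> {
    let (start, make_value) = as_loop_bound(start)?;
    let (end, _) = as_loop_bound(end)?;
    Ok((start..end).map(make_value).collect())
}

fn main() {
    let values = run_for_loop(Value::U32(0), Value::U32(3)).unwrap();
    assert_eq!(values, vec![Value::U32(0), Value::U32(1), Value::U32(2)]);
}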
modulus_num_bits(interner, arguments, location), - "slice_insert" => slice_insert(interner, arguments, location), - "slice_pop_back" => slice_pop_back(interner, arguments, location), - "slice_pop_front" => slice_pop_front(interner, arguments, location), - "slice_push_back" => slice_push_back(interner, arguments, location), - "slice_push_front" => slice_push_front(interner, arguments, location), - "slice_remove" => slice_remove(interner, arguments, location), - "struct_def_as_type" => struct_def_as_type(interner, arguments, location), - "struct_def_fields" => struct_def_fields(interner, arguments, location), - "struct_def_generics" => struct_def_generics(interner, arguments, location), - "trait_constraint_eq" => trait_constraint_eq(interner, arguments, location), - "trait_constraint_hash" => trait_constraint_hash(interner, arguments, location), - "trait_def_as_trait_constraint" => { - trait_def_as_trait_constraint(interner, arguments, location) - } - "quoted_as_trait_constraint" => quoted_as_trait_constraint(interner, arguments, location), - "zeroed" => zeroed(return_type, location), - _ => { - let item = format!("Comptime evaluation for builtin function {name}"); - Err(InterpreterError::Unimplemented { item, location }) +use super::Interpreter; + +impl<'local, 'context> Interpreter<'local, 'context> { + pub(super) fn call_builtin( + &mut self, + name: &str, + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, + ) -> IResult { + let interner = &mut self.elaborator.interner; + match name { + "array_len" => array_len(interner, arguments, location), + "as_slice" => as_slice(interner, arguments, location), + "is_unconstrained" => Ok(Value::Bool(true)), + "modulus_be_bits" => modulus_be_bits(interner, arguments, location), + "modulus_be_bytes" => modulus_be_bytes(interner, arguments, location), + "modulus_le_bits" => modulus_le_bits(interner, arguments, location), + "modulus_le_bytes" => modulus_le_bytes(interner, arguments, location), + "modulus_num_bits" => modulus_num_bits(interner, arguments, location), + "slice_insert" => slice_insert(interner, arguments, location), + "slice_pop_back" => slice_pop_back(interner, arguments, location), + "slice_pop_front" => slice_pop_front(interner, arguments, location), + "slice_push_back" => slice_push_back(interner, arguments, location), + "slice_push_front" => slice_push_front(interner, arguments, location), + "slice_remove" => slice_remove(interner, arguments, location), + "struct_def_as_type" => struct_def_as_type(interner, arguments, location), + "struct_def_fields" => struct_def_fields(interner, arguments, location), + "struct_def_generics" => struct_def_generics(interner, arguments, location), + "trait_constraint_eq" => trait_constraint_eq(interner, arguments, location), + "trait_constraint_hash" => trait_constraint_hash(interner, arguments, location), + "trait_def_as_trait_constraint" => { + trait_def_as_trait_constraint(interner, arguments, location) + } + "trait_def_eq" => trait_def_eq(interner, arguments, location), + "trait_def_hash" => trait_def_hash(interner, arguments, location), + "quoted_as_trait_constraint" => quoted_as_trait_constraint(self, arguments, location), + "quoted_as_type" => quoted_as_type(self, arguments, location), + "type_as_array" => type_as_array(arguments, return_type, location), + "type_as_constant" => type_as_constant(arguments, return_type, location), + "type_as_integer" => type_as_integer(arguments, return_type, location), + "type_as_slice" => type_as_slice(arguments, return_type, location), 
+ "type_as_tuple" => type_as_tuple(arguments, return_type, location), + "type_eq" => type_eq(arguments, location), + "type_is_bool" => type_is_bool(arguments, location), + "type_is_field" => type_is_field(arguments, location), + "type_of" => type_of(arguments, location), + "zeroed" => zeroed(return_type), + _ => { + let item = format!("Comptime evaluation for builtin function {name}"); + Err(InterpreterError::Unimplemented { item, location }) + } } } } @@ -71,9 +88,42 @@ pub(super) fn check_argument_count( } } +pub(super) fn check_one_argument( + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(1, &arguments, location)?; + + Ok(arguments.pop().unwrap().0) +} + +pub(super) fn check_two_arguments( + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult<(Value, Value)> { + check_argument_count(2, &arguments, location)?; + + let argument2 = arguments.pop().unwrap().0; + let argument1 = arguments.pop().unwrap().0; + + Ok((argument1, argument2)) +} + +pub(super) fn check_three_arguments( + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult<(Value, Value, Value)> { + check_argument_count(3, &arguments, location)?; + + let argument3 = arguments.pop().unwrap().0; + let argument2 = arguments.pop().unwrap().0; + let argument1 = arguments.pop().unwrap().0; + + Ok((argument1, argument2, argument3)) +} + fn failing_constraint(message: impl Into, location: Location) -> IResult { - let message = Some(Value::String(Rc::new(message.into()))); - Err(InterpreterError::FailingConstraint { message, location }) + Err(InterpreterError::FailingConstraint { message: Some(message.into()), location }) } pub(super) fn get_array( @@ -86,7 +136,8 @@ pub(super) fn get_array( value => { let type_var = Box::new(interner.next_type_variable()); let expected = Type::Array(type_var.clone(), type_var); - Err(InterpreterError::TypeMismatch { expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } @@ -101,7 +152,8 @@ fn get_slice( value => { let type_var = Box::new(interner.next_type_variable()); let expected = Type::Slice(type_var); - Err(InterpreterError::TypeMismatch { expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } @@ -110,7 +162,8 @@ pub(super) fn get_field(value: Value, location: Location) -> IResult Ok(value), value => { - Err(InterpreterError::TypeMismatch { expected: Type::FieldElement, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected: Type::FieldElement, actual, location }) } } } @@ -120,17 +173,19 @@ pub(super) fn get_u32(value: Value, location: Location) -> IResult { Value::U32(value) => Ok(value), value => { let expected = Type::Integer(Signedness::Unsigned, IntegerBitSize::ThirtyTwo); - Err(InterpreterError::TypeMismatch { expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } -fn get_trait_constraint(value: Value, location: Location) -> IResult { +fn get_trait_constraint(value: Value, location: Location) -> IResult<(TraitId, Vec)> { match value { - Value::TraitConstraint(bound) => Ok(bound), + Value::TraitConstraint(trait_id, generics) => Ok((trait_id, generics)), value => { let expected = Type::Quoted(QuotedType::TraitConstraint); - Err(InterpreterError::TypeMismatch { 
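The `check_one_argument`, `check_two_arguments` and `check_three_arguments` helpers above validate the arity once and then pop from the back of the argument vector so the values come back in declaration order. The same shape in isolation, with an integer standing in for the interpreter's `Value`:

type Value = i64; // stand-in for the interpreter's Value

fn check_argument_count(expected: usize, arguments: &[Value]) -> Result<(), String> {
    if arguments.len() == expected {
        Ok(())
    } else {
        Err(format!("expected {expected} argument(s), found {}", arguments.len()))
    }
}

fn check_two_arguments(mut arguments: Vec<Value>) -> Result<(Value, Value), String> {
    check_argument_count(2, &arguments)?;
    // Pop from the back so the first declared argument is returned first.
    let argument2 = arguments.pop().unwrap();
    let argument1 = arguments.pop().unwrap();
    Ok((argument1, argument2))
}

fn main() {
    assert_eq!(check_two_arguments(vec![10, 20]), Ok((10, 20)));
    assert!(check_two_arguments(vec![10]).is_err());
}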
expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } @@ -140,178 +195,182 @@ fn get_trait_def(value: Value, location: Location) -> IResult { Value::TraitDefinition(id) => Ok(id), value => { let expected = Type::Quoted(QuotedType::TraitDefinition); - Err(InterpreterError::TypeMismatch { expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) + } + } +} + +fn get_type(value: Value, location: Location) -> IResult { + match value { + Value::Type(typ) => Ok(typ), + value => { + let expected = Type::Quoted(QuotedType::Type); + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } -fn get_quoted(value: Value, location: Location) -> IResult> { +fn get_quoted(value: Value, location: Location) -> IResult>> { match value { - Value::Code(tokens) => Ok(tokens), + Value::Quoted(tokens) => Ok(tokens), value => { let expected = Type::Quoted(QuotedType::Quoted); - Err(InterpreterError::TypeMismatch { expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } fn array_len( interner: &NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - match arguments.pop().unwrap().0 { + match argument { Value::Array(values, _) | Value::Slice(values, _) => Ok(Value::U32(values.len() as u32)), value => { let type_var = Box::new(interner.next_type_variable()); let expected = Type::Array(type_var.clone(), type_var); - Err(InterpreterError::TypeMismatch { expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } fn as_slice( interner: &NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let array = check_one_argument(arguments, location)?; - let (array, _) = arguments.pop().unwrap(); match array { Value::Array(values, Type::Array(_, typ)) => Ok(Value::Slice(values, Type::Slice(typ))), value => { let type_var = Box::new(interner.next_type_variable()); let expected = Type::Array(type_var.clone(), type_var); - Err(InterpreterError::TypeMismatch { expected, value, location }) + let actual = value.get_type().into_owned(); + Err(InterpreterError::TypeMismatch { expected, actual, location }) } } } fn slice_push_back( interner: &NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(2, &arguments, location)?; + let (slice, element) = check_two_arguments(arguments, location)?; - let (element, _) = arguments.pop().unwrap(); - let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + let (mut values, typ) = get_slice(interner, slice, location)?; values.push_back(element); Ok(Value::Slice(values, typ)) } -/// fn as_type(self) -> Quoted +/// fn as_type(self) -> Type fn struct_def_as_type( interner: &NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument 
= check_one_argument(arguments, location)?; - let (struct_def, span) = match arguments.pop().unwrap() { - (Value::StructDefinition(id), location) => (id, location.span), + let struct_def = match argument { + Value::StructDefinition(id) => id, value => { let expected = Type::Quoted(QuotedType::StructDefinition); - return Err(InterpreterError::TypeMismatch { expected, location, value: value.0 }); + let actual = value.get_type().into_owned(); + return Err(InterpreterError::TypeMismatch { expected, location, actual }); } }; - let struct_def = interner.get_struct(struct_def); - let struct_def = struct_def.borrow(); - let make_token = |name| SpannedToken::new(Token::Ident(name), span); + let struct_def_rc = interner.get_struct(struct_def); + let struct_def = struct_def_rc.borrow(); - let mut tokens = vec![make_token(struct_def.name.to_string())]; - - for (i, generic) in struct_def.generics.iter().enumerate() { - if i != 0 { - tokens.push(SpannedToken::new(Token::Comma, span)); - } - tokens.push(make_token(generic.type_var.borrow().to_string())); - } + let generics = vecmap(&struct_def.generics, |generic| { + Type::NamedGeneric(generic.type_var.clone(), generic.name.clone(), generic.kind.clone()) + }); - Ok(Value::Code(Rc::new(Tokens(tokens)))) + drop(struct_def); + Ok(Value::Type(Type::Struct(struct_def_rc, generics))) } -/// fn generics(self) -> [Quoted] +/// fn generics(self) -> [Type] fn struct_def_generics( interner: &NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - let (struct_def, span) = match arguments.pop().unwrap() { - (Value::StructDefinition(id), location) => (id, location.span), + let struct_def = match argument { + Value::StructDefinition(id) => id, value => { let expected = Type::Quoted(QuotedType::StructDefinition); - return Err(InterpreterError::TypeMismatch { expected, location, value: value.0 }); + let actual = value.get_type().into_owned(); + return Err(InterpreterError::TypeMismatch { expected, location, actual }); } }; let struct_def = interner.get_struct(struct_def); let struct_def = struct_def.borrow(); - let generics = struct_def.generics.iter().map(|generic| { - let name = SpannedToken::new(Token::Ident(generic.type_var.borrow().to_string()), span); - Value::Code(Rc::new(Tokens(vec![name]))) - }); + let generics = + struct_def.generics.iter().map(|generic| Value::Type(generic.clone().as_named_generic())); - let typ = Type::Slice(Box::new(Type::Quoted(QuotedType::Quoted))); + let typ = Type::Slice(Box::new(Type::Quoted(QuotedType::Type))); Ok(Value::Slice(generics.collect(), typ)) } -/// fn fields(self) -> [(Quoted, Quoted)] +/// fn fields(self) -> [(Quoted, Type)] /// Returns (name, type) pairs of each field of this StructDefinition fn struct_def_fields( interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - let (struct_def, span) = match arguments.pop().unwrap() { - (Value::StructDefinition(id), location) => (id, location.span), + let struct_def = match argument { + Value::StructDefinition(id) => id, value => { let expected = Type::Quoted(QuotedType::StructDefinition); - return Err(InterpreterError::TypeMismatch { expected, location, value: value.0 }); + let actual = 
value.get_type().into_owned(); + return Err(InterpreterError::TypeMismatch { expected, location, actual }); } }; let struct_def = interner.get_struct(struct_def); let struct_def = struct_def.borrow(); - let make_token = |name| SpannedToken::new(Token::Ident(name), span); - let make_quoted = |tokens| Value::Code(Rc::new(Tokens(tokens))); - let mut fields = im::Vector::new(); for (name, typ) in struct_def.get_fields_as_written() { - let name = make_quoted(vec![make_token(name)]); - let id = interner.push_quoted_type(typ); - let typ = SpannedToken::new(Token::QuotedType(id), span); - let typ = Value::Code(Rc::new(Tokens(vec![typ]))); + let name = Value::Quoted(Rc::new(vec![Token::Ident(name)])); + let typ = Value::Type(typ); fields.push_back(Value::Tuple(vec![name, typ])); } let typ = Type::Slice(Box::new(Type::Tuple(vec![ Type::Quoted(QuotedType::Quoted), - Type::Quoted(QuotedType::Quoted), + Type::Quoted(QuotedType::Type), ]))); Ok(Value::Slice(fields, typ)) } fn slice_remove( interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(2, &arguments, location)?; + let (slice, index) = check_two_arguments(arguments, location)?; - let index = get_u32(arguments.pop().unwrap().0, location)? as usize; - let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + let index = get_u32(index, location)? as usize; + let (mut values, typ) = get_slice(interner, slice, location)?; if values.is_empty() { return failing_constraint("slice_remove called on empty slice", location); @@ -331,25 +390,24 @@ fn slice_remove( fn slice_push_front( interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(2, &arguments, location)?; + let (slice, element) = check_two_arguments(arguments, location)?; - let (element, _) = arguments.pop().unwrap(); - let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + let (mut values, typ) = get_slice(interner, slice, location)?; values.push_front(element); Ok(Value::Slice(values, typ)) } fn slice_pop_front( interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + let (mut values, typ) = get_slice(interner, argument, location)?; match values.pop_front() { Some(element) => Ok(Value::Tuple(vec![element, Value::Slice(values, typ)])), None => failing_constraint("slice_pop_front called on empty slice", location), @@ -358,12 +416,12 @@ fn slice_pop_front( fn slice_pop_back( interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; + let (mut values, typ) = get_slice(interner, argument, location)?; match values.pop_back() { Some(element) => Ok(Value::Tuple(vec![Value::Slice(values, typ), element])), None => failing_constraint("slice_pop_back called on empty slice", location), @@ -372,28 +430,27 @@ fn slice_pop_back( fn slice_insert( interner: &mut NodeInterner, - mut 
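The slice builtins above operate on `im::Vector`, the persistent vector already used for slice values in this module, so each operation cheaply yields a new slice rather than mutating the old one. A small sketch assuming the `im` crate as a dependency:

use im::Vector;

// slice_push_back / slice_pop_front / slice_insert in miniature: the original
// vector is left untouched and the new slice (plus any removed element) is returned.
fn push_back(values: &Vector<i64>, element: i64) -> Vector<i64> {
    let mut values = values.clone();
    values.push_back(element);
    values
}

fn pop_front(values: &Vector<i64>) -> Option<(i64, Vector<i64>)> {
    let mut values = values.clone();
    values.pop_front().map(|element| (element, values))
}

fn insert_at(values: &Vector<i64>, index: usize, element: i64) -> Vector<i64> {
    let mut values = values.clone();
    values.insert(index, element);
    values
}

fn main() {
    let slice: Vector<i64> = Vector::from(vec![1, 2, 3]);
    assert_eq!(push_back(&slice, 4), Vector::from(vec![1, 2, 3, 4]));
    assert_eq!(pop_front(&slice), Some((1, Vector::from(vec![2, 3]))));
    assert_eq!(insert_at(&slice, 1, 9), Vector::from(vec![1, 9, 2, 3]));
    // The original slice is unchanged.
    assert_eq!(slice, Vector::from(vec![1, 2, 3]));
}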
arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(3, &arguments, location)?; + let (slice, index, element) = check_three_arguments(arguments, location)?; - let (element, _) = arguments.pop().unwrap(); - let index = get_u32(arguments.pop().unwrap().0, location)?; - let (mut values, typ) = get_slice(interner, arguments.pop().unwrap().0, location)?; - values.insert(index as usize, element); + let index = get_u32(index, location)? as usize; + let (mut values, typ) = get_slice(interner, slice, location)?; + values.insert(index, element); Ok(Value::Slice(values, typ)) } // fn as_trait_constraint(quoted: Quoted) -> TraitConstraint fn quoted_as_trait_constraint( - _interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + interpreter: &mut Interpreter, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - let tokens = get_quoted(arguments.pop().unwrap().0, location)?; - let quoted = tokens.as_ref().clone(); + let tokens = get_quoted(argument, location)?; + let quoted = add_token_spans(tokens.clone(), location.span); let trait_bound = parser::trait_bound().parse(quoted).map_err(|mut errors| { let error = errors.swap_remove(0); @@ -401,18 +458,176 @@ fn quoted_as_trait_constraint( InterpreterError::FailedToParseMacro { error, tokens, rule, file: location.file } })?; - Ok(Value::TraitConstraint(trait_bound)) + let bound = interpreter + .elaborate_item(interpreter.current_function, |elaborator| { + elaborator.resolve_trait_bound(&trait_bound, Type::Unit) + }) + .ok_or(InterpreterError::FailedToResolveTraitBound { trait_bound, location })?; + + Ok(Value::TraitConstraint(bound.trait_id, bound.trait_generics)) +} + +// fn as_type(quoted: Quoted) -> Type +fn quoted_as_type( + interpreter: &mut Interpreter, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let argument = check_one_argument(arguments, location)?; + + let tokens = get_quoted(argument, location)?; + let quoted = add_token_spans(tokens.clone(), location.span); + + let typ = parser::parse_type().parse(quoted).map_err(|mut errors| { + let error = errors.swap_remove(0); + let rule = "a type"; + InterpreterError::FailedToParseMacro { error, tokens, rule, file: location.file } + })?; + + let typ = + interpreter.elaborate_item(interpreter.current_function, |elab| elab.resolve_type(typ)); + + Ok(Value::Type(typ)) +} + +// fn as_array(self) -> Option<(Type, Type)> +fn type_as_array( + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, +) -> IResult { + type_as(arguments, return_type, location, |typ| { + if let Type::Array(length, array_type) = typ { + Some(Value::Tuple(vec![Value::Type(*array_type), Value::Type(*length)])) + } else { + None + } + }) +} + +// fn as_constant(self) -> Option +fn type_as_constant( + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, +) -> IResult { + type_as(arguments, return_type, location, |typ| { + if let Type::Constant(n) = typ { + Some(Value::U32(n)) + } else { + None + } + }) +} + +// fn as_integer(self) -> Option<(bool, u8)> +fn type_as_integer( + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, +) -> IResult { + type_as(arguments, return_type, location, |typ| { + if let Type::Integer(sign, bits) = typ { + Some(Value::Tuple(vec![Value::Bool(sign.is_signed()), Value::U8(bits.bit_size())])) 
+ } else { + None + } + }) +} + +// fn as_slice(self) -> Option +fn type_as_slice( + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, +) -> IResult { + type_as(arguments, return_type, location, |typ| { + if let Type::Slice(slice_type) = typ { + Some(Value::Type(*slice_type)) + } else { + None + } + }) +} + +// fn as_tuple(self) -> Option<[Type]> +fn type_as_tuple( + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, +) -> IResult { + type_as(arguments, return_type.clone(), location, |typ| { + if let Type::Tuple(types) = typ { + let t = extract_option_generic_type(return_type); + + let Type::Slice(slice_type) = t else { + panic!("Expected T to be a slice"); + }; + + Some(Value::Slice(types.into_iter().map(Value::Type).collect(), *slice_type)) + } else { + None + } + }) +} + +// Helper function for implementing the `type_as_...` functions. +fn type_as( + arguments: Vec<(Value, Location)>, + return_type: Type, + location: Location, + f: F, +) -> IResult +where + F: FnOnce(Type) -> Option, +{ + let value = check_one_argument(arguments, location)?; + let typ = get_type(value, location)?; + + let option_value = f(typ); + + option(return_type, option_value) +} + +// fn type_eq(_first: Type, _second: Type) -> bool +fn type_eq(arguments: Vec<(Value, Location)>, location: Location) -> IResult { + let (self_type, other_type) = check_two_arguments(arguments, location)?; + + Ok(Value::Bool(self_type == other_type)) +} + +// fn is_bool(self) -> bool +fn type_is_bool(arguments: Vec<(Value, Location)>, location: Location) -> IResult { + let value = check_one_argument(arguments, location)?; + let typ = get_type(value, location)?; + + Ok(Value::Bool(matches!(typ, Type::Bool))) +} + +// fn is_field(self) -> bool +fn type_is_field(arguments: Vec<(Value, Location)>, location: Location) -> IResult { + let value = check_one_argument(arguments, location)?; + let typ = get_type(value, location)?; + + Ok(Value::Bool(matches!(typ, Type::FieldElement))) +} + +// fn type_of(x: T) -> Type +fn type_of(arguments: Vec<(Value, Location)>, location: Location) -> IResult { + let value = check_one_argument(arguments, location)?; + let typ = value.get_type().into_owned(); + Ok(Value::Type(typ)) } // fn constraint_hash(constraint: TraitConstraint) -> Field fn trait_constraint_hash( _interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - let bound = get_trait_constraint(arguments.pop().unwrap().0, location)?; + let bound = get_trait_constraint(argument, location)?; let mut hasher = std::collections::hash_map::DefaultHasher::new(); bound.hash(&mut hasher); @@ -423,25 +638,56 @@ fn trait_constraint_hash( // fn constraint_eq(constraint_a: TraitConstraint, constraint_b: TraitConstraint) -> bool fn trait_constraint_eq( + _interner: &mut NodeInterner, + arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + let (value_a, value_b) = check_two_arguments(arguments, location)?; + + let constraint_a = get_trait_constraint(value_a, location)?; + let constraint_b = get_trait_constraint(value_b, location)?; + + Ok(Value::Bool(constraint_a == constraint_b)) +} + +// fn trait_def_hash(def: TraitDefinition) -> Field +fn trait_def_hash( + _interner: &mut NodeInterner, + mut arguments: Vec<(Value, Location)>, + location: Location, +) -> IResult { + check_argument_count(1, 
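The `type_as` helper above factors every `type_as_*` builtin into one function that runs an inspection closure and wraps its `Option<Value>` result into an option value. A reduced sketch with stand-in `Type` and `Value` enums, where a plain Rust `Option` stands in for the stdlib `Option` struct value:

#[derive(Debug, Clone, PartialEq)]
enum Type {
    Field,
    Array(Box<Type>, u32),
}

#[derive(Debug, PartialEq)]
enum Value {
    Type(Type),
    U32(u32),
    Tuple(Vec<Value>),
    Option(Option<Box<Value>>), // stand-in for the stdlib Option struct value
}

// Shared skeleton for `type_as_array`, `type_as_constant`, ...: run the inspection
// closure and wrap whatever it returns into an option value.
fn type_as<F>(typ: Type, f: F) -> Value
where
    F: FnOnce(Type) -> Option<Value>,
{
    Value::Option(f(typ).map(Box::new))
}

fn type_as_array(typ: Type) -> Value {
    type_as(typ, |typ| {
        if let Type::Array(element, length) = typ {
            Some(Value::Tuple(vec![Value::Type(*element), Value::U32(length)]))
        } else {
            None
        }
    })
}

fn main() {
    let array = Type::Array(Box::new(Type::Field), 3);
    let expected = Value::Tuple(vec![Value::Type(Type::Field), Value::U32(3)]);
    assert_eq!(type_as_array(array), Value::Option(Some(Box::new(expected))));
    assert_eq!(type_as_array(Type::Field), Value::Option(None));
}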
&arguments, location)?; + + let id = get_trait_def(arguments.pop().unwrap().0, location)?; + + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + id.hash(&mut hasher); + let hash = hasher.finish(); + + Ok(Value::Field((hash as u128).into())) +} + +// fn trait_def_eq(def_a: TraitDefinition, def_b: TraitDefinition) -> bool +fn trait_def_eq( _interner: &mut NodeInterner, mut arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { check_argument_count(2, &arguments, location)?; - let constraint_b = get_trait_constraint(arguments.pop().unwrap().0, location)?; - let constraint_a = get_trait_constraint(arguments.pop().unwrap().0, location)?; + let id_b = get_trait_def(arguments.pop().unwrap().0, location)?; + let id_a = get_trait_def(arguments.pop().unwrap().0, location)?; - Ok(Value::Bool(constraint_a == constraint_b)) + Ok(Value::Bool(id_a == id_b)) } // fn zeroed() -> T -fn zeroed(return_type: Type, location: Location) -> IResult { +fn zeroed(return_type: Type) -> IResult { match return_type { Type::FieldElement => Ok(Value::Field(0u128.into())), Type::Array(length_type, elem) => { if let Some(length) = length_type.evaluate_to_u32() { - let element = zeroed(elem.as_ref().clone(), location)?; + let element = zeroed(elem.as_ref().clone())?; let array = std::iter::repeat(element).take(length as usize).collect(); Ok(Value::Array(array, Type::Array(length_type, elem))) } else { @@ -471,44 +717,44 @@ fn zeroed(return_type: Type, location: Location) -> IResult { Ok(Value::Zeroed(Type::String(length_type))) } } - Type::FmtString(_, _) => { - let item = "format strings in a comptime context".into(); - Err(InterpreterError::Unimplemented { item, location }) + Type::FmtString(length_type, captures) => { + let length = length_type.evaluate_to_u32(); + let typ = Type::FmtString(length_type, captures); + if let Some(length) = length { + Ok(Value::FormatString(Rc::new("\0".repeat(length as usize)), typ)) + } else { + // Assume we can resolve the length later + Ok(Value::Zeroed(typ)) + } } Type::Unit => Ok(Value::Unit), - Type::Tuple(fields) => { - Ok(Value::Tuple(try_vecmap(fields, |field| zeroed(field, location))?)) - } + Type::Tuple(fields) => Ok(Value::Tuple(try_vecmap(fields, zeroed)?)), Type::Struct(struct_type, generics) => { let fields = struct_type.borrow().get_fields(&generics); let mut values = HashMap::default(); for (field_name, field_type) in fields { - let field_value = zeroed(field_type, location)?; + let field_value = zeroed(field_type)?; values.insert(Rc::new(field_name), field_value); } let typ = Type::Struct(struct_type, generics); Ok(Value::Struct(values, typ)) } - Type::Alias(alias, generics) => zeroed(alias.borrow().get_type(&generics), location), + Type::Alias(alias, generics) => zeroed(alias.borrow().get_type(&generics)), typ @ Type::Function(..) => { // Using Value::Zeroed here is probably safer than using FuncId::dummy_id() or similar Ok(Value::Zeroed(typ)) } Type::MutableReference(element) => { - let element = zeroed(*element, location)?; + let element = zeroed(*element)?; Ok(Value::Pointer(Shared::new(element), false)) } - Type::Quoted(QuotedType::TraitConstraint) => Ok(Value::TraitConstraint(TraitBound { - trait_path: Path::from_single(String::new(), Span::default()), - trait_id: None, - trait_generics: Vec::new(), - })), // Optimistically assume we can resolve this type later or that the value is unused Type::TypeVariable(_, _) | Type::Forall(_, _) | Type::Constant(_) + | Type::InfixExpr(..) 
| Type::Quoted(_) | Type::Error | Type::TraitAsType(_, _, _) @@ -582,21 +828,44 @@ fn modulus_num_bits( fn trait_def_as_trait_constraint( interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> Result { - check_argument_count(1, &arguments, location)?; + let argument = check_one_argument(arguments, location)?; - let trait_id = get_trait_def(arguments.pop().unwrap().0, location)?; + let trait_id = get_trait_def(argument, location)?; let the_trait = interner.get_trait(trait_id); - - let trait_path = Path::from_ident(the_trait.name.clone()); - let trait_generics = vecmap(&the_trait.generics, |generic| { - let name = Path::from_single(generic.name.as_ref().clone(), generic.span); - UnresolvedTypeData::Named(name, Vec::new(), false).with_span(generic.span) + Type::NamedGeneric(generic.type_var.clone(), generic.name.clone(), generic.kind.clone()) }); - let trait_id = Some(trait_id); - Ok(Value::TraitConstraint(TraitBound { trait_path, trait_id, trait_generics })) + Ok(Value::TraitConstraint(trait_id, trait_generics)) +} + +/// Creates a value that holds an `Option`. +/// `option_type` must be a Type referencing the `Option` type. +pub(crate) fn option(option_type: Type, value: Option) -> IResult { + let t = extract_option_generic_type(option_type.clone()); + + let (is_some, value) = match value { + Some(value) => (Value::Bool(true), value), + None => (Value::Bool(false), zeroed(t)?), + }; + + let mut fields = HashMap::default(); + fields.insert(Rc::new("_is_some".to_string()), is_some); + fields.insert(Rc::new("_value".to_string()), value); + Ok(Value::Struct(fields, option_type)) +} + +/// Given a type, assert that it's an Option and return the Type for T +pub(crate) fn extract_option_generic_type(typ: Type) -> Type { + let Type::Struct(struct_type, mut generics) = typ else { + panic!("Expected type to be a struct"); + }; + + let struct_type = struct_type.borrow(); + assert_eq!(struct_type.name.0.contents, "Option"); + + generics.pop().expect("Expected Option to have a T generic type") } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs index fc8c57ab634..f0dc2dcf487 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs @@ -8,7 +8,7 @@ use crate::{ macros_api::NodeInterner, }; -use super::builtin::{check_argument_count, get_array, get_u32}; +use super::builtin::{check_two_arguments, get_array, get_u32}; pub(super) fn call_foreign( interner: &mut NodeInterner, @@ -28,13 +28,13 @@ pub(super) fn call_foreign( // poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] fn poseidon2_permutation( interner: &mut NodeInterner, - mut arguments: Vec<(Value, Location)>, + arguments: Vec<(Value, Location)>, location: Location, ) -> IResult { - check_argument_count(2, &arguments, location)?; + let (input, state_length) = check_two_arguments(arguments, location)?; - let state_length = get_u32(arguments.pop().unwrap().0, location)?; - let (input, typ) = get_array(interner, arguments.pop().unwrap().0, location)?; + let (input, typ) = get_array(interner, input, location)?; + let state_length = get_u32(state_length, location)?; let input = try_vecmap(input, |integer| get_field(integer, location))?; diff --git 
a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs index 94a848b891d..c7b1532c9b7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/unquote.rs @@ -1,8 +1,8 @@ use noirc_errors::Location; use crate::{ - hir::comptime::{errors::IResult, value::unwrap_rc, Value}, - token::{SpannedToken, Token, Tokens}, + hir::comptime::errors::IResult, + token::{Token, Tokens}, }; use super::Interpreter; @@ -15,29 +15,20 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { &mut self, tokens: Tokens, location: Location, - ) -> IResult { + ) -> IResult> { let mut new_tokens = Vec::with_capacity(tokens.0.len()); for token in tokens.0 { - let span = token.to_span(); - match token.token() { + match token.into_token() { Token::UnquoteMarker(id) => { - match self.evaluate(*id)? { - // If the value is already quoted we don't want to change the token stream by - // turning it into a Quoted block (which would add `quote`, `{`, and `}` tokens). - Value::Code(stream) => new_tokens.extend(unwrap_rc(stream).0), - value => { - let new_id = - value.into_hir_expression(self.elaborator.interner, location)?; - let new_token = Token::UnquoteMarker(new_id); - new_tokens.push(SpannedToken::new(new_token, span)); - } - } + let value = self.evaluate(id)?; + let tokens = value.into_tokens(self.elaborator.interner, location)?; + new_tokens.extend(tokens); } - _ => new_tokens.push(token), + token => new_tokens.push(token), } } - Ok(Tokens(new_tokens)) + Ok(new_tokens) } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs index b4ffa1bd01d..4c1adf9fca0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -45,7 +45,7 @@ fn interpret_helper(src: &str) -> Result { let main = context.get_main_function(&krate).expect("Expected 'main' function"); let mut elaborator = - Elaborator::elaborate_and_return_self(&mut context, krate, collector.items, None); + Elaborator::elaborate_and_return_self(&mut context, krate, collector.items, None, false); assert_eq!(elaborator.errors.len(), 0); let mut interpreter = elaborator.setup_interpreter(); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index f29b67bfc4e..1264cd21635 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -4,10 +4,10 @@ use acvm::{AcirField, FieldElement}; use chumsky::Parser; use im::Vector; use iter_extended::{try_vecmap, vecmap}; -use noirc_errors::Location; +use noirc_errors::{Location, Span}; use crate::{ - ast::{ArrayLiteral, ConstructorExpression, Ident, IntegerBitSize, Signedness, TraitBound}, + ast::{ArrayLiteral, ConstructorExpression, Ident, IntegerBitSize, Signedness}, hir::def_map::ModuleId, hir_def::expr::{HirArrayLiteral, HirConstructorExpression, HirIdent, HirLambda, ImplKind}, macros_api::{ @@ -38,6 +38,7 @@ pub enum Value { U32(u32), U64(u64), String(Rc), + FormatString(Rc, Type), Function(FuncId, Type, Rc), Closure(HirLambda, Vec, Type), Tuple(Vec), @@ -45,12 +46,16 @@ pub enum Value { Pointer(Shared, /* auto_deref */ bool), Array(Vector, Type), 
Slice(Vector, Type), - Code(Rc), + /// Quoted tokens don't have spans because otherwise inserting them in the middle of other + /// tokens can cause larger spans to be before lesser spans, causing an assert. They may also + /// be inserted into separate files entirely. + Quoted(Rc>), StructDefinition(StructId), - TraitConstraint(TraitBound), + TraitConstraint(TraitId, /* trait generics */ Vec), TraitDefinition(TraitId), FunctionDefinition(FuncId), ModuleDefinition(ModuleId), + Type(Type), Zeroed(Type), } @@ -73,6 +78,7 @@ impl Value { let length = Type::Constant(value.len() as u32); Type::String(Box::new(length)) } + Value::FormatString(_, typ) => return Cow::Borrowed(typ), Value::Function(_, typ, _) => return Cow::Borrowed(typ), Value::Closure(_, _, typ) => return Cow::Borrowed(typ), Value::Tuple(fields) => { @@ -81,7 +87,7 @@ impl Value { Value::Struct(_, typ) => return Cow::Borrowed(typ), Value::Array(_, typ) => return Cow::Borrowed(typ), Value::Slice(_, typ) => return Cow::Borrowed(typ), - Value::Code(_) => Type::Quoted(QuotedType::Quoted), + Value::Quoted(_) => Type::Quoted(QuotedType::Quoted), Value::StructDefinition(_) => Type::Quoted(QuotedType::StructDefinition), Value::Pointer(element, auto_deref) => { if *auto_deref { @@ -95,6 +101,7 @@ impl Value { Value::TraitDefinition(_) => Type::Quoted(QuotedType::TraitDefinition), Value::FunctionDefinition(_) => Type::Quoted(QuotedType::FunctionDefinition), Value::ModuleDefinition(_) => Type::Quoted(QuotedType::Module), + Value::Type(_) => Type::Quoted(QuotedType::Type), Value::Zeroed(typ) => return Cow::Borrowed(typ), }) } @@ -148,6 +155,10 @@ impl Value { ExpressionKind::Literal(Literal::Integer((value as u128).into(), false)) } Value::String(value) => ExpressionKind::Literal(Literal::Str(unwrap_rc(value))), + // Format strings are lowered as normal strings since they are already interpolated. + Value::FormatString(value, _) => { + ExpressionKind::Literal(Literal::Str(unwrap_rc(value))) + } Value::Function(id, typ, bindings) => { let id = interner.function_definition_id(id); let impl_kind = ImplKind::NotATraitMethod; @@ -196,9 +207,9 @@ impl Value { try_vecmap(elements, |element| element.into_expression(interner, location))?; ExpressionKind::Literal(Literal::Slice(ArrayLiteral::Standard(elements))) } - Value::Code(tokens) => { + Value::Quoted(tokens) => { // Wrap the tokens in '{' and '}' so that we can parse statements as well. - let mut tokens_to_parse = tokens.as_ref().clone(); + let mut tokens_to_parse = add_token_spans(tokens.clone(), location.span); tokens_to_parse.0.insert(0, SpannedToken::new(Token::LeftBrace, location.span)); tokens_to_parse.0.push(SpannedToken::new(Token::RightBrace, location.span)); @@ -214,12 +225,15 @@ impl Value { } Value::Pointer(..) | Value::StructDefinition(_) - | Value::TraitConstraint(_) + | Value::TraitConstraint(..) | Value::TraitDefinition(_) | Value::FunctionDefinition(_) | Value::Zeroed(_) + | Value::Type(_) | Value::ModuleDefinition(_) => { - return Err(InterpreterError::CannotInlineMacro { value: self, location }) + let typ = self.get_type().into_owned(); + let value = self.display(interner).to_string(); + return Err(InterpreterError::CannotInlineMacro { typ, value, location }); } }; @@ -277,6 +291,10 @@ impl Value { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } Value::String(value) => HirExpression::Literal(HirLiteral::Str(unwrap_rc(value))), + // Format strings are lowered as normal strings since they are already interpolated. 
+ Value::FormatString(value, _) => { + HirExpression::Literal(HirLiteral::Str(unwrap_rc(value))) + } Value::Function(id, typ, bindings) => { let id = interner.function_definition_id(id); let impl_kind = ImplKind::NotATraitMethod; @@ -326,15 +344,18 @@ impl Value { })?; HirExpression::Literal(HirLiteral::Slice(HirArrayLiteral::Standard(elements))) } - Value::Code(block) => HirExpression::Unquote(unwrap_rc(block)), + Value::Quoted(tokens) => HirExpression::Unquote(add_token_spans(tokens, location.span)), Value::Pointer(..) | Value::StructDefinition(_) - | Value::TraitConstraint(_) + | Value::TraitConstraint(..) | Value::TraitDefinition(_) | Value::FunctionDefinition(_) | Value::Zeroed(_) + | Value::Type(_) | Value::ModuleDefinition(_) => { - return Err(InterpreterError::CannotInlineMacro { value: self, location }) + let typ = self.get_type().into_owned(); + let value = self.display(interner).to_string(); + return Err(InterpreterError::CannotInlineMacro { value, typ, location }); } }; @@ -344,6 +365,19 @@ impl Value { Ok(id) } + pub(crate) fn into_tokens( + self, + interner: &mut NodeInterner, + location: Location, + ) -> IResult> { + let token = match self { + Value::Quoted(tokens) => return Ok(unwrap_rc(tokens)), + Value::Type(typ) => Token::QuotedType(interner.push_quoted_type(typ)), + other => Token::UnquoteMarker(other.into_hir_expression(interner, location)?), + }; + Ok(vec![token]) + } + /// Converts any unsigned `Value` into a `u128`. /// Returns `None` for negative integers. pub(crate) fn to_u128(&self) -> Option { @@ -364,12 +398,24 @@ impl Value { pub(crate) fn into_top_level_items( self, location: Location, + interner: &NodeInterner, ) -> IResult> { match self { - Value::Code(tokens) => parse_tokens(tokens, parser::top_level_items(), location.file), - value => Err(InterpreterError::CannotInlineMacro { value, location }), + Value::Quoted(tokens) => parse_tokens(tokens, parser::top_level_items(), location), + _ => { + let typ = self.get_type().into_owned(); + let value = self.display(interner).to_string(); + Err(InterpreterError::CannotInlineMacro { value, typ, location }) + } } } + + pub fn display<'value, 'interner>( + &'value self, + interner: &'interner NodeInterner, + ) -> ValuePrinter<'value, 'interner> { + ValuePrinter { value: self, interner } + } } /// Unwraps an Rc value without cloning the inner value if the reference count is 1. Clones otherwise. 
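As an aside, the `unwrap_rc` helper documented above reduces to a single standard-library call: `Rc::try_unwrap` moves the value out when the reference count is 1 and falls back to cloning otherwise. A small self-contained sketch of that idiom (names here are illustrative, not taken from this patch):

use std::rc::Rc;

// Moves the value out of the Rc when it is the only reference,
// and clones the inner value otherwise.
fn unwrap_or_clone<T: Clone>(rc: Rc<T>) -> T {
    Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone())
}

fn main() {
    let unique = Rc::new(String::from("only reference"));
    let moved = unwrap_or_clone(unique); // refcount == 1: no clone happens

    let shared = Rc::new(String::from("shared"));
    let _keep_alive = Rc::clone(&shared);
    let cloned = unwrap_or_clone(shared); // refcount > 1: falls back to a clone
    assert_eq!((moved.len(), cloned.len()), (14, 6));
}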
@@ -377,20 +423,35 @@ pub(crate) fn unwrap_rc(rc: Rc) -> T { Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()) } -fn parse_tokens(tokens: Rc, parser: impl NoirParser, file: fm::FileId) -> IResult { - match parser.parse(tokens.as_ref().clone()) { +fn parse_tokens( + tokens: Rc>, + parser: impl NoirParser, + location: Location, +) -> IResult { + match parser.parse(add_token_spans(tokens.clone(), location.span)) { Ok(expr) => Ok(expr), Err(mut errors) => { let error = errors.swap_remove(0); let rule = "an expression"; + let file = location.file; Err(InterpreterError::FailedToParseMacro { error, file, tokens, rule }) } } } -impl Display for Value { +pub(crate) fn add_token_spans(tokens: Rc>, span: Span) -> Tokens { + let tokens = unwrap_rc(tokens); + Tokens(vecmap(tokens, |token| SpannedToken::new(token, span))) +} + +pub struct ValuePrinter<'value, 'interner> { + value: &'value Value, + interner: &'interner NodeInterner, +} + +impl<'value, 'interner> Display for ValuePrinter<'value, 'interner> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { + match self.value { Value::Unit => write!(f, "()"), Value::Bool(value) => { let msg = if *value { "true" } else { "false" }; @@ -407,10 +468,11 @@ impl Display for Value { Value::U32(value) => write!(f, "{value}"), Value::U64(value) => write!(f, "{value}"), Value::String(value) => write!(f, "{value}"), + Value::FormatString(value, _) => write!(f, "{value}"), Value::Function(..) => write!(f, "(function)"), Value::Closure(_, _, _) => write!(f, "(closure)"), Value::Tuple(fields) => { - let fields = vecmap(fields, ToString::to_string); + let fields = vecmap(fields, |field| field.display(self.interner).to_string()); write!(f, "({})", fields.join(", ")) } Value::Struct(fields, typ) => { @@ -418,31 +480,56 @@ impl Display for Value { Type::Struct(def, _) => def.borrow().name.to_string(), other => other.to_string(), }; - let fields = vecmap(fields, |(name, value)| format!("{}: {}", name, value)); + let fields = vecmap(fields, |(name, value)| { + format!("{}: {}", name, value.display(self.interner)) + }); write!(f, "{typename} {{ {} }}", fields.join(", ")) } - Value::Pointer(value, _) => write!(f, "&mut {}", value.borrow()), + Value::Pointer(value, _) => write!(f, "&mut {}", value.borrow().display(self.interner)), Value::Array(values, _) => { - let values = vecmap(values, ToString::to_string); + let values = vecmap(values, |value| value.display(self.interner).to_string()); write!(f, "[{}]", values.join(", ")) } Value::Slice(values, _) => { - let values = vecmap(values, ToString::to_string); + let values = vecmap(values, |value| value.display(self.interner).to_string()); write!(f, "&[{}]", values.join(", ")) } - Value::Code(tokens) => { + Value::Quoted(tokens) => { write!(f, "quote {{")?; - for token in tokens.0.iter() { - write!(f, " {token}")?; + for token in tokens.iter() { + match token { + Token::QuotedType(id) => { + write!(f, " {}", self.interner.get_quoted_type(*id))?; + } + other => write!(f, " {other}")?, + } } write!(f, " }}") } - Value::StructDefinition(_) => write!(f, "(struct definition)"), - Value::TraitConstraint { .. 
} => write!(f, "(trait constraint)"), - Value::TraitDefinition(_) => write!(f, "(trait definition)"), - Value::FunctionDefinition(_) => write!(f, "(function definition)"), + Value::StructDefinition(id) => { + let def = self.interner.get_struct(*id); + let def = def.borrow(); + write!(f, "{}", def.name) + } + Value::TraitConstraint(trait_id, generics) => { + let trait_ = self.interner.get_trait(*trait_id); + let generic_string = vecmap(generics, ToString::to_string).join(", "); + if generics.is_empty() { + write!(f, "{}", trait_.name) + } else { + write!(f, "{}<{generic_string}>", trait_.name) + } + } + Value::TraitDefinition(trait_id) => { + let trait_ = self.interner.get_trait(*trait_id); + write!(f, "{}", trait_.name) + } + Value::FunctionDefinition(function_id) => { + write!(f, "{}", self.interner.function_name(function_id)) + } Value::ModuleDefinition(_) => write!(f, "(module)"), Value::Zeroed(typ) => write!(f, "(zeroed {typ})"), + Value::Type(typ) => write!(f, "{}", typ), } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 80186c19c76..fabd76a2818 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -19,7 +19,8 @@ use crate::node_interner::{ use crate::ast::{ ExpressionKind, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, - NoirTypeAlias, Path, PathKind, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, + NoirTypeAlias, Path, PathKind, PathSegment, UnresolvedGenerics, UnresolvedTraitConstraint, + UnresolvedType, }; use crate::parser::{ParserError, SortedModule}; @@ -79,7 +80,6 @@ pub struct UnresolvedTraitImpl { pub methods: UnresolvedFunctions, pub generics: UnresolvedGenerics, pub where_clause: Vec, - pub is_comptime: bool, // Every field after this line is filled in later in the elaborator pub trait_id: Option, @@ -247,6 +247,7 @@ impl DefCollector { ast: SortedModule, root_file_id: FileId, debug_comptime_in_file: Option<&str>, + enable_arithmetic_generics: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -264,6 +265,7 @@ impl DefCollector { dep.crate_id, context, debug_comptime_in_file, + enable_arithmetic_generics, macro_processors, )); @@ -306,7 +308,7 @@ impl DefCollector { // Resolve unresolved imports collected from the crate, one by one. 
for collected_import in std::mem::take(&mut def_collector.imports) { let module_id = collected_import.module_id; - let resolved_import = if context.def_interner.track_references { + let resolved_import = if context.def_interner.lsp_mode { let mut references: Vec> = Vec::new(); let resolved_import = resolve_import( crate_id, @@ -318,13 +320,14 @@ impl DefCollector { let current_def_map = context.def_maps.get(&crate_id).unwrap(); let file_id = current_def_map.file_id(module_id); - for (referenced, ident) in references.iter().zip(&collected_import.path.segments) { + for (referenced, segment) in references.iter().zip(&collected_import.path.segments) + { let Some(referenced) = referenced else { continue; }; context.def_interner.add_reference( *referenced, - Location::new(ident.span(), file_id), + Location::new(segment.ident.span(), file_id), false, ); } @@ -351,7 +354,7 @@ impl DefCollector { .import(name.clone(), ns, resolved_import.is_prelude); let file_id = current_def_map.file_id(module_id); - let last_segment = collected_import.path.last_segment(); + let last_segment = collected_import.path.last_ident(); add_import_reference(ns, &last_segment, &mut context.def_interner, file_id); if let Some(ref alias) = collected_import.alias { @@ -385,8 +388,14 @@ impl DefCollector { }) }); - let mut more_errors = - Elaborator::elaborate(context, crate_id, def_collector.items, debug_comptime_in_file); + let mut more_errors = Elaborator::elaborate( + context, + crate_id, + def_collector.items, + debug_comptime_in_file, + enable_arithmetic_generics, + ); + errors.append(&mut more_errors); for macro_processor in macro_processors { @@ -425,7 +434,12 @@ fn inject_prelude( if !crate_id.is_stdlib() { let segments: Vec<_> = "std::prelude" .split("::") - .map(|segment| crate::ast::Ident::new(segment.into(), Span::default())) + .map(|segment| { + crate::ast::PathSegment::from(crate::ast::Ident::new( + segment.into(), + Span::default(), + )) + }) .collect(); let path = Path { @@ -446,7 +460,7 @@ fn inject_prelude( for path in prelude { let mut segments = segments.clone(); - segments.push(Ident::new(path.to_string(), Span::default())); + segments.push(PathSegment::from(Ident::new(path.to_string(), Span::default()))); collected_imports.insert( 0, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index e5893dc43d5..be2afd13507 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -173,8 +173,6 @@ impl<'a> ModCollector<'a> { let module = ModuleId { krate, local_id: self.module_id }; for (_, func_id, noir_function) in &mut unresolved_functions.functions { - // Attach any trait constraints on the impl to the function - noir_function.def.where_clause.append(&mut trait_impl.where_clause.clone()); let location = Location::new(noir_function.def.span, self.file_id); context.def_interner.push_function(*func_id, &noir_function.def, module, location); } @@ -188,7 +186,6 @@ impl<'a> ModCollector<'a> { generics: trait_impl.impl_generics, where_clause: trait_impl.where_clause, trait_generics: trait_impl.trait_generics, - is_comptime: trait_impl.is_comptime, // These last fields are filled later on trait_id: None, @@ -262,7 +259,8 @@ impl<'a> ModCollector<'a> { } /// Collect any struct definitions declared within the ast. - /// Returns a vector of errors if any structs were already defined. 
+ /// Returns a vector of errors if any structs were already defined, + /// or if a struct has duplicate fields in it. fn collect_structs( &mut self, context: &mut Context, @@ -271,6 +269,8 @@ impl<'a> ModCollector<'a> { ) -> Vec<(CompilationError, FileId)> { let mut definition_errors = vec![]; for struct_definition in types { + self.check_duplicate_field_names(&struct_definition, &mut definition_errors); + let name = struct_definition.name.clone(); let unresolved = UnresolvedStruct { @@ -330,6 +330,29 @@ impl<'a> ModCollector<'a> { definition_errors } + fn check_duplicate_field_names( + &self, + struct_definition: &NoirStruct, + definition_errors: &mut Vec<(CompilationError, FileId)>, + ) { + let mut seen_field_names = std::collections::HashSet::new(); + for (field_name, _) in &struct_definition.fields { + if seen_field_names.insert(field_name) { + continue; + } + + let previous_field_name = *seen_field_names.get(field_name).unwrap(); + definition_errors.push(( + DefCollectorErrorKind::DuplicateField { + first_def: previous_field_name.clone(), + second_def: field_name.clone(), + } + .into(), + self.file_id, + )); + } + } + /// Collect any type aliases definitions declared within the ast. /// Returns a vector of errors if any type aliases were already defined. fn collect_type_aliases( diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs index 1ccf8dd4792..9e9471c0cb3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -26,6 +26,8 @@ pub enum DuplicateType { pub enum DefCollectorErrorKind { #[error("duplicate {typ} found in namespace")] Duplicate { typ: DuplicateType, first_def: Ident, second_def: Ident }, + #[error("duplicate struct field {first_def}")] + DuplicateField { first_def: Ident, second_def: Ident }, #[error("unresolved import")] UnresolvedModuleDecl { mod_name: Ident, expected_path: String, alternative_path: String }, #[error("overlapping imports")] @@ -132,6 +134,23 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { diag } } + DefCollectorErrorKind::DuplicateField { first_def, second_def } => { + let primary_message = format!( + "Duplicate definitions of struct field with name {} found", + &first_def.0.contents + ); + { + let first_span = first_def.0.span(); + let second_span = second_def.0.span(); + let mut diag = Diagnostic::simple_error( + primary_message, + "First definition found here".to_string(), + first_span, + ); + diag.add_secondary("Second definition found here".to_string(), second_span); + diag + } + } DefCollectorErrorKind::UnresolvedModuleDecl { mod_name, expected_path, alternative_path } => { let span = mod_name.0.span(); let mod_name = &mod_name.0.contents; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs index 9de96ab06e8..e607de52ff1 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -48,11 +48,13 @@ impl ModuleId { } impl ModuleId { - pub fn module(self, def_maps: &BTreeMap) -> &ModuleData { + pub fn module(self, def_maps: &DefMaps) -> &ModuleData { &def_maps[&self.krate].modules()[self.local_id.0] } } +pub type DefMaps = BTreeMap; + /// Map of all modules and scopes defined within a crate. 
/// /// The definitions of the crate are accessible indirectly via the scopes of each module. @@ -74,6 +76,7 @@ impl CrateDefMap { crate_id: CrateId, context: &mut Context, debug_comptime_in_file: Option<&str>, + enable_arithmetic_generics: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { // Check if this Crate has already been compiled @@ -123,6 +126,7 @@ impl CrateDefMap { ast, root_file_id, debug_comptime_in_file, + enable_arithmetic_generics, macro_processors, )); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs index 87c4133d68e..6e91f2fdb61 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs @@ -291,8 +291,8 @@ impl Context<'_, '_> { }) } - // Enables reference tracking (useful for tools like LSP). - pub fn track_references(&mut self) { - self.def_interner.track_references = true; + /// Activates LSP mode, which will track references for all definitions. + pub fn activate_lsp_mode(&mut self) { + self.def_interner.lsp_mode = true; } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index bf6de746791..cfaa2063c40 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -114,8 +114,8 @@ pub enum ResolverError { MacroIsNotComptime { span: Span }, #[error("Annotation name must refer to a comptime function")] NonFunctionInAnnotation { span: Span }, - #[error("Unknown annotation")] - UnknownAnnotation { span: Span }, + #[error("Type `{typ}` was inserted into the generics list from a macro, but is not a generic")] + MacroResultInGenericsListNotAGeneric { span: Span, typ: Type }, } impl ResolverError { @@ -460,13 +460,13 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *span, ) }, - ResolverError::UnknownAnnotation { span } => { - Diagnostic::simple_warning( - "Unknown annotation".into(), - "No matching comptime function found in scope".into(), + ResolverError::MacroResultInGenericsListNotAGeneric { span, typ } => { + Diagnostic::simple_error( + format!("Type `{typ}` was inserted into a generics list from a macro, but it is not a generic"), + format!("Type `{typ}` is not a generic"), *span, ) - }, + } } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs index 10e18248dec..4693d3826a8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -6,7 +6,7 @@ use crate::hir::def_collector::dc_crate::CompilationError; use crate::node_interner::ReferenceId; use std::collections::BTreeMap; -use crate::ast::{Ident, ItemVisibility, Path, PathKind}; +use crate::ast::{Ident, ItemVisibility, Path, PathKind, PathSegment}; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleDefId, ModuleId, PerNs}; use super::errors::ResolverError; @@ -163,7 +163,8 @@ fn resolve_path_to_ns( let current_mod_id = ModuleId { krate: crate_id, local_id: import_directive.module_id }; let current_mod = &def_map.modules[current_mod_id.local_id.0]; - let first_segment = import_path.first().expect("ice: could not fetch first segment"); + let first_segment = + &import_path.first().expect("ice: could not fetch first segment").ident; if 
current_mod.find_name(first_segment).is_none() { // Resolve externally when first segment is unresolved return resolve_external_dep( @@ -218,7 +219,7 @@ fn resolve_path_from_crate_root( crate_id: CrateId, importing_crate: CrateId, - import_path: &[Ident], + import_path: &[PathSegment], def_maps: &BTreeMap, path_references: &mut Option<&mut Vec>>, ) -> NamespaceResolutionResult { @@ -235,7 +236,7 @@ fn resolve_path_from_crate_root( fn resolve_name_in_module( krate: CrateId, importing_crate: CrateId, - import_path: &[Ident], + import_path: &[PathSegment], starting_mod: LocalModuleId, def_maps: &BTreeMap, path_references: &mut Option<&mut Vec>>, @@ -254,7 +255,7 @@ fn resolve_name_in_module( }); } - let first_segment = import_path.first().expect("ice: could not fetch first segment"); + let first_segment = &import_path.first().expect("ice: could not fetch first segment").ident; let mut current_ns = current_mod.find_name(first_segment); if current_ns.is_none() { return Err(PathResolutionError::Unresolved(first_segment.clone())); @@ -262,6 +263,9 @@ fn resolve_name_in_module( let mut warning: Option = None; for (last_segment, current_segment) in import_path.iter().zip(import_path.iter().skip(1)) { + let last_segment = &last_segment.ident; + let current_segment = ¤t_segment.ident; + let (typ, visibility) = match current_ns.types { None => return Err(PathResolutionError::Unresolved(last_segment.clone())), Some((typ, visibility, _)) => (typ, visibility), @@ -324,7 +328,7 @@ fn resolve_name_in_module( fn resolve_path_name(import_directive: &ImportDirective) -> Ident { match &import_directive.alias { - None => import_directive.path.segments.last().unwrap().clone(), + None => import_directive.path.last_ident(), Some(ident) => ident.clone(), } } @@ -340,7 +344,7 @@ fn resolve_external_dep( let path = &directive.path.segments; // Fetch the root module from the prelude - let crate_name = path.first().unwrap(); + let crate_name = &path.first().unwrap().ident; let dep_module = current_def_map .extern_prelude .get(&crate_name.0.contents) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index af168a10df9..8eba8215f84 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -152,6 +152,8 @@ pub enum TypeCheckError { StringIndexAssign { span: Span }, #[error("Macro calls may only return `Quoted` values")] MacroReturningNonExpr { typ: Type, span: Span }, + #[error("turbofish (`::<_>`) usage at this position isn't supported yet")] + UnsupportedTurbofishUsage { span: Span }, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -350,6 +352,10 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { "Macro calls must return quoted values, otherwise there is no code to insert".into(), *span, ), + TypeCheckError::UnsupportedTurbofishUsage { span } => { + let msg = "turbofish (`::<_>`) usage at this position isn't supported yet"; + Diagnostic::simple_error(msg.to_string(), "".to_string(), *span) + }, } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs index b9f6af0c4c3..6b66cf1ab4a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs @@ -1,3 +1,4 @@ +use fm::FileId; use iter_extended::vecmap; use noirc_errors::{Location, Span}; @@ -156,6 +157,13 
@@ pub struct FuncMeta {
 /// The module this function was defined in
 pub source_module: LocalModuleId,
+
+ /// The file this function was defined in
+ pub source_file: FileId,
+
+ /// If this function is from an impl (trait or regular impl), this
+ /// is the object type of the impl. Otherwise this is None.
+ pub self_type: Option<Type>,
 }
 #[derive(Debug, Clone)]
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs
index 0ec975a04db..177d23c74dd 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs
@@ -21,7 +21,7 @@ use crate::{
 use super::expr::{HirCallExpression, HirExpression, HirIdent};
-#[derive(PartialEq, Eq, Clone, Hash)]
+#[derive(PartialEq, Eq, Clone, Hash, Ord, PartialOrd)]
 pub enum Type {
 /// A primitive Field type
 FieldElement,
@@ -107,6 +107,8 @@ pub enum Type {
 /// The type of quoted code in macros. This is always a comptime-only type
 Quoted(QuotedType),
+ InfixExpr(Box<Type>, BinaryTypeOperator, Box<Type>),
+
 /// The result of some type error. Remembering type errors as their own type variant lets
 /// us avoid issuing repeat type errors for the same item. For example, a lambda with
 /// an invalid type would otherwise issue a new error each time it is called
@@ -120,7 +122,7 @@ pub enum Type {
 /// For example, the type of a struct field or a function parameter is expected to be
 /// a type of kind * (represented here as `Normal`). Types used in positions where a number
 /// is expected (such as in an array length position) are expected to be of kind `Kind::Numeric`.
-#[derive(PartialEq, Eq, Clone, Hash, Debug)]
+#[derive(PartialEq, Eq, Clone, Hash, Debug, PartialOrd, Ord)]
 pub enum Kind {
 Normal,
 Numeric(Box<Type>),
@@ -135,7 +137,7 @@ impl std::fmt::Display for Kind {
 }
 }
-#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash, PartialOrd, Ord)]
 pub enum QuotedType {
 Expr,
 Quoted,
@@ -191,6 +193,12 @@ pub struct ResolvedGeneric {
 pub span: Span,
 }
+impl ResolvedGeneric {
+ pub fn as_named_generic(self) -> Type {
+ Type::NamedGeneric(self.type_var, self.name, self.kind)
+ }
+}
+
 impl std::hash::Hash for StructType {
 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
 self.id.hash(state);
@@ -203,6 +211,18 @@ impl PartialEq for StructType {
 }
 }
+impl PartialOrd for StructType {
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for StructType {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.id.cmp(&other.id)
+ }
+}
+
 impl StructType {
 pub fn new(
 id: StructId,
@@ -333,6 +353,18 @@ impl PartialEq for TypeAlias {
 }
 }
+impl Ord for TypeAlias {
+ fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+ self.id.cmp(&other.id)
+ }
+}
+
+impl PartialOrd for TypeAlias {
+ fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
 impl std::fmt::Display for TypeAlias {
 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 write!(f, "{}", self.name)
@@ -425,7 +457,7 @@ impl<T> Shared<T> {
 /// A restricted subset of binary operators useable on
 /// type level integers for use in the array length positions of types. 
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)] pub enum BinaryTypeOperator { Addition, Subtraction, @@ -434,7 +466,7 @@ pub enum BinaryTypeOperator { Modulo, } -#[derive(Debug, PartialEq, Eq, Clone, Hash)] +#[derive(Debug, PartialEq, Eq, Clone, Hash, PartialOrd, Ord)] pub enum TypeVariableKind { /// Can bind to any type Normal, @@ -458,7 +490,7 @@ pub enum TypeVariableKind { /// A TypeVariable is a mutable reference that is either /// bound to some type, or unbound with a given TypeVariableId. -#[derive(PartialEq, Eq, Clone, Hash)] +#[derive(PartialEq, Eq, Clone, Hash, PartialOrd, Ord)] pub struct TypeVariable(TypeVariableId, Shared); impl TypeVariable { @@ -527,7 +559,7 @@ impl TypeVariable { /// TypeBindings are the mutable insides of a TypeVariable. /// They are either bound to some type, or are unbound. -#[derive(Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] pub enum TypeBinding { Bound(Type), Unbound(TypeVariableId), @@ -540,7 +572,7 @@ impl TypeBinding { } /// A unique ID used to differentiate different type variables -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct TypeVariableId(pub usize); impl std::fmt::Display for Type { @@ -644,6 +676,16 @@ impl std::fmt::Display for Type { write!(f, "&mut {element}") } Type::Quoted(quoted) => write!(f, "{}", quoted), + Type::InfixExpr(lhs, op, rhs) => { + let this = self.canonicalize(); + + // Prevent infinite recursion + if this != *self { + write!(f, "{this}") + } else { + write!(f, "({lhs} {op} {rhs})") + } + } } } } @@ -838,6 +880,9 @@ impl Type { elements.contains_numeric_typevar(target_id) || named_generic_id_matches_target(length) } + Type::InfixExpr(lhs, _op, rhs) => { + lhs.contains_numeric_typevar(target_id) || rhs.contains_numeric_typevar(target_id) + } } } @@ -917,6 +962,10 @@ impl Type { elements.find_numeric_type_vars(found_names); named_generic_is_numeric(length, found_names); } + Type::InfixExpr(lhs, _op, rhs) => { + lhs.find_numeric_type_vars(found_names); + rhs.find_numeric_type_vars(found_names); + } } } @@ -946,6 +995,7 @@ impl Type { | Type::Forall(_, _) | Type::Quoted(_) | Type::Slice(_) + | Type::InfixExpr(_, _, _) | Type::TraitAsType(..) => false, Type::Alias(alias, generics) => { @@ -983,6 +1033,7 @@ impl Type { | Type::Constant(_) | Type::TypeVariable(_, _) | Type::NamedGeneric(_, _, _) + | Type::InfixExpr(..) | Type::Error => true, Type::FmtString(_, _) @@ -1028,6 +1079,7 @@ impl Type { | Type::NamedGeneric(_, _, _) | Type::Function(_, _, _) | Type::FmtString(_, _) + | Type::InfixExpr(..) | Type::Error => true, // Quoted objects only exist at compile-time where the only execution @@ -1162,6 +1214,7 @@ impl Type { | Type::Constant(_) | Type::Quoted(_) | Type::Slice(_) + | Type::InfixExpr(..) | Type::Error => unreachable!("This type cannot exist as a parameter to main"), } } @@ -1416,7 +1469,17 @@ impl Type { use Type::*; use TypeVariableKind as Kind; - match (self, other) { + let lhs = match self { + Type::InfixExpr(..) => Cow::Owned(self.canonicalize()), + other => Cow::Borrowed(other), + }; + + let rhs = match other { + Type::InfixExpr(..) 
=> Cow::Owned(other.canonicalize()), + other => Cow::Borrowed(other), + }; + + match (lhs.as_ref(), rhs.as_ref()) { (Error, _) | (_, Error) => Ok(()), (Alias(alias, args), other) | (other, Alias(alias, args)) => { @@ -1530,6 +1593,27 @@ impl Type { elem_a.try_unify(elem_b, bindings) } + (InfixExpr(lhs_a, op_a, rhs_a), InfixExpr(lhs_b, op_b, rhs_b)) => { + if op_a == op_b { + lhs_a.try_unify(lhs_b, bindings)?; + rhs_a.try_unify(rhs_b, bindings) + } else { + Err(UnificationError) + } + } + + (Constant(value), other) | (other, Constant(value)) => { + if let Some(other_value) = other.evaluate_to_u32() { + if *value == other_value { + Ok(()) + } else { + Err(UnificationError) + } + } else { + Err(UnificationError) + } + } + (other_a, other_b) => { if other_a == other_b { Ok(()) @@ -1540,6 +1624,107 @@ impl Type { } } + /// Try to canonicalize the representation of this type. + /// Currently the only type with a canonical representation is + /// `Type::Infix` where for each consecutive commutative operator + /// we sort the non-constant operands by `Type: Ord` and place all constant + /// operands at the end, constant folded. + /// + /// For example: + /// - `canonicalize[((1 + N) + M) + 2] = (M + N) + 3` + /// - `canonicalize[A + 2 * B + 3 - 2] = A + (B * 2) + 3 - 2` + pub fn canonicalize(&self) -> Type { + match self.follow_bindings() { + Type::InfixExpr(lhs, op, rhs) => { + if let Some(value) = self.evaluate_to_u32() { + return Type::Constant(value); + } + + let lhs = lhs.canonicalize(); + let rhs = rhs.canonicalize(); + + if let Some(result) = Self::try_simplify_subtraction(&lhs, op, &rhs) { + return result; + } + + if op.is_commutative() { + return Self::sort_commutative(&lhs, op, &rhs); + } + + Type::InfixExpr(Box::new(lhs), op, Box::new(rhs)) + } + other => other, + } + } + + fn sort_commutative(lhs: &Type, op: BinaryTypeOperator, rhs: &Type) -> Type { + let mut queue = vec![lhs.clone(), rhs.clone()]; + + let mut sorted = BTreeSet::new(); + + let zero_value = if op == BinaryTypeOperator::Addition { 0 } else { 1 }; + let mut constant = zero_value; + + // Push each non-constant term to `sorted` to sort them. Recur on InfixExprs with the same operator. + while let Some(item) = queue.pop() { + match item.canonicalize() { + Type::InfixExpr(lhs, new_op, rhs) if new_op == op => { + queue.push(*lhs); + queue.push(*rhs); + } + Type::Constant(new_constant) => { + constant = op.function(constant, new_constant); + } + other => { + sorted.insert(other); + } + } + } + + if let Some(first) = sorted.pop_first() { + let mut typ = first.clone(); + + for rhs in sorted { + typ = Type::InfixExpr(Box::new(typ), op, Box::new(rhs.clone())); + } + + if constant != zero_value { + typ = Type::InfixExpr(Box::new(typ), op, Box::new(Type::Constant(constant))); + } + + typ + } else { + // Every type must have been a constant + Type::Constant(constant) + } + } + + /// Try to simplify a subtraction expression of `lhs - rhs`. + /// + /// - Simplifies `(a + C1) - C2` to `a + (C1 - C2)` if C1 and C2 are constants. 
+ fn try_simplify_subtraction(lhs: &Type, op: BinaryTypeOperator, rhs: &Type) -> Option { + use BinaryTypeOperator::*; + match lhs { + Type::InfixExpr(l_lhs, l_op, l_rhs) => { + // Simplify `(N + 2) - 1` + if op == Subtraction && *l_op == Addition { + if let (Some(lhs_const), Some(rhs_const)) = + (l_rhs.evaluate_to_u32(), rhs.evaluate_to_u32()) + { + if lhs_const > rhs_const { + let constant = Box::new(Type::Constant(lhs_const - rhs_const)); + return Some( + Type::InfixExpr(l_lhs.clone(), *l_op, constant).canonicalize(), + ); + } + } + } + None + } + _ => None, + } + } + /// Try to unify a type variable to `self`. /// This is a helper function factored out from try_unify. fn try_unify_to_type_variable( @@ -1637,6 +1822,11 @@ impl Type { Type::TypeVariable(_, TypeVariableKind::Constant(size)) => Some(*size), Type::Array(len, _elem) => len.evaluate_to_u32(), Type::Constant(x) => Some(*x), + Type::InfixExpr(lhs, op, rhs) => { + let lhs = lhs.evaluate_to_u32()?; + let rhs = rhs.evaluate_to_u32()?; + Some(op.function(lhs, rhs)) + } _ => None, } } @@ -1898,6 +2088,11 @@ impl Type { }); Type::TraitAsType(*s, name.clone(), args) } + Type::InfixExpr(lhs, op, rhs) => { + let lhs = lhs.substitute_helper(type_bindings, substitute_bound_typevars); + let rhs = rhs.substitute_helper(type_bindings, substitute_bound_typevars); + Type::InfixExpr(Box::new(lhs), *op, Box::new(rhs)) + } Type::FieldElement | Type::Integer(_, _) @@ -1943,6 +2138,7 @@ impl Type { || env.occurs(target_id) } Type::MutableReference(element) => element.occurs(target_id), + Type::InfixExpr(lhs, _op, rhs) => lhs.occurs(target_id) || rhs.occurs(target_id), Type::FieldElement | Type::Integer(_, _) @@ -2003,6 +2199,11 @@ impl Type { let args = vecmap(args, |arg| arg.follow_bindings()); TraitAsType(*s, name.clone(), args) } + InfixExpr(lhs, op, rhs) => { + let lhs = lhs.follow_bindings(); + let rhs = rhs.follow_bindings(); + InfixExpr(Box::new(lhs), *op, Box::new(rhs)) + } // Expect that this function should only be called on instantiated types Forall(..) 
=> unreachable!(), @@ -2090,6 +2291,17 @@ impl Type { } Type::MutableReference(elem) => elem.replace_named_generics_with_type_variables(), Type::Forall(_, typ) => typ.replace_named_generics_with_type_variables(), + Type::InfixExpr(lhs, _op, rhs) => { + lhs.replace_named_generics_with_type_variables(); + rhs.replace_named_generics_with_type_variables(); + } + } + } + + pub fn slice_element_type(&self) -> Option<&Type> { + match self { + Type::Slice(element) => Some(element), + _ => None, } } } @@ -2130,16 +2342,20 @@ fn convert_array_expression_to_slice( } impl BinaryTypeOperator { - /// Return the actual rust numeric function associated with this operator - pub fn function(self) -> fn(u32, u32) -> u32 { + /// Perform the actual rust numeric operation associated with this operator + pub fn function(self, a: u32, b: u32) -> u32 { match self { - BinaryTypeOperator::Addition => |a, b| a.wrapping_add(b), - BinaryTypeOperator::Subtraction => |a, b| a.wrapping_sub(b), - BinaryTypeOperator::Multiplication => |a, b| a.wrapping_mul(b), - BinaryTypeOperator::Division => |a, b| a.wrapping_div(b), - BinaryTypeOperator::Modulo => |a, b| a.wrapping_rem(b), // % b, + BinaryTypeOperator::Addition => a.wrapping_add(b), + BinaryTypeOperator::Subtraction => a.wrapping_sub(b), + BinaryTypeOperator::Multiplication => a.wrapping_mul(b), + BinaryTypeOperator::Division => a.wrapping_div(b), + BinaryTypeOperator::Modulo => a.wrapping_rem(b), } } + + fn is_commutative(self) -> bool { + matches!(self, BinaryTypeOperator::Addition | BinaryTypeOperator::Multiplication) + } } impl TypeVariableKind { @@ -2222,6 +2438,7 @@ impl From<&Type> for PrintableType { PrintableType::MutableReference { typ: Box::new(typ.as_ref().into()) } } Type::Quoted(_) => unreachable!(), + Type::InfixExpr(..) 
=> unreachable!(), } } } @@ -2314,6 +2531,7 @@ impl std::fmt::Debug for Type { write!(f, "&mut {element:?}") } Type::Quoted(quoted) => write!(f, "{}", quoted), + Type::InfixExpr(lhs, op, rhs) => write!(f, "({lhs:?} {op} {rhs:?})"), } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs index 387ced05258..be5180a777b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs @@ -1,3 +1,4 @@ +use crate::hir::def_collector::dc_crate::CompilationError; use crate::parser::ParserError; use crate::parser::ParserErrorReason; use crate::token::SpannedToken; @@ -42,6 +43,12 @@ impl From for ParserError { } } +impl From for CompilationError { + fn from(error: LexerErrorKind) -> Self { + ParserError::from(error).into() + } +} + impl LexerErrorKind { pub fn span(&self) -> Span { match self { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index c6a1d44f26b..2284991bbc0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -635,6 +635,10 @@ impl Attributes { pub fn is_no_predicates(&self) -> bool { self.function.as_ref().map_or(false, |func_attribute| func_attribute.is_no_predicates()) } + + pub fn is_varargs(&self) -> bool { + self.secondary.iter().any(|attr| matches!(attr, SecondaryAttribute::Varargs)) + } } /// An Attribute can be either a Primary Attribute or a Secondary Attribute @@ -728,6 +732,7 @@ impl Attribute { name.trim_matches('"').to_string().into(), )) } + ["varargs"] => Attribute::Secondary(SecondaryAttribute::Varargs), tokens => { tokens.iter().try_for_each(|token| validate(token))?; Attribute::Secondary(SecondaryAttribute::Custom(word.to_owned())) @@ -825,6 +830,9 @@ pub enum SecondaryAttribute { Field(String), Custom(String), Abi(String), + + /// A variable-argument comptime function. 
+ Varargs, } impl fmt::Display for SecondaryAttribute { @@ -839,6 +847,7 @@ impl fmt::Display for SecondaryAttribute { SecondaryAttribute::Export => write!(f, "#[export]"), SecondaryAttribute::Field(ref k) => write!(f, "#[field({k})]"), SecondaryAttribute::Abi(ref k) => write!(f, "#[abi({k})]"), + SecondaryAttribute::Varargs => write!(f, "#[varargs]"), } } } @@ -867,6 +876,7 @@ impl AsRef for SecondaryAttribute { | SecondaryAttribute::Abi(string) => string, SecondaryAttribute::ContractLibraryMethod => "", SecondaryAttribute::Export => "", + SecondaryAttribute::Varargs => "", } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs index 0ba74e22781..c437676b605 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/locations.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/locations.rs @@ -147,7 +147,7 @@ impl NodeInterner { location: Location, is_self_type: bool, ) { - if !self.track_references { + if !self.lsp_mode { return; } @@ -166,7 +166,7 @@ impl NodeInterner { referenced: ReferenceId, module_id: Option, ) { - if !self.track_references { + if !self.lsp_mode { return; } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index a46f32e3094..5ac730db400 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -383,8 +383,8 @@ impl<'interner> Monomorphizer<'interner> { self.parameter(field, &typ, new_params)?; } } - HirPattern::Struct(_, fields, _) => { - let struct_field_types = unwrap_struct_type(typ); + HirPattern::Struct(_, fields, location) => { + let struct_field_types = unwrap_struct_type(typ, *location)?; assert_eq!(struct_field_types.len(), fields.len()); let mut fields = @@ -663,8 +663,10 @@ impl<'interner> Monomorphizer<'interner> { constructor: HirConstructorExpression, id: node_interner::ExprId, ) -> Result { + let location = self.interner.expr_location(&id); + let typ = self.interner.id_type(id); - let field_types = unwrap_struct_type(&typ); + let field_types = unwrap_struct_type(&typ, location)?; let field_type_map = btree_map(&field_types, |x| x.clone()); @@ -740,8 +742,8 @@ impl<'interner> Monomorphizer<'interner> { let fields = unwrap_tuple_type(typ); self.unpack_tuple_pattern(value, patterns.into_iter().zip(fields)) } - HirPattern::Struct(_, patterns, _) => { - let fields = unwrap_struct_type(typ); + HirPattern::Struct(_, patterns, location) => { + let fields = unwrap_struct_type(typ, location)?; assert_eq!(patterns.len(), fields.len()); let mut patterns = @@ -975,12 +977,24 @@ impl<'interner> Monomorphizer<'interner> { } HirType::Struct(def, args) => { + // Not all generic arguments may be used in a struct's fields so we have to check + // the arguments as well as the fields in case any need to be defaulted or are unbound. + for arg in args { + Self::check_type(arg, location)?; + } + let fields = def.borrow().get_fields(args); let fields = try_vecmap(fields, |(_, field)| Self::convert_type(&field, location))?; ast::Type::Tuple(fields) } HirType::Alias(def, args) => { + // Similar to the struct case above: generics of an alias might not end up being + // used in the type that is aliased. + for arg in args { + Self::check_type(arg, location)?; + } + Self::convert_type(&def.borrow().get_type(args), location)? 
} @@ -1012,13 +1026,97 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::MutableReference(Box::new(element)) } - HirType::Forall(_, _) | HirType::Constant(_) | HirType::Error => { + HirType::Forall(_, _) + | HirType::Constant(_) + | HirType::InfixExpr(..) + | HirType::Error => { unreachable!("Unexpected type {} found", typ) } HirType::Quoted(_) => unreachable!("Tried to translate Code type into runtime code"), }) } + // Similar to `convert_type` but returns an error if any type variable can't be defaulted. + fn check_type(typ: &HirType, location: Location) -> Result<(), MonomorphizationError> { + match typ { + HirType::FieldElement + | HirType::Integer(..) + | HirType::Bool + | HirType::String(..) + | HirType::Unit + | HirType::TraitAsType(..) + | HirType::Forall(_, _) + | HirType::Constant(_) + | HirType::Error + | HirType::Quoted(_) => Ok(()), + HirType::FmtString(_size, fields) => Self::check_type(fields.as_ref(), location), + HirType::Array(_length, element) => Self::check_type(element.as_ref(), location), + HirType::Slice(element) => Self::check_type(element.as_ref(), location), + HirType::NamedGeneric(binding, _, _) => { + if let TypeBinding::Bound(binding) = &*binding.borrow() { + return Self::check_type(binding, location); + } + + Ok(()) + } + + HirType::TypeVariable(binding, kind) => { + if let TypeBinding::Bound(binding) = &*binding.borrow() { + return Self::check_type(binding, location); + } + + // Default any remaining unbound type variables. + // This should only happen if the variable in question is unused + // and within a larger generic type. + let default = match kind.default_type() { + Some(typ) => typ, + None => return Err(MonomorphizationError::TypeAnnotationsNeeded { location }), + }; + + Self::check_type(&default, location) + } + + HirType::Struct(_def, args) => { + for arg in args { + Self::check_type(arg, location)?; + } + + Ok(()) + } + + HirType::Alias(_def, args) => { + for arg in args { + Self::check_type(arg, location)?; + } + + Ok(()) + } + + HirType::Tuple(fields) => { + for field in fields { + Self::check_type(field, location)?; + } + + Ok(()) + } + + HirType::Function(args, ret, env) => { + for arg in args { + Self::check_type(arg, location)?; + } + + Self::check_type(ret, location)?; + Self::check_type(env, location) + } + + HirType::MutableReference(element) => Self::check_type(element, location), + HirType::InfixExpr(lhs, _, rhs) => { + Self::check_type(lhs, location)?; + Self::check_type(rhs, location) + } + } + } + fn is_function_closure(&self, t: ast::Type) -> bool { if self.is_function_closure_type(&t) { true @@ -1595,7 +1693,7 @@ impl<'interner> Monomorphizer<'interner> { self.create_zeroed_function(parameter_types, ret_type, env, location) } ast::Type::Slice(element_type) => { - ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { + ast::Expression::Literal(ast::Literal::Slice(ast::ArrayLiteral { contents: vec![], typ: ast::Type::Slice(element_type.clone()), })) @@ -1753,9 +1851,19 @@ fn unwrap_tuple_type(typ: &HirType) -> Vec { } } -fn unwrap_struct_type(typ: &HirType) -> Vec<(String, HirType)> { +fn unwrap_struct_type( + typ: &HirType, + location: Location, +) -> Result, MonomorphizationError> { match typ.follow_bindings() { - HirType::Struct(def, args) => def.borrow().get_fields(&args), + HirType::Struct(def, args) => { + // Some of args might not be mentioned in fields, so we need to check that they aren't unbound. 
+ for arg in &args {
+ Monomorphizer::check_type(arg, location)?;
+ }
+
+ Ok(def.borrow().get_fields(&args))
+ }
 other => unreachable!("unwrap_struct_type: expected struct, found {:?}", other),
 }
}
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs
index 87ff45f8f1a..c701b29f898 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs
@@ -1,5 +1,4 @@
 use std::borrow::Cow;
-use std::collections::HashMap;
 use std::fmt;
 use std::hash::Hash;
 use std::marker::Copy;
@@ -12,6 +11,7 @@ use noirc_errors::{Location, Span, Spanned};
 use petgraph::algo::tarjan_scc;
 use petgraph::prelude::DiGraph;
 use petgraph::prelude::NodeIndex as PetGraphIndex;
+use rustc_hash::FxHashMap as HashMap;
 use crate::ast::Ident;
 use crate::graph::CrateId;
@@ -200,8 +200,8 @@ pub struct NodeInterner {
 /// the actual type since types do not implement Send or Sync.
 quoted_types: noirc_arena::Arena<Type>,
- /// Whether to track references. In regular compilations this is false, but tools set it to true.
- pub(crate) track_references: bool,
+ /// Determines whether to run in LSP mode. In LSP mode references are tracked.
+ pub(crate) lsp_mode: bool,
 /// Store the location of the references in the graph.
 /// Edges are directed from reference nodes to referenced nodes.
@@ -230,6 +230,14 @@ pub struct NodeInterner {
 // The module where each reference is
 // (ReferenceId::Reference and ReferenceId::Local aren't included here)
 pub(crate) reference_modules: HashMap<ReferenceId, ModuleId>,
+
+ /// Each value currently in scope in the comptime interpreter.
+ /// Each element of the Vec represents a scope with every scope together making
+ /// up all currently visible definitions. The first scope is always the global scope.
+ ///
+ /// This is stored in the NodeInterner so that the Elaborator from each crate can
+ /// share the same global values.
+ pub(crate) comptime_scopes: Vec<HashMap<DefinitionId, comptime::Value>>,
 }
 /// A dependency in the dependency graph may be a type or a definition. 
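The new `comptime_scopes` field is a stack of per-scope maps: lookups walk from the innermost scope outwards, new bindings land in the innermost scope, and index 0 is the shared global scope. A rough sketch of that pattern under those assumptions (the types and method names below are simplified stand-ins, not the interner's actual API):

use rustc_hash::FxHashMap;
use std::hash::Hash;

struct Scopes<K: Hash + Eq, V> {
    // Index 0 is the global scope; later entries are nested scopes.
    stack: Vec<FxHashMap<K, V>>,
}

impl<K: Hash + Eq, V> Scopes<K, V> {
    fn new() -> Self {
        Self { stack: vec![FxHashMap::default()] }
    }

    // Enter a nested scope.
    fn push(&mut self) {
        self.stack.push(FxHashMap::default());
    }

    // Leave the current scope, but never drop the global one.
    fn pop(&mut self) {
        if self.stack.len() > 1 {
            self.stack.pop();
        }
    }

    // New definitions always go into the innermost scope.
    fn define(&mut self, key: K, value: V) {
        self.stack.last_mut().expect("at least the global scope").insert(key, value);
    }

    // Lookups search from the innermost scope outwards.
    fn lookup(&self, key: &K) -> Option<&V> {
        self.stack.iter().rev().find_map(|scope| scope.get(key))
    }
}

Keeping the whole stack on the interner, rather than on each elaborator, is what lets every crate's elaborator see the same global scope.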
@@ -556,44 +564,45 @@ impl Default for NodeInterner { fn default() -> Self { NodeInterner { nodes: Arena::default(), - func_meta: HashMap::new(), - function_definition_ids: HashMap::new(), - function_modifiers: HashMap::new(), - function_modules: HashMap::new(), - module_attributes: HashMap::new(), - func_id_to_trait: HashMap::new(), + func_meta: HashMap::default(), + function_definition_ids: HashMap::default(), + function_modifiers: HashMap::default(), + function_modules: HashMap::default(), + module_attributes: HashMap::default(), + func_id_to_trait: HashMap::default(), dependency_graph: petgraph::graph::DiGraph::new(), - dependency_graph_indices: HashMap::new(), - id_to_location: HashMap::new(), + dependency_graph_indices: HashMap::default(), + id_to_location: HashMap::default(), definitions: vec![], - id_to_type: HashMap::new(), - definition_to_type: HashMap::new(), - structs: HashMap::new(), - struct_attributes: HashMap::new(), + id_to_type: HashMap::default(), + definition_to_type: HashMap::default(), + structs: HashMap::default(), + struct_attributes: HashMap::default(), type_aliases: Vec::new(), - traits: HashMap::new(), - trait_implementations: HashMap::new(), + traits: HashMap::default(), + trait_implementations: HashMap::default(), next_trait_implementation_id: 0, - trait_implementation_map: HashMap::new(), - selected_trait_implementations: HashMap::new(), - infix_operator_traits: HashMap::new(), - prefix_operator_traits: HashMap::new(), + trait_implementation_map: HashMap::default(), + selected_trait_implementations: HashMap::default(), + infix_operator_traits: HashMap::default(), + prefix_operator_traits: HashMap::default(), ordering_type: None, - instantiation_bindings: HashMap::new(), - field_indices: HashMap::new(), + instantiation_bindings: HashMap::default(), + field_indices: HashMap::default(), next_type_variable_id: std::cell::Cell::new(0), globals: Vec::new(), - global_attributes: HashMap::new(), - struct_methods: HashMap::new(), - primitive_methods: HashMap::new(), + global_attributes: HashMap::default(), + struct_methods: HashMap::default(), + primitive_methods: HashMap::default(), type_alias_ref: Vec::new(), type_ref_locations: Vec::new(), quoted_types: Default::default(), - track_references: false, + lsp_mode: false, location_indices: LocationIndices::default(), reference_graph: petgraph::graph::DiGraph::new(), - reference_graph_indices: HashMap::new(), - reference_modules: HashMap::new(), + reference_graph_indices: HashMap::default(), + reference_modules: HashMap::default(), + comptime_scopes: vec![HashMap::default()], } } } @@ -1437,6 +1446,8 @@ impl NodeInterner { let mut matching_impls = Vec::new(); + let mut where_clause_errors = Vec::new(); + for (existing_object_type2, impl_kind) in impls { // Bug: We're instantiating only the object type's generics here, not all of the trait's generics like we need to let (existing_object_type, instantiation_bindings) = @@ -1471,14 +1482,17 @@ impl NodeInterner { let trait_impl = self.get_trait_implementation(*impl_id); let trait_impl = trait_impl.borrow(); - if let Err(mut errors) = self.validate_where_clause( + if let Err(errors) = self.validate_where_clause( &trait_impl.where_clause, &mut fresh_bindings, &instantiation_bindings, recursion_limit, ) { - errors.push(make_constraint()); - return Err(errors); + // Only keep the first errors we get from a failing where clause + if where_clause_errors.is_empty() { + where_clause_errors.extend(errors); + } + continue; } } @@ -1491,7 +1505,8 @@ impl NodeInterner { *type_bindings 
= fresh_bindings; Ok(impl_) } else if matching_impls.is_empty() { - Err(vec![make_constraint()]) + where_clause_errors.push(make_constraint()); + Err(where_clause_errors) } else { // multiple matching impls, type annotations needed Err(vec![]) @@ -1969,6 +1984,10 @@ impl NodeInterner { let env = Box::new(Type::Unit); (Type::Function(args, Box::new(ret.clone()), env), ret) } + + pub fn is_in_lsp_mode(&self) -> bool { + self.lsp_mode + } } impl Methods { @@ -2067,6 +2086,7 @@ fn get_type_method_key(typ: &Type) -> Option { | Type::Constant(_) | Type::Error | Type::Struct(_, _) + | Type::InfixExpr(..) | Type::TraitAsType(..) => None, } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop b/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop index 5bf48a764d6..1488a53183e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop +++ b/noir/noir-repo/compiler/noirc_frontend/src/noir_parser.lalrpop @@ -4,7 +4,7 @@ use crate::lexer::token::BorrowedToken; use crate::lexer::token as noir_token; use crate::lexer::errors::LexerErrorKind; use crate::parser::TopLevelStatement; -use crate::ast::{Ident, Path, PathKind, UseTree, UseTreeKind}; +use crate::ast::{Ident, Path, PathKind, PathSegment, UseTree, UseTreeKind}; use lalrpop_util::ErrorRecovery; @@ -110,7 +110,7 @@ pub(crate) TopLevelStatement: TopLevelStatement = { UseTree: UseTree = { // path::to::ident as SomeAlias => { - let ident = prefix.pop(); + let ident = prefix.pop().ident; let kind = UseTreeKind::Path(ident, alias); UseTree { prefix, kind } }, @@ -129,7 +129,7 @@ pub(crate) Path: Path = { Path { segments, kind, span } }, - => { + => { segments.insert(0, id); let kind = PathKind::Plain; let span = Span::from(lo as u32..hi as u32); @@ -137,12 +137,20 @@ pub(crate) Path: Path = { }, } -PathSegments: Vec = { - )*> => { +PathSegments: Vec = { + )*> => { segments } } +PathSegment: PathSegment = { + => { + let token = noir_token::Token::Ident(i.to_string()); + let span = Span::from(lo as u32..hi as u32); + PathSegment::from(Ident::from_token(token, span)) + }, +} + Alias: Ident = { r"[\t\r\n ]+" "as" r"[\t\r\n ]+" => <>, } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs index c566489eb40..80adb01dc9a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs @@ -46,6 +46,8 @@ pub enum ParserErrorReason { Lexer(LexerErrorKind), #[error("The only supported numeric generic types are `u1`, `u8`, `u16`, and `u32`")] ForbiddenNumericGenericType, + #[error("Invalid call data identifier, must be a number. E.g `call_data(0)`")] + InvalidCallDataIdentifier, } /// Represents a parsing error, or a parsing error in the making. 
diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs index c62d66769ac..677d741b5e0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs @@ -22,7 +22,7 @@ use chumsky::primitive::Container; pub use errors::ParserError; pub use errors::ParserErrorReason; use noirc_errors::Span; -pub use parser::{expression, parse_program, top_level_items, trait_bound}; +pub use parser::{expression, parse_program, parse_type, top_level_items, trait_bound}; #[derive(Debug, Clone)] pub enum TopLevelStatement { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index 7f3e0e68bbc..9772814027f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -24,7 +24,8 @@ //! be limited to cases like the above `fn` example where it is clear we shouldn't back out of the //! current parser to try alternative parsers in a `choice` expression. use self::primitives::{keyword, macro_quote_marker, mutable_reference, variable}; -use self::types::{generic_type_args, maybe_comp_time, parse_type}; +use self::types::{generic_type_args, maybe_comp_time}; +pub use types::parse_type; use super::{ foldl_with_span, labels::ParsingRuleLabel, parameter_name_recovery, parameter_recovery, @@ -37,7 +38,7 @@ use crate::ast::{ BinaryOp, BinaryOpKind, BlockExpression, ForLoopStatement, ForRange, Ident, IfExpression, InfixExpression, LValue, Literal, ModuleDeclaration, NoirTypeAlias, Param, Path, Pattern, Recoverable, Statement, TraitBound, TypeImpl, UnaryRhsMemberAccess, UnaryRhsMethodCall, - UnresolvedTraitConstraint, UseTree, UseTreeKind, Visibility, + UseTree, UseTreeKind, Visibility, }; use crate::ast::{ Expression, ExpressionKind, LetStatement, StatementKind, UnresolvedType, UnresolvedTypeData, @@ -45,6 +46,7 @@ use crate::ast::{ use crate::lexer::{lexer::from_spanned_token_result, Lexer}; use crate::parser::{force, ignore_then_commit, statement_recovery}; use crate::token::{Keyword, Token, TokenKind}; +use acvm::AcirField; use chumsky::prelude::*; use iter_extended::vecmap; @@ -69,8 +71,9 @@ lalrpop_mod!(pub noir_parser); mod test_helpers; use literals::literal; -use path::{maybe_empty_path, path}; +use path::{maybe_empty_path, path, path_no_turbofish}; use primitives::{dereference, ident, negation, not, nothing, right_shift_operator, token_kind}; +use traits::where_clause; /// Entry function for the parser - also handles lexing internally. /// @@ -215,9 +218,8 @@ fn top_level_statement<'a>( /// /// implementation: 'impl' generics type '{' function_definition ... 
'}' fn implementation() -> impl NoirParser { - maybe_comp_time() - .then_ignore(keyword(Keyword::Impl)) - .then(function::generics()) + keyword(Keyword::Impl) + .ignore_then(function::generics()) .then(parse_type().map_with_span(|typ, span| (typ, span))) .then(where_clause()) .then_ignore(just(Token::LeftBrace)) @@ -225,14 +227,13 @@ fn implementation() -> impl NoirParser { .then_ignore(just(Token::RightBrace)) .map(|args| { let ((other_args, where_clause), methods) = args; - let ((is_comptime, generics), (object_type, type_span)) = other_args; + let (generics, (object_type, type_span)) = other_args; TopLevelStatement::Impl(TypeImpl { generics, object_type, type_span, where_clause, methods, - is_comptime, }) }) } @@ -365,45 +366,8 @@ fn function_declaration_parameters() -> impl NoirParser impl NoirParser> { - struct MultiTraitConstraint { - typ: UnresolvedType, - trait_bounds: Vec, - } - - let constraints = parse_type() - .then_ignore(just(Token::Colon)) - .then(trait_bounds()) - .map(|(typ, trait_bounds)| MultiTraitConstraint { typ, trait_bounds }); - - keyword(Keyword::Where) - .ignore_then(constraints.separated_by(just(Token::Comma))) - .or_not() - .map(|option| option.unwrap_or_default()) - .map(|x: Vec| { - let mut result: Vec = Vec::new(); - for constraint in x { - for bound in constraint.trait_bounds { - result.push(UnresolvedTraitConstraint { - typ: constraint.typ.clone(), - trait_bound: bound, - }); - } - } - result - }) -} - -fn trait_bounds() -> impl NoirParser> { - trait_bound().separated_by(just(Token::Plus)).at_least(1).allow_trailing() -} - pub fn trait_bound() -> impl NoirParser { - path().then(generic_type_args(parse_type())).map(|(trait_path, trait_generics)| TraitBound { - trait_path, - trait_generics, - trait_id: None, - }) + traits::trait_bound() } fn block_expr<'a>( @@ -467,8 +431,8 @@ fn rename() -> impl NoirParser> { fn use_tree() -> impl NoirParser { recursive(|use_tree| { - let simple = path().then(rename()).map(|(mut prefix, alias)| { - let ident = prefix.pop(); + let simple = path_no_turbofish().then(rename()).map(|(mut prefix, alias)| { + let ident = prefix.pop().ident; UseTree { prefix, kind: UseTreeKind::Path(ident, alias) } }); @@ -502,6 +466,8 @@ where assertion::assertion_eq(expr_parser.clone()), declaration(expr_parser.clone()), assignment(expr_parser.clone()), + if_statement(expr_no_constructors.clone(), statement.clone()), + block_statement(statement.clone()), for_loop(expr_no_constructors.clone(), statement.clone()), break_statement(), continue_statement(), @@ -593,7 +559,7 @@ fn pattern() -> impl NoirParser { .separated_by(just(Token::Comma)) .delimited_by(just(Token::LeftBrace), just(Token::RightBrace)); - let struct_pattern = path() + let struct_pattern = path(super::parse_type()) .then(struct_pattern_fields) .map_with_span(|(typename, fields), span| Pattern::Struct(typename, fields, span)); @@ -680,19 +646,28 @@ where }) } +fn call_data() -> impl NoirParser { + keyword(Keyword::CallData).then(parenthesized(literal())).validate(|token, span, emit| { + match token { + (_, ExpressionKind::Literal(Literal::Integer(x, _))) => { + let id = x.to_u128() as u32; + Visibility::CallData(id) + } + _ => { + emit(ParserError::with_reason(ParserErrorReason::InvalidCallDataIdentifier, span)); + Visibility::CallData(0) + } + } + }) +} + fn optional_visibility() -> impl NoirParser { keyword(Keyword::Pub) - .or(keyword(Keyword::CallData)) - .or(keyword(Keyword::ReturnData)) + .map(|_| Visibility::Public) + .or(call_data()) + .or(keyword(Keyword::ReturnData).map(|_| 
Visibility::ReturnData)) .or_not() - .map(|opt| match opt { - Some(Token::Keyword(Keyword::Pub)) => Visibility::Public, - Some(Token::Keyword(Keyword::CallData)) | Some(Token::Keyword(Keyword::ReturnData)) => { - Visibility::DataBus - } - None => Visibility::Private, - _ => unreachable!("unexpected token found"), - }) + .map(|opt| opt.unwrap_or(Visibility::Private)) } pub fn expression() -> impl ExprParser { @@ -959,6 +934,28 @@ where }) } +fn if_statement<'a, P, S>( + expr_no_constructors: P, + statement: S, +) -> impl NoirParser + 'a +where + P: ExprParser + 'a, + S: NoirParser + 'a, +{ + if_expr(expr_no_constructors, statement).map_with_span(|expression_kind, span| { + StatementKind::Expression(Expression::new(expression_kind, span)) + }) +} + +fn block_statement<'a, S>(statement: S) -> impl NoirParser + 'a +where + S: NoirParser + 'a, +{ + block(statement).map_with_span(|block, span| { + StatementKind::Expression(Expression::new(ExpressionKind::Block(block), span)) + }) +} + fn for_loop<'a, P, S>(expr_no_constructors: P, statement: S) -> impl NoirParser + 'a where P: ExprParser + 'a, @@ -1155,7 +1152,7 @@ fn constructor(expr_parser: impl ExprParser) -> impl NoirParser .allow_trailing() .delimited_by(just(Token::LeftBrace), just(Token::RightBrace)); - path().then(args).map(ExpressionKind::constructor) + path(super::parse_type()).then(args).map(ExpressionKind::constructor) } fn constructor_field

(expr_parser: P) -> impl NoirParser<(Ident, Expression)> @@ -1325,20 +1322,6 @@ mod test { fn parse_block() { parse_with(block(fresh_statement()), "{ [0,1,2,3,4] }").unwrap(); - // Regression for #1310: this should be parsed as a block and not a function call - let res = - parse_with(block(fresh_statement()), "{ if true { 1 } else { 2 } (3, 4) }").unwrap(); - match unwrap_expr(&res.statements.last().unwrap().kind) { - // The `if` followed by a tuple is currently creates a block around both in case - // there was none to start with, so there is an extra block here. - ExpressionKind::Block(block) => { - assert_eq!(block.statements.len(), 2); - assert!(matches!(unwrap_expr(&block.statements[0].kind), ExpressionKind::If(_))); - assert!(matches!(unwrap_expr(&block.statements[1].kind), ExpressionKind::Tuple(_))); - } - _ => unreachable!(), - } - parse_all_failing( block(fresh_statement()), vec![ @@ -1352,14 +1335,6 @@ mod test { ); } - /// Extract an Statement::Expression from a statement or panic - fn unwrap_expr(stmt: &StatementKind) -> &ExpressionKind { - match stmt { - StatementKind::Expression(expr) => &expr.kind, - _ => unreachable!(), - } - } - #[test] fn parse_let() { // Why is it valid to specify a let declaration as having type u8? @@ -1656,4 +1631,40 @@ mod test { let failing = vec!["quote {}}", "quote a", "quote { { { } } } }"]; parse_all_failing(quote(), failing); } + + #[test] + fn test_parses_block_statement_not_infix_expression() { + let src = r#" + { + {} + -1 + }"#; + let (block_expr, _) = parse_recover(block(fresh_statement()), src); + let block_expr = block_expr.expect("Failed to parse module"); + assert_eq!(block_expr.statements.len(), 2); + } + + #[test] + fn test_parses_if_statement_not_infix_expression() { + let src = r#" + { + if 1 { 2 } else { 3 } + -1 + }"#; + let (block_expr, _) = parse_recover(block(fresh_statement()), src); + let block_expr = block_expr.expect("Failed to parse module"); + assert_eq!(block_expr.statements.len(), 2); + } + + #[test] + fn test_parses_if_statement_followed_by_tuple_as_two_separate_statements() { + // Regression for #1310: this should not be parsed as a function call + let src = r#" + { + if 1 { 2 } else { 3 } (1, 2) + }"#; + let (block_expr, _) = parse_recover(block(fresh_statement()), src); + let block_expr = block_expr.expect("Failed to parse module"); + assert_eq!(block_expr.statements.len(), 2); + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs:28:9 b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs:28:9 new file mode 100644 index 00000000000..47dfb32b53b --- /dev/null +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs:28:9 @@ -0,0 +1,45 @@ +[?1049h[?1h[?2004h[?2026$p[?u[?12h[?25h[?25l(B[38:2:235:219:178m[48:2:168:153:132m [No Name]  (B[38:2:168:153:132m(B[38:2:235:219:178m (B[38:2:80:73:69m(B[38:2:168:153:132m[48:2:80:73:69m buffers +(B[38:2:124:111:100m 1 (B[38:2:235:219:178m +(B[38:2:80:73:69m~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +~ +(B[38:2:235:219:178m[48:2:168:153:132m (B[38:2:235:219:178m[48:2:168:153:132mNORMAL(B[38:2:235:219:178m[48:2:168:153:132m (B[38:2:168:153:132m[48:2:80:73:69m  jf/quoted-as-type (B[38:2:80:73:69m[48:2:60:56:54m(B[38:2:168:153:132m[48:2:60:56:54m (B[38:2:60:56:54m[48:2:60:56:54m(B[38:2:168:153:132m[48:2:60:56:54m  (B[38:2:80:73:69m[48:2:60:56:54m(B[38:2:168:153:132m[48:2:80:73:69m(B[38:2:235:219:178m[48:2:168:153:132m 100% (B[38:2:235:219:178m[48:2:168:153:132m☰ 0/1 (B[38:2:235:219:178m[48:2:168:153:132m : 1 
(B[38:2:254:128:25m[48:2:168:153:132m(B[38:2:235:219:178mNVIM v0.10.0Nvim is open source and freely distributablehttps://neovim.io/#chattype :help nvim(B[38:2:80:73:69m(B[38:2:235:219:178m if you are new! type :checkhealth(B[38:2:80:73:69m(B[38:2:235:219:178m to optimize Nvimtype :q(B[38:2:80:73:69m(B[38:2:235:219:178m to exit type :help(B[38:2:80:73:69m(B[38:2:235:219:178m for help type :help news(B[38:2:80:73:69m(B[38:2:235:219:178m to see changes in v0.10Help poor children in Uganda!type :help iccf(B[38:2:80:73:69m(B[38:2:235:219:178m for information ]112[2 q]112[2 q[?1002h[?1006h(B[38:2:235:219:178m[48:2:168:153:132m [No Name]  (B[38:2:168:153:132m(B[38:2:235:219:178m (B[38:2:80:73:69m(B[38:2:168:153:132m[48:2:80:73:69m buffers +(B[38:2:124:111:100m 1 (B[38:2:235:219:178m +(B[38:2:80:73:69m~ +~ +~ (B[38:2:235:219:178mNVIM v0.10.0(B[38:2:80:73:69m +~ +~ (B[38:2:235:219:178mNvim is open source and freely distributable(B[38:2:80:73:69m +~ (B[38:2:235:219:178mhttps://neovim.io/#chat(B[38:2:80:73:69m +~ +~ (B[38:2:235:219:178mtype :help nvim(B[38:2:80:73:69m(B[38:2:235:219:178m if you are new! (B[38:2:80:73:69m +~ (B[38:2:235:219:178mtype :checkhealth(B[38:2:80:73:69m(B[38:2:235:219:178m to optimize Nvim(B[38:2:80:73:69m +~ (B[38:2:235:219:178mtype :q(B[38:2:80:73:69m(B[38:2:235:219:178m to exit (B[38:2:80:73:69m +~ (B[38:2:235:219:178mtype :help(B[38:2:80:73:69m(B[38:2:235:219:178m for help (B[38:2:80:73:69m +~ +~ (B[38:2:235:219:178mtype :help news(B[38:2:80:73:69m(B[38:2:235:219:178m to see changes in v0.10(B[38:2:80:73:69m +~ +~ (B[38:2:235:219:178mHelp poor children in Uganda!(B[38:2:80:73:69m +~ (B[38:2:235:219:178mtype :help iccf(B[38:2:80:73:69m(B[38:2:235:219:178m for information (B[38:2:80:73:69m +~ +~ +~ +~ +(B[38:2:235:219:178m[48:2:168:153:132m (B[38:2:235:219:178m[48:2:168:153:132mNORMAL(B[38:2:235:219:178m[48:2:168:153:132m (B[38:2:168:153:132m[48:2:80:73:69m  jf/quoted-as-type (B[38:2:80:73:69m[48:2:60:56:54m(B[38:2:168:153:132m[48:2:60:56:54m (B[38:2:60:56:54m[48:2:60:56:54m(B[38:2:168:153:132m[48:2:60:56:54m  (B[38:2:80:73:69m[48:2:60:56:54m(B[38:2:168:153:132m[48:2:80:73:69m(B[38:2:235:219:178m[48:2:168:153:132m 100% (B[38:2:235:219:178m[48:2:168:153:132m☰ 0/1 (B[38:2:235:219:178m[48:2:168:153:132m : 1 (B[38:2:254:128:25m[48:2:168:153:132m(B[38:2:235:219:178m[?12h[?25h[?25l[?1004h[?12h[?25h \ No newline at end of file diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs index 2fd337e1cb1..3de48d2e02a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs @@ -2,9 +2,10 @@ use super::{ attributes::{attributes, validate_attributes}, block, fresh_statement, ident, keyword, maybe_comp_time, nothing, optional_visibility, parameter_name_recovery, parameter_recovery, parenthesized, parse_type, pattern, + primitives::token_kind, self_parameter, where_clause, NoirParser, }; -use crate::token::{Keyword, Token}; +use crate::token::{Keyword, Token, TokenKind}; use crate::{ast::IntegerBitSize, parser::spanned}; use crate::{ ast::{ @@ -110,8 +111,15 @@ pub(super) fn generic_type() -> impl NoirParser { ident().map(UnresolvedGeneric::Variable) } +pub(super) fn resolved_generic() -> impl NoirParser { + token_kind(TokenKind::QuotedType).map_with_span(|token, span| match token { + Token::QuotedType(id) => UnresolvedGeneric::Resolved(id, span), + _ => 
unreachable!("token_kind(QuotedType) guarantees we parse a quoted type"), + }) +} + pub(super) fn generic() -> impl NoirParser { - generic_type().or(numeric_generic()) + generic_type().or(numeric_generic()).or(resolved_generic()) } /// non_empty_ident_list: ident ',' non_empty_ident_list diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/path.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/path.rs index 8957fb7c40b..140650af1a2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/path.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/path.rs @@ -1,24 +1,36 @@ -use crate::ast::{Path, PathKind}; +use crate::ast::{Path, PathKind, PathSegment, UnresolvedType}; use crate::parser::NoirParser; use crate::token::{Keyword, Token}; use chumsky::prelude::*; -use super::{ident, keyword}; +use super::keyword; +use super::primitives::{path_segment, path_segment_no_turbofish}; -pub(super) fn path() -> impl NoirParser { - let idents = || ident().separated_by(just(Token::DoubleColon)).at_least(1); +pub(super) fn path<'a>( + type_parser: impl NoirParser + 'a, +) -> impl NoirParser + 'a { + path_inner(path_segment(type_parser)) +} + +pub(super) fn path_no_turbofish() -> impl NoirParser { + path_inner(path_segment_no_turbofish()) +} + +fn path_inner<'a>(segment: impl NoirParser + 'a) -> impl NoirParser + 'a { + let segments = segment.separated_by(just(Token::DoubleColon)).at_least(1); let make_path = |kind| move |segments, span| Path { segments, kind, span }; let prefix = |key| keyword(key).ignore_then(just(Token::DoubleColon)); - let path_kind = |key, kind| prefix(key).ignore_then(idents()).map_with_span(make_path(kind)); + let path_kind = + |key, kind| prefix(key).ignore_then(segments.clone()).map_with_span(make_path(kind)); choice(( path_kind(Keyword::Crate, PathKind::Crate), path_kind(Keyword::Dep, PathKind::Dep), path_kind(Keyword::Super, PathKind::Super), - idents().map_with_span(make_path(PathKind::Plain)), + segments.map_with_span(make_path(PathKind::Plain)), )) } @@ -30,13 +42,16 @@ fn empty_path() -> impl NoirParser { } pub(super) fn maybe_empty_path() -> impl NoirParser { - path().or(empty_path()) + path_no_turbofish().or(empty_path()) } #[cfg(test)] mod test { use super::*; - use crate::parser::parser::test_helpers::{parse_all_failing, parse_with}; + use crate::parser::{ + parse_type, + parser::test_helpers::{parse_all_failing, parse_with}, + }; #[test] fn parse_path() { @@ -45,18 +60,17 @@ mod test { ("std::hash", vec!["std", "hash"]), ("std::hash::collections", vec!["std", "hash", "collections"]), ("foo::bar", vec!["foo", "bar"]), - ("foo::bar", vec!["foo", "bar"]), ("crate::std::hash", vec!["std", "hash"]), ]; for (src, expected_segments) in cases { - let path: Path = parse_with(path(), src).unwrap(); + let path: Path = parse_with(path(parse_type()), src).unwrap(); for (segment, expected) in path.segments.into_iter().zip(expected_segments) { - assert_eq!(segment.0.contents, expected); + assert_eq!(segment.ident.0.contents, expected); } } - parse_all_failing(path(), vec!["std::", "::std", "std::hash::", "foo::1"]); + parse_all_failing(path(parse_type()), vec!["std::", "::std", "std::hash::", "foo::1"]); } #[test] @@ -69,12 +83,12 @@ mod test { ]; for (src, expected_path_kind) in cases { - let path = parse_with(path(), src).unwrap(); + let path = parse_with(path(parse_type()), src).unwrap(); assert_eq!(path.kind, expected_path_kind); } parse_all_failing( - path(), + path(parse_type()), vec!["crate", "crate::std::crate", 
"foo::bar::crate", "foo::dep"], ); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/primitives.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/primitives.rs index 88f9e591aba..25f693bf504 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/primitives.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/primitives.rs @@ -1,13 +1,14 @@ use chumsky::prelude::*; -use crate::ast::{ExpressionKind, Ident, UnaryOp}; +use crate::ast::{ExpressionKind, Ident, PathSegment, UnaryOp}; use crate::macros_api::UnresolvedType; use crate::{ parser::{labels::ParsingRuleLabel, ExprParser, NoirParser, ParserError}, token::{Keyword, Token, TokenKind}, }; -use super::path; +use super::path::{path, path_no_turbofish}; +use super::types::required_generic_type_args; /// This parser always parses no input and fails pub(super) fn nothing() -> impl NoirParser { @@ -32,6 +33,20 @@ pub(super) fn token_kind(token_kind: TokenKind) -> impl NoirParser { }) } +pub(super) fn path_segment<'a>( + type_parser: impl NoirParser + 'a, +) -> impl NoirParser + 'a { + ident().then(turbofish(type_parser)).map_with_span(|(ident, generics), span| PathSegment { + ident, + generics, + span, + }) +} + +pub(super) fn path_segment_no_turbofish() -> impl NoirParser { + ident().map(PathSegment::from) +} + pub(super) fn ident() -> impl NoirParser { token_kind(TokenKind::Ident).map_with_span(Ident::from_token) } @@ -81,17 +96,15 @@ where pub(super) fn turbofish<'a>( type_parser: impl NoirParser + 'a, ) -> impl NoirParser>> + 'a { - just(Token::DoubleColon).ignore_then(super::generic_type_args(type_parser)).or_not() + just(Token::DoubleColon).ignore_then(required_generic_type_args(type_parser)).or_not() } pub(super) fn variable() -> impl NoirParser { - path() - .then(turbofish(super::parse_type())) - .map(|(path, generics)| ExpressionKind::Variable(path, generics)) + path(super::parse_type()).map(ExpressionKind::Variable) } pub(super) fn variable_no_turbofish() -> impl NoirParser { - path().map(|path| ExpressionKind::Variable(path, None)) + path_no_turbofish().map(ExpressionKind::Variable) } pub(super) fn macro_quote_marker() -> impl NoirParser { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/structs.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/structs.rs index 9a3adf74d7f..58bf1693eee 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/structs.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/structs.rs @@ -1,7 +1,6 @@ use chumsky::prelude::*; use crate::ast::{Ident, NoirStruct, UnresolvedType}; -use crate::parser::parser::types::maybe_comp_time; use crate::{ parser::{ parser::{ @@ -29,21 +28,13 @@ pub(super) fn struct_definition() -> impl NoirParser { .or(just(Semicolon).to(Vec::new())); attributes() - .then(maybe_comp_time()) .then_ignore(keyword(Struct)) .then(ident()) .then(function::generics()) .then(fields) - .validate(|((((attributes, is_comptime), name), generics), fields), span, emit| { + .validate(|(((attributes, name), generics), fields), span, emit| { let attributes = validate_secondary_attributes(attributes, span, emit); - TopLevelStatement::Struct(NoirStruct { - name, - attributes, - generics, - fields, - span, - is_comptime, - }) + TopLevelStatement::Struct(NoirStruct { name, attributes, generics, fields, span }) }) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs index 
4e4c9d5c0db..ffcf7e07629 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/traits.rs @@ -2,7 +2,7 @@ use chumsky::prelude::*; use super::attributes::{attributes, validate_secondary_attributes}; use super::function::function_return_type; -use super::types::maybe_comp_time; +use super::path::path_no_turbofish; use super::{block, expression, fresh_statement, function, function_declaration_parameters}; use crate::ast::{ @@ -17,7 +17,7 @@ use crate::{ token::{Keyword, Token}, }; -use super::{generic_type_args, parse_type, path, primitives::ident}; +use super::{generic_type_args, parse_type, primitives::ident}; pub(super) fn trait_definition() -> impl NoirParser { attributes() @@ -104,10 +104,9 @@ fn trait_type_declaration() -> impl NoirParser { /// /// trait_implementation: 'impl' generics ident generic_args for type '{' trait_implementation_body '}' pub(super) fn trait_implementation() -> impl NoirParser { - maybe_comp_time() - .then_ignore(keyword(Keyword::Impl)) - .then(function::generics()) - .then(path()) + keyword(Keyword::Impl) + .ignore_then(function::generics()) + .then(path_no_turbofish()) .then(generic_type_args(parse_type())) .then_ignore(keyword(Keyword::For)) .then(parse_type()) @@ -117,7 +116,7 @@ pub(super) fn trait_implementation() -> impl NoirParser { .then_ignore(just(Token::RightBrace)) .map(|args| { let (((other_args, object_type), where_clause), items) = args; - let (((is_comptime, impl_generics), trait_name), trait_generics) = other_args; + let ((impl_generics, trait_name), trait_generics) = other_args; TopLevelStatement::TraitImpl(NoirTraitImpl { impl_generics, @@ -126,7 +125,6 @@ pub(super) fn trait_implementation() -> impl NoirParser { object_type, items, where_clause, - is_comptime, }) }) } @@ -151,7 +149,7 @@ fn trait_implementation_body() -> impl NoirParser> { function.or(alias).repeated() } -fn where_clause() -> impl NoirParser> { +pub(super) fn where_clause() -> impl NoirParser> { struct MultiTraitConstraint { typ: UnresolvedType, trait_bounds: Vec, @@ -163,7 +161,7 @@ fn where_clause() -> impl NoirParser> { .map(|(typ, trait_bounds)| MultiTraitConstraint { typ, trait_bounds }); keyword(Keyword::Where) - .ignore_then(constraints.separated_by(just(Token::Comma))) + .ignore_then(constraints.separated_by(just(Token::Comma)).allow_trailing()) .or_not() .map(|option| option.unwrap_or_default()) .map(|x: Vec| { @@ -184,11 +182,9 @@ fn trait_bounds() -> impl NoirParser> { trait_bound().separated_by(just(Token::Plus)).at_least(1).allow_trailing() } -fn trait_bound() -> impl NoirParser { - path().then(generic_type_args(parse_type())).map(|(trait_path, trait_generics)| TraitBound { - trait_path, - trait_generics, - trait_id: None, +pub(super) fn trait_bound() -> impl NoirParser { + path_no_turbofish().then(generic_type_args(parse_type())).map(|(trait_path, trait_generics)| { + TraitBound { trait_path, trait_generics, trait_id: None } }) } @@ -215,6 +211,7 @@ mod test { "trait GenericTrait { fn elem(&mut self, index: Field) -> T; }", "trait GenericTraitWithConstraints where T: SomeTrait { fn elem(self, index: Field) -> T; }", "trait TraitWithMultipleGenericParams where A: SomeTrait, B: AnotherTrait { let Size: Field; fn zero() -> Self; }", + "trait TraitWithMultipleGenericParams where A: SomeTrait, B: AnotherTrait, { let Size: Field; fn zero() -> Self; }", ], ); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs index cecc1cbcd4c..7c2bdcb9fa3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs @@ -1,6 +1,7 @@ +use super::path::path_no_turbofish; use super::primitives::token_kind; use super::{ - expression_with_precedence, keyword, nothing, parenthesized, path, NoirParser, ParserError, + expression_with_precedence, keyword, nothing, parenthesized, NoirParser, ParserError, ParserErrorReason, Precedence, }; use crate::ast::{ @@ -14,7 +15,7 @@ use crate::token::{Keyword, Token, TokenKind}; use chumsky::prelude::*; use noirc_errors::Span; -pub(super) fn parse_type<'a>() -> impl NoirParser + 'a { +pub fn parse_type<'a>() -> impl NoirParser + 'a { recursive(parse_type_inner) } @@ -132,7 +133,7 @@ fn quoted_type() -> impl NoirParser { /// This is the type of an already resolved type. /// The only way this can appear in the token input is if an already resolved `Type` object /// was spliced into a macro's token stream via the `$` operator. -fn resolved_type() -> impl NoirParser { +pub(super) fn resolved_type() -> impl NoirParser { token_kind(TokenKind::QuotedType).map_with_span(|token, span| match token { Token::QuotedType(id) => UnresolvedTypeData::Resolved(id).with_span(span), _ => unreachable!("token_kind(QuotedType) guarantees we parse a quoted type"), @@ -180,7 +181,7 @@ pub(super) fn int_type() -> impl NoirParser { pub(super) fn named_type<'a>( type_parser: impl NoirParser + 'a, ) -> impl NoirParser + 'a { - path().then(generic_type_args(type_parser)).map_with_span(|(path, args), span| { + path_no_turbofish().then(generic_type_args(type_parser)).map_with_span(|(path, args), span| { UnresolvedTypeData::Named(path, args, false).with_span(span) }) } @@ -188,13 +189,22 @@ pub(super) fn named_type<'a>( pub(super) fn named_trait<'a>( type_parser: impl NoirParser + 'a, ) -> impl NoirParser + 'a { - keyword(Keyword::Impl).ignore_then(path()).then(generic_type_args(type_parser)).map_with_span( - |(path, args), span| UnresolvedTypeData::TraitAsType(path, args).with_span(span), - ) + keyword(Keyword::Impl) + .ignore_then(path_no_turbofish()) + .then(generic_type_args(type_parser)) + .map_with_span(|(path, args), span| { + UnresolvedTypeData::TraitAsType(path, args).with_span(span) + }) } pub(super) fn generic_type_args<'a>( type_parser: impl NoirParser + 'a, +) -> impl NoirParser> + 'a { + required_generic_type_args(type_parser).or_not().map(Option::unwrap_or_default) +} + +pub(super) fn required_generic_type_args<'a>( + type_parser: impl NoirParser + 'a, ) -> impl NoirParser> + 'a { type_parser .clone() @@ -208,8 +218,6 @@ pub(super) fn generic_type_args<'a>( .allow_trailing() .at_least(1) .delimited_by(just(Token::Less), just(Token::Greater)) - .or_not() - .map(Option::unwrap_or_default) } pub(super) fn array_type<'a>( @@ -241,7 +249,7 @@ fn type_expression() -> impl NoirParser { /// This parser is the same as `type_expression()`, however, it continues parsing and /// emits a parser error in the case of an invalid type expression rather than halting the parser. 
-fn type_expression_validated() -> impl NoirParser { +pub(super) fn type_expression_validated() -> impl NoirParser { type_expression_inner().validate(|expr, span, emit| { let type_expr = UnresolvedTypeExpression::from_expr(expr, span); match type_expr { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index cbc15da20ff..9124567b4e5 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -82,8 +82,9 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation &mut context, program.clone().into_sorted(), root_file_id, - None, // No debug_comptime_in_file - &[], // No macro processors + None, // No debug_comptime_in_file + false, // Disallow arithmetic generics + &[], // No macro processors )); } (program, context, errors) @@ -2494,3 +2495,427 @@ fn bit_not_on_untyped_integer() { "#; assert_no_errors(src); } + +#[test] +fn duplicate_struct_field() { + let src = r#" + struct Foo { + x: i32, + x: i32, + } + + fn main() {} + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::DefinitionError(DefCollectorErrorKind::DuplicateField { + first_def, + second_def, + }) = &errors[0].0 + else { + panic!("Expected a duplicate field error, got {:?}", errors[0].0); + }; + + assert_eq!(first_def.to_string(), "x"); + assert_eq!(second_def.to_string(), "x"); + + assert_eq!(first_def.span().start(), 26); + assert_eq!(second_def.span().start(), 42); +} + +#[test] +fn trait_constraint_on_tuple_type() { + let src = r#" + trait Foo { + fn foo(self, x: A) -> bool; + } + + fn bar(x: (T, U), y: V) -> bool where (T, U): Foo { + x.foo(y) + } + + fn main() {}"#; + assert_no_errors(src); +} + +#[test] +fn turbofish_in_constructor_generics_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let _ = Foo:: { x: 1 }; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::GenericCountMismatch { .. }), + )); +} + +#[test] +fn turbofish_in_constructor() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let x: Field = 0; + let _ = Foo:: { x: x }; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, expr_typ, .. + }) = &errors[0].0 + else { + panic!("Expected a type mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(expected_typ, "i32"); + assert_eq!(expr_typ, "Field"); +} + +#[test] +fn turbofish_in_middle_of_variable_unsupported_yet() { + let src = r#" + struct Foo { + x: T + } + + impl Foo { + fn new(x: T) -> Self { + Foo { x } + } + } + + fn main() { + let _ = Foo::::new(1); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::UnsupportedTurbofishUsage { .. 
}), + )); +} + +#[test] +fn turbofish_in_struct_pattern() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value: Field = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + assert_no_errors(src); +} + +#[test] +fn turbofish_in_struct_pattern_errors_if_type_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value: Field = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::TypeMismatchWithSource { .. }) = &errors[0].0 + else { + panic!("Expected a type mismatch error, got {:?}", errors[0].0); + }; +} + +#[test] +fn turbofish_in_struct_pattern_generic_count_mismatch() { + let src = r#" + struct Foo { + x: T + } + + fn main() { + let value = 0; + let Foo:: { x } = Foo { x: value }; + let _ = x; + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::GenericCountMismatch { + item, + expected, + found, + .. + }) = &errors[0].0 + else { + panic!("Expected a generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(item, "struct Foo"); + assert_eq!(*expected, 1); + assert_eq!(*found, 2); +} + +#[test] +fn incorrect_generic_count_on_struct_impl() { + let src = r#" + struct Foo {} + impl Foo {} + fn main() {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::IncorrectGenericCount { + actual, + expected, + .. + }) = errors[0].0 + else { + panic!("Expected an incorrect generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(actual, 1); + assert_eq!(expected, 0); +} + +#[test] +fn incorrect_generic_count_on_type_alias() { + let src = r#" + struct Foo {} + type Bar = Foo; + fn main() {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::IncorrectGenericCount { + actual, + expected, + .. + }) = errors[0].0 + else { + panic!("Expected an incorrect generic count mismatch error, got {:?}", errors[0].0); + }; + + assert_eq!(actual, 1); + assert_eq!(expected, 0); +} + +#[test] +fn uses_self_type_for_struct_function_call() { + let src = r#" + struct S { } + + impl S { + fn one() -> Field { + 1 + } + + fn two() -> Field { + Self::one() + Self::one() + } + } + + fn main() {} + "#; + assert_no_errors(src); +} + +#[test] +fn uses_self_type_inside_trait() { + let src = r#" + trait Foo { + fn foo() -> Self { + Self::bar() + } + + fn bar() -> Self; + } + + impl Foo for Field { + fn bar() -> Self { + 1 + } + } + + fn main() { + let _: Field = Foo::foo(); + } + "#; + assert_no_errors(src); +} + +#[test] +fn uses_self_type_in_trait_where_clause() { + let src = r#" + trait Trait { + fn trait_func() -> bool; + } + + trait Foo where Self: Trait { + fn foo(self) -> bool { + self.trait_func() + } + } + + struct Bar { + + } + + impl Foo for Bar { + + } + + fn main() {} + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::UnresolvedMethodCall { method_name, .. 
}) = + &errors[0].0 + else { + panic!("Expected an unresolved method call error, got {:?}", errors[0].0); + }; + + assert_eq!(method_name, "trait_func"); +} + +#[test] +fn do_not_eagerly_error_on_cast_on_type_variable() { + let src = r#" + pub fn foo(x: T, f: fn(T) -> U) -> U { + f(x) + } + + fn main() { + let x: u8 = 1; + let _: Field = foo(x, |x| x as Field); + } + "#; + assert_no_errors(src); +} + +#[test] +fn error_on_cast_over_type_variable() { + let src = r#" + pub fn foo(x: T, f: fn(T) -> U) -> U { + f(x) + } + + fn main() { + let x = "a"; + let _: Field = foo(x, |x| x as Field); + } + "#; + + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::TypeMismatch { .. }) + )); +} + +#[test] +fn trait_impl_for_a_type_that_implements_another_trait() { + let src = r#" + trait One { + fn one(self) -> i32; + } + + impl One for i32 { + fn one(self) -> i32 { + self + } + } + + trait Two { + fn two(self) -> i32; + } + + impl Two for T where T: One { + fn two(self) -> i32 { + self.one() + 1 + } + } + + fn use_it(t: T) -> i32 where T: Two { + Two::two(t) + } + + fn main() {} + "#; + assert_no_errors(src); +} + +#[test] +fn trait_impl_for_a_type_that_implements_another_trait_with_another_impl_used() { + let src = r#" + trait One { + fn one(self) -> i32; + } + + impl One for i32 { + fn one(self) -> i32 { + let _ = self; + 1 + } + } + + trait Two { + fn two(self) -> i32; + } + + impl Two for T where T: One { + fn two(self) -> i32 { + self.one() + 1 + } + } + + impl Two for u32 { + fn two(self) -> i32 { + let _ = self; + 0 + } + } + + fn use_it(t: u32) -> i32 { + Two::two(t) + } + + fn main() {} + "#; + assert_no_errors(src); +} diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json index 689b72435ef..b9199bea4bd 100644 --- a/noir/noir-repo/cspell.json +++ b/noir/noir-repo/cspell.json @@ -126,6 +126,7 @@ "memset", "merkle", "metas", + "microcontroller", "minreq", "monomorphization", "monomorphize", @@ -135,6 +136,7 @@ "monomorphizing", "montcurve", "MSRV", + "multicall", "nand", "nargo", "neovim", diff --git a/noir/noir-repo/docs/docs/explainers/cspell.json b/noir/noir-repo/docs/docs/explainers/cspell.json new file mode 100644 index 00000000000..c60b0a597b1 --- /dev/null +++ b/noir/noir-repo/docs/docs/explainers/cspell.json @@ -0,0 +1,5 @@ +{ + "words": [ + "Cryptdoku" + ] +} diff --git a/noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md b/noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md new file mode 100644 index 00000000000..c8a42c379e6 --- /dev/null +++ b/noir/noir-repo/docs/docs/explainers/explainer-writing-noir.md @@ -0,0 +1,173 @@ +--- +title: Writing Performant Noir +description: Understand new considerations when writing Noir +keywords: [Noir, programming, rust] +tags: [Optimization] +sidebar_position: 0 +--- + + +This article intends to set you up with key concepts essential for writing more viable applications that use zero knowledge proofs, namely around efficient circuits. + +## Context - 'Efficient' is subjective + +When writing a web application for a performant computer with high-speed internet connection, writing efficient code sometimes is seen as an afterthought only if needed. Large multiplications running at the innermost of nested loops may not even be on a dev's radar. +When writing firmware for a battery-powered microcontroller, you think of cpu cycles as rations to keep within a product's power budget. 
+
+> Code is written to create applications that perform specific tasks within specific constraints
+
+And these constraints differ depending on where the compiled code is executed.
+
+### The Ethereum Virtual Machine (EVM)
+
+In scenarios where extremely low gas costs are required for an Ethereum application to be viable/competitive, Ethereum smart contract developers get into what is colloquially known as "*gas golfing*": finding the lowest execution cost of their compiled code (EVM bytecode) that still achieves the specific task.
+
+The equivalent optimization task when writing zk circuits is affectionately referred to as "*gate golfing*": finding the lowest gate representation of the compiled Noir code.
+
+### Coding for circuits - a paradigm shift
+
+In zero knowledge cryptography, code is compiled to "circuits" consisting of arithmetic gates, and gate count is the significant cost. Depending on the proving system, gate count is roughly linearly proportional to proving time, so from a product point of view it should be kept as low as possible.
+
+Whilst writing efficient code for web apps and for Solidity already differs in a few key ways, writing efficient circuits involves a different set of considerations altogether. It is a bit of a paradigm shift, like writing code for GPUs for the first time...
+
+For example, drawing a circle at (0, 0) of radius `r`:
+- For a single CPU thread,
+```
+for theta in 0..2*pi {
+  let x = r * cos(theta);
+  let y = r * sin(theta);
+  draw(x, y);
+} // note: would do 0 - pi/2 and draw +ve/-ve x and y.
+```
+
+- For GPUs (simultaneous parallel calls with x, y across image),
+```
+if (x^2 + y^2 = r^2) {
+  draw(x, y);
+}
+```
+
+([Related](https://www.youtube.com/watch?v=-P28LKWTzrI))
+
+Whilst this CPU-to-GPU comparison does not translate to circuits exactly, it is intended to exemplify the difference in intuition when coding for different machine capabilities/constraints.
+
+### Context Takeaway
+
+For those coming from a primarily web app background, this article will explain what you need to consider when writing circuits. Furthermore, for those experienced in writing efficient machine code, prepare to shift what you think is efficient 😬
+
+## Translating from Rust
+
+For some applications using Noir, existing code in another language might be a convenient starting point from which to optimize the gate count.
+
+:::note
+Many valuable functions and algorithms have been written in more established languages (C/C++), and converted to modern ones (like Rust).
+:::
+
+Fortunately for Noir developers, when a particular function is needed, a Rust implementation can usually be ported to Noir with some key changes. While the compiler does a decent amount of optimization, it won't be able to change code that has been optimized for clock cycles into code optimized for arithmetic gates.
+
+A few things to do when converting Rust code to Noir (a short sketch illustrating several of these points follows after the next heading):
+- There is no `println!` macro; use the `println` function (same for `assert_eq`)
+- No early `return` in functions. Restructure the logic and constrain via assertion instead
+- No passing by reference. Remove the `&` operator to pass by value (copy)
+- No boolean operators (`&&`, `||`). Use bitwise operators (`&`, `|`) with boolean values
+- No type `usize`. Use types `u8`, `u32`, `u64`, ...
+- `main`'s return value must be public, `pub`
+- No `const`; use `global`
+- Noir's LSP is your friend; its error messages should be informative enough to resolve syntax issues.
+
+## Writing efficient Noir for performant products
+
+The following points help build an intuition for which patterns are gate-efficient and which are not.
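+Before diving in, here is a rough illustration of the Rust-conversion checklist above (a hedged sketch with made-up names and values, not a canonical translation):
+
+```
+// `global` replaces Rust's `const`
+global LIMIT: u32 = 10;
+
+fn all_below_limit(values: [u32; 4]) -> bool {
+    // No early `return`: accumulate a flag instead
+    let mut ok = true;
+    for i in 0..4 {
+        ok = ok & (values[i] < LIMIT); // bitwise `&` on booleans instead of `&&`
+    }
+    ok
+}
+
+fn main(values: [u32; 4]) -> pub bool { // `main`'s return is `pub`
+    let ok = all_below_limit(values);
+    assert(ok); // constrain via assertion
+    ok
+}
+```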
+
+:::note
+A Noir program makes a statement that can be verified.
+:::
+
+It compiles to a structure that represents the calculation, and can assert results within the calculation at any stage (via the `constrain` keyword).
+
+A Noir program compiles to an Abstract Circuit Intermediate Representation (ACIR), which is:
+ - A tree structure
+ - Leaves (inputs) are the `Field` type
+ - Nodes contain arithmetic operations to combine them (gates)
+ - The root is the final result (return value)
+
+:::tip
+The command `nargo info` shows the program's circuit size, and is useful for comparing the impact of changes.
+You can dig deeper with the `--print-acir` param to take a closer look at individual ACIR opcodes, and ask the proving backend for its gate count (e.g. for Barretenberg, `bb gates -b ./target/program.json`).
+:::
+
+### Use the `Field` type
+
+Since the native type of values in circuits is `Field`, using `Field` for variables in Noir means fewer gates spent converting them under the hood.
+
+:::tip
+Where possible, use the `Field` type for values. Using smaller value types, and bit-packing strategies, will result in MORE gates.
+:::
+
+**Note:** You still need to remain mindful of overflow. Types with fewer bits may be used to limit the range of possible values prior to a calculation.
+
+### Use arithmetic over non-arithmetic operations
+
+Since circuits are made of arithmetic gates, the cost of an arithmetic operation tends to be one gate, whereas in procedural code it costs several clock cycles.
+
+Conversely, non-arithmetic operators take multiple gates, versus one clock cycle in procedural code.
+
+| (cost\op) | arithmetic (`*`, `+`) | bit-wise ops (e.g. `<`, `\|`, `>>`) |
+| - | - | - |
+| **cycles** | 10+ | 1 |
+| **gates** | 1 | 10+ |
+
+Bit-wise operations (e.g. bit shifts `<<` and `>>`), although common in general programming and especially in clock-cycle optimizations, are by contrast expensive in gates when performed within circuits.
+
+Translate away from bit shifts when writing constrained functions for the best performance.
+
+On the flip side, feel free to use bit shifts in unconstrained functions and tests if necessary, as these are executed outside of circuits and do not incur a performance hit.
+
+### Use static over dynamic values
+
+Another general theme that manifests in different ways is that static reads are represented with fewer gates than dynamic ones.
+
+Reading from read-only memory (ROM) adds fewer gates than reading from random-access memory (RAM), roughly 2 vs ~3.25 due to the additional bounds checks. Arrays of fixed length (even if used at lower capacity) will generate fewer gates than dynamic storage.
+
+Related to this, if an index used to access an array is not known at compile time (i.e. unknown until run time), then ROM will be converted to RAM, expanding the gate count.
+
+:::tip
+Use arrays and indices that are known at compile time where possible.
+Using `assert_constant(i);` before an index `i` is used in an array will give a compile error if `i` is NOT known at compile time.
+:::
+
+### Leverage unconstrained execution
+
+Constrained verification can leverage unconstrained execution; this is especially useful for operations that would otherwise take many gates.
+Use an [unconstrained function](../noir/concepts/unconstrained.md) to perform gate-heavy calculations, then verify and constrain the result.
+
+E.g. division generates more gates than multiplication, so it is more efficient to calculate the quotient in an unconstrained function, then constrain that the quotient times the divisor (plus any remainder) equals the dividend.
+
+Use `if is_unconstrained() { ... }` to conditionally execute code depending on whether it is being called in an unconstrained or a constrained context.
+
+## Advanced
+
+Unless you're well into the depths of gate optimization, this advanced section can be ignored.
+
+### Combine arithmetic operations
+
+A Noir program can be honed further by combining arithmetic operators in a way that makes the most of each constraint of the backend proving system. This matters in scenarios where the backend does not already do it perfectly.
+
+E.g. the Barretenberg backend (the current default for Noir) is a width-4 PLONKish constraint system
+$ w_1*w_2*q_m + w_1*q_1 + w_2*q_2 + w_3*q_3 + w_4*q_4 + q_c = 0 $
+
+Here there is one occurrence of two witnesses ($w_1$, $w_2$) being multiplied together, plus each of witnesses 1-4 ($w_1$ .. $w_4$) multiplied by a corresponding circuit constant ($q_1$ .. $q_4$), plus a final circuit constant ($q_c$).
+
+Use `nargo info --print-acir` to inspect the ACIR opcodes (and the proving backend for its gates); this may reveal opportunities to amend the order of operations and reduce the number of constraints.
+
+#### Variable as witness vs expression
+
+If you've come this far and really know what you're doing at the equation level, a temporary lever (that will become unnecessary/useless over time) is `std::as_witness`. This informs the compiler to save a variable as a witness rather than as an expression.
+
+The compiler will mostly be correct and optimal, but this may help in some near-term edge cases that it does not yet optimize.
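+A minimal sketch of the hint described above (assuming `std::as_witness` is available in your Noir version; the variable names are made up):
+
+```
+fn main(x: Field, y: Field) {
+    // An intermediate value the compiler may otherwise keep as an expression.
+    let z = x * y + x;
+    // Hint: save `z` as its own witness rather than as an expression.
+    std::as_witness(z);
+    assert(z != 0);
+}
+```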
+Note: When used incorrectly it will create **less** efficient circuits (higher gate count). + +## References +- Guillaume's ["`Cryptdoku`" talk](https://www.youtube.com/watch?v=MrQyzuogxgg) (Jun'23) +- Tips from Tom, Jake and Zac. +- [Idiomatic Noir](https://www.vlayer.xyz/blog/idiomatic-noir-part-1-collections) blog post diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/barretenberg/_category_.json b/noir/noir-repo/docs/docs/getting_started/backend/_category_.json similarity index 63% rename from noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/barretenberg/_category_.json rename to noir/noir-repo/docs/docs/getting_started/backend/_category_.json index 27a8e89228d..b82e92beb0c 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/barretenberg/_category_.json +++ b/noir/noir-repo/docs/docs/getting_started/backend/_category_.json @@ -1,6 +1,6 @@ { "position": 1, - "label": "Install Barretenberg", + "label": "Install Proving Backend", "collapsible": true, "collapsed": true } diff --git a/noir/noir-repo/docs/docs/getting_started/backend/index.md b/noir/noir-repo/docs/docs/getting_started/backend/index.md new file mode 100644 index 00000000000..7192d954877 --- /dev/null +++ b/noir/noir-repo/docs/docs/getting_started/backend/index.md @@ -0,0 +1,31 @@ +--- +title: Proving Backend Installation +description: Proving backends offer command line tools for proving and verifying Noir programs. This page describes how to install `bb` as an example. +keywords: [ + Proving + Backend + Barretenberg + bb + bbup + Installation + Terminal + Command + CLI + Version +] +pagination_next: getting_started/hello_noir/index +--- + +Proving backends each provide their own tools for working with Noir programs, providing functionality like proof generation, proof verification, and verifier smart contract generation. + +For the latest information on tooling provided by each proving backend, installation instructions, Noir version compatibility... you may refer to the proving backends' own documentation. + +You can find the full list of proving backends compatible with Noir in [Awesome Noir](https://github.com/noir-lang/awesome-noir/?tab=readme-ov-file#proving-backends). + +## Example: Installing `bb` + +`bb` is the CLI tool provided by the [Barretenberg proving backend](https://github.com/AztecProtocol/barretenberg) developed by Aztec Labs. + +You can find the instructions for installation in [`bb`'s documentation](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/readme.md#installation). + +Once installed, we are ready to start working on [our first Noir program](../hello_noir/index.md). diff --git a/noir/noir-repo/docs/docs/getting_started/barretenberg/index.md b/noir/noir-repo/docs/docs/getting_started/barretenberg/index.md deleted file mode 100644 index 0102c86770b..00000000000 --- a/noir/noir-repo/docs/docs/getting_started/barretenberg/index.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: Barretenberg Installation -description: bb is a command line tool for interacting with Aztec's proving backend Barretenberg. This page is a quick guide on how to install `bb` -keywords: [ - Barretenberg - bb - Installation - Terminal Commands - Version Check - Nightlies - Specific Versions - Branches -] -pagination_next: getting_started/hello_noir/index ---- - -`bb` is the CLI tool for generating and verifying proofs for Noir programs using the Barretenberg proving library. 
It also allows generating solidity verifier contracts for which you can verify contracts which were constructed using `bb`. - -## Installing `bb` - -Open a terminal on your machine, and write: - -##### macOS (Apple Silicon) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.zshrc -bbup -v 0.41.0 -``` - -##### macOS (Intel) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.zshrc -bbup -v 0.41.0 -``` - -##### Linux (Bash) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.bashrc -bbup -v 0.41.0 -``` - -Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md b/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md index 1ade3f09ae3..3baae217eb3 100644 --- a/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md +++ b/noir/noir-repo/docs/docs/getting_started/hello_noir/index.md @@ -17,22 +17,25 @@ sidebar_position: 1 --- -Now that we have installed Nargo, it is time to make our first hello world program! +Now that we have installed Nargo and a proving backend, it is time to make our first hello world program! -## Create a Project Directory +### 1. Create a new project directory Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home -directory to house our Noir programs. +directory to house our first Noir program. -For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by -running: +Create the directory and change directory into it by running: ```sh mkdir ~/projects cd ~/projects ``` -## Create Our First Nargo Project +## Nargo + +Nargo provides the ability to initiate and execute Noir projects. Read the [Nargo installation](../installation/index.md) section to learn more about Nargo and how to install it. + +### 2. Create a new Noir project Now that we are in the projects directory, create a new Nargo project by running: @@ -40,18 +43,15 @@ Now that we are in the projects directory, create a new Nargo project by running nargo new hello_world ``` -> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for -> demonstration. -> -> In production, the common practice is to name the project folder as `circuits` for better -> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, -> `test`). +`hello_world` can be any arbitrary project name, we are simply using `hello_world` for demonstration. + +In production, it is common practice to name the project folder, `circuits`, for clarity amongst other folders in the codebase (like: `contracts`, `scripts`, and `test`). A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and _Nargo.toml_ which contain the source code and environmental options of your Noir program respectively. -### Intro to Noir Syntax +#### Intro to Noir Syntax Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: @@ -81,7 +81,7 @@ The Noir syntax `assert` can be interpreted as something similar to constraints For more Noir syntax, check the [Language Concepts](../../noir/concepts/comments.md) chapter. -## Build In/Output Files +### 3. 
Build in/output files

Change directory into _hello_world_ and build in/output files for your Noir program by running:

@@ -92,7 +92,7 @@ nargo check

A _Prover.toml_ file will be generated in your project directory, to allow specifying input values to the program.

-## Execute Our Noir Program
+### 4. Execute the Noir program

Now that the project is set up, we can execute our Noir program.

@@ -111,34 +111,41 @@ nargo execute witness-name

The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`.

-## Prove Our Noir Program
+The command also automatically compiles your Noir program if it was not compiled already or has been edited since the last compilation; you may notice the compiled artifact being written to the file `./target/hello_world.json`.
+
+## Proving Backend

-:::info
+Proving backends provide the ability to generate and verify proofs of Noir program executions, complementing Noir's own tooling that compiles and executes the programs. Read the [proving backend installation](../backend/index.md) section to learn more about proving backends and how to install them.

-Nargo no longer handles communicating with backends in order to generate proofs. In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md).
+Barretenberg is used as an example here to demonstrate how proving and verifying could be implemented and used. Read the [`bb` installation](../backend/index.md#example-installing-bb) section for how to install Barretenberg's CLI tool; refer to [`bb`'s documentation](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/readme.md) for full details about the tool.

-:::
+### 5. Prove an execution of the Noir program

-Prove the valid execution of your Noir program using `bb`:
+Using Barretenberg as an example, prove the valid execution of your Noir program by running:

```sh
-bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof
+bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./target/proof
```

-A new file called `proof` will be generated in your project directory, containing the generated proof for your program.
+The generated proof will then be written to the file `./target/proof`.

-## Verify Our Noir Program
+### 6. Verify the execution proof

Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file.

-Verify your proof by running:
+Using Barretenberg as an example, compute the verification key for the Noir program by running:

```sh
bb write_vk -b ./target/hello_world.json -o ./target/vk
-bb verify -k ./target/vk -p ./proof
```

-The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead.
+And verify your proof by running:
+
+```sh
+bb verify -k ./target/vk -p ./target/proof
+```
+
+If successful, the verification will complete in silence; if unsuccessful, the command will log the corresponding error.

Congratulations, you have now created and verified a proof for your very first Noir program!
diff --git a/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md b/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md index 525b8dabdd8..96e653f6c08 100644 --- a/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md +++ b/noir/noir-repo/docs/docs/getting_started/hello_noir/project_breakdown.md @@ -8,8 +8,7 @@ keywords: sidebar_position: 2 --- -This section breaks down our hello world program from the previous section. We elaborate on the project -structure and what the `prove` and `verify` commands did. +This section breaks down our hello world program from the previous section. ## Anatomy of a Nargo Project diff --git a/noir/noir-repo/docs/docs/getting_started/installation/index.md b/noir/noir-repo/docs/docs/getting_started/installation/index.md index 4ef86aa5914..53ea9c7891c 100644 --- a/noir/noir-repo/docs/docs/getting_started/installation/index.md +++ b/noir/noir-repo/docs/docs/getting_started/installation/index.md @@ -19,11 +19,9 @@ keywords: [ pagination_next: getting_started/hello_noir/index --- -`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. +`nargo` is a tool for working with Noir programs on the CLI, providing you with the ability to start new projects, compile, execute and test Noir programs from the terminal. -With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. - -Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. +The name is inspired by Rust's package manager `cargo`; and similar to Rust's `rustup`, Noir also has an easy installation script `noirup`. ## Installing Noirup diff --git a/noir/noir-repo/docs/docs/noir/standard_library/recursion.md b/noir/noir-repo/docs/docs/noir/standard_library/recursion.md index 8cfb37fc52d..7f4dcebf084 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/recursion.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/recursion.md @@ -49,16 +49,16 @@ fn main( proof_b : [Field; 93], ) { std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), + verification_key, + proof, + public_inputs, key_hash ); std::verify_proof( - verification_key.as_slice(), - proof_b.as_slice(), - public_inputs.as_slice(), + verification_key, + proof_b, + public_inputs, key_hash ); } diff --git a/noir/noir-repo/docs/docs/getting_started/tooling/noir_codegen.md b/noir/noir-repo/docs/docs/reference/noir_codegen.md similarity index 97% rename from noir/noir-repo/docs/docs/getting_started/tooling/noir_codegen.md rename to noir/noir-repo/docs/docs/reference/noir_codegen.md index f7505bef7ab..db8f07dc22e 100644 --- a/noir/noir-repo/docs/docs/getting_started/tooling/noir_codegen.md +++ b/noir/noir-repo/docs/docs/reference/noir_codegen.md @@ -33,7 +33,7 @@ yarn add @noir-lang/noir_codegen -D ``` ### Nargo library -Make sure you have Nargo, v0.25.0 or greater, installed. If you don't, follow the [installation guide](../installation/index.md). +Make sure you have Nargo, v0.25.0 or greater, installed. If you don't, follow the [installation guide](../getting_started/installation/index.md). 
If you're in a new project, make a `circuits` folder and create a new Noir library: diff --git a/noir/noir-repo/docs/docs/tutorials/noirjs_app.md b/noir/noir-repo/docs/docs/tutorials/noirjs_app.md index cbb1938a5c6..eac28168445 100644 --- a/noir/noir-repo/docs/docs/tutorials/noirjs_app.md +++ b/noir/noir-repo/docs/docs/tutorials/noirjs_app.md @@ -14,13 +14,13 @@ You can find the complete app code for this guide [here](https://github.com/noir :::note -Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.27.x matches `noir_js@0.27.x`, etc. +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.31.x matches `noir_js@0.31.x`, etc. -In this guide, we will be pinned to 0.27.0. +In this guide, we will be pinned to 0.31.0. ::: -Before we start, we want to make sure we have Node and Nargo installed. +Before we start, we want to make sure we have Node, Nargo and the Barretenberg proving system (`bb`) installed. We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). @@ -30,6 +30,9 @@ As for `Nargo`, we can follow the [Nargo guide](../getting_started/installation/ curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash ``` +Follow the instructions on [this page](https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/cpp/src/barretenberg/bb#installation) to install `bb`. +Version 0.41.0 is compatible with `nargo` version 0.31.0, which you can install with `bbup -v 0.41.0` once `bbup` is installed. + Easy enough. Onwards! ## Our project @@ -42,13 +45,17 @@ In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! Run: -`nargo new circuit` +```bash +nargo new circuit +``` And... That's about it. Your program is ready to be compiled and run. To compile, let's `cd` into the `circuit` folder to enter our project, and call: -`nargo compile` +```bash +nargo compile +``` This compiles our circuit into `json` format and add it to a new `target` folder. @@ -92,30 +99,53 @@ Before we proceed with any coding, let's get our environment tailored for Noir. In your freshly minted `vite-project` folder, create a new file named `vite.config.js` and open it in your code editor. 
Paste the following to set the stage: ```javascript -import { defineConfig } from "vite"; -import copy from "rollup-plugin-copy"; - -export default defineConfig({ - esbuild: { - target: "esnext", +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); }, - optimizeDeps: { - esbuildOptions: { - target: "esnext", - }, - }, - plugins: [ - copy({ - targets: [ - { src: "node_modules/**/*.wasm", dest: "node_modules/.vite/dist" }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + build: { + target: 'esnext', + rollupOptions: { + external: ['@aztec/bb.js'] + } + }, + optimizeDeps: { + esbuildOptions: { + target: 'esnext' + } + }, + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? wasmContentTypePlugin : [], ], - copySync: true, - hook: "buildStart", - }), - ], - server: { - port: 3000, - }, + }; + } + + return {}; }); ``` @@ -124,7 +154,7 @@ export default defineConfig({ Now that our stage is set, install the necessary NoirJS packages along with our other dependencies: ```bash -npm install && npm install @noir-lang/backend_barretenberg@0.27.0 @noir-lang/noir_js@0.27.0 +npm install && npm install @noir-lang/backend_barretenberg@0.31.0 @noir-lang/noir_js@0.31.0 npm install rollup-plugin-copy --save-dev ``` @@ -193,17 +223,6 @@ Our love for Noir needs undivided attention, so let's just open `main.js` and de Start by pasting in this boilerplate code: ```js -const setup = async () => { - await Promise.all([ - import('@noir-lang/noirc_abi').then((module) => - module.default(new URL('@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm', import.meta.url).toString()), - ), - import('@noir-lang/acvm_js').then((module) => - module.default(new URL('@noir-lang/acvm_js/web/acvm_js_bg.wasm', import.meta.url).toString()), - ), - ]); -}; - function display(container, msg) { const c = document.getElementById(container); const p = document.createElement('p'); @@ -222,8 +241,6 @@ document.getElementById('submitGuess').addEventListener('click', async () => { The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 -As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. - :::info At this point in the tutorial, your folder structure should look like this: @@ -310,9 +327,13 @@ Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add th ```js display('logs', 'Verifying proof... 
⌛');
-const verificationKey = await backend.getVerificationKey();
-const verifier = new Verifier();
-const isValid = await verifier.verifyProof(proof, verificationKey);
+const isValid = await backend.verifyProof(proof);
+
+// or to cache and use the verification key:
+// const verificationKey = await backend.getVerificationKey();
+// const verifier = new Verifier();
+// const isValid = await verifier.verifyProof(proof, verificationKey);
+
 if (isValid) display('logs', 'Verifying proof... ✅');
 ```

@@ -325,3 +346,17 @@ You have successfully generated a client-side Noir web app!

You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs.

You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps.
+
+## UltraHonk Backend
+
+Barretenberg has recently exposed a new UltraHonk backend. We can use UltraHonk in NoirJS after version 0.33.0. Everything will be the same as the tutorial above, except that the class we need to import will change:
+```js
+import { UltraHonkBackend, UltraHonkVerifier as Verifier } from '@noir-lang/backend_barretenberg';
+```
+The backend will then be instantiated as follows:
+```js
+const backend = new UltraHonkBackend(circuit);
+```
+Then all the commands to prove and verify your circuit will be the same.
+
+The only feature currently unsupported with UltraHonk is [recursive proofs](../explainers/explainer-recursion.md).
\ No newline at end of file
diff --git a/noir/noir-repo/docs/docusaurus.config.ts b/noir/noir-repo/docs/docusaurus.config.ts
index f0c986f1c28..29f612b0109 100644
--- a/noir/noir-repo/docs/docusaurus.config.ts
+++ b/noir/noir-repo/docs/docusaurus.config.ts
@@ -14,7 +14,6 @@ export default {
  favicon: 'img/favicon.ico',
  url: 'https://noir-lang.org',
  baseUrl: '/',
-  trailingSlash: true,
  onBrokenLinks: 'throw',
  onBrokenMarkdownLinks: 'throw',
  i18n: {
diff --git a/noir/noir-repo/docs/src/components/Notes/_blackbox.mdx b/noir/noir-repo/docs/src/components/Notes/_blackbox.mdx
index 226017072c8..514ca00a7e7 100644
--- a/noir/noir-repo/docs/src/components/Notes/_blackbox.mdx
+++ b/noir/noir-repo/docs/src/components/Notes/_blackbox.mdx
@@ -1,5 +1,5 @@
 :::info

-This is a black box function. Read [this section](../../black_box_fns) to learn more about black box functions in Noir.
+This is a black box function. Read [this section](/docs/noir/standard_library/black_box_fns) to learn more about black box functions in Noir.

 :::
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md
index 34f8cd96fcd..0384ba4a0cd 100644
--- a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md
@@ -84,7 +84,7 @@ assert(x != y);

The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages.

-For more Noir syntax, check the [Language Concepts](../language_concepts/comments.md) chapter.
+For more Noir syntax, check the [Language Concepts](../language_concepts/09_comments.md) chapter.
## Build In/Output Files diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md index d353606210a..67baa00f930 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md @@ -26,5 +26,5 @@ fn main() { > `false` in _Verifier.toml_. The boolean type is most commonly used in conditionals like `if` expressions and `assert` -statements. More about conditionals is covered in the [Control Flow](../control_flow.md) and -[Assert Function](../assert.md) sections. +statements. More about conditionals is covered in the [Control Flow](../02_control_flow.md) and +[Assert Function](../04_assert.md) sections. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md index 1424ca2df14..5b4a544cf37 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md @@ -56,7 +56,7 @@ You can instantiate a new array of a fixed size with the same value repeated for let array: [Field; 32] = [0; 32]; ``` -Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices.mdx), you can just call `as_slice` on your array: +Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./05_slices.mdx), you can just call `as_slice` on your array: ```rust let array: [Field; 32] = [0; 32]; diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md index 87a09293ea8..753b6038703 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md @@ -81,7 +81,7 @@ use dep::std::scalar_mul::fixed_base_embedded_curve; ``` Lastly, as demonstrated in the -[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives.md#examples), you +[elliptic curve example](../standard_library/cryptographic_primitives/04_ec_primitives.md#examples), you can import multiple items in the same line by enclosing them in curly braces: ```rust diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md index e2b0af522f4..914856a0f43 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md @@ -213,7 +213,7 @@ you run `nargo test`. To print `println` statements in tests, use the `--show-ou Takes an optional `--exact` flag which allows you to select tests based on an exact name. -See an example on the [testing page](./testing.md). +See an example on the [testing page](./02_testing.md). 
### Options diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md index 985bb7c879d..b115a450ed3 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md @@ -26,19 +26,19 @@ fn sha256(_input : [u8; N]) -> [u8; 32] {} Here is a list of the current black box functions that are supported by UltraPlonk: - AES -- [SHA256](./cryptographic_primitives/hashes.mdx#sha256) -- [Schnorr signature verification](./cryptographic_primitives/schnorr.mdx) -- [Blake2s](./cryptographic_primitives/hashes.mdx#blake2s) -- [Pedersen Hash](./cryptographic_primitives/hashes.mdx#pedersen_hash) -- [Pedersen Commitment](./cryptographic_primitives/hashes.mdx#pedersen_commitment) -- [HashToField128Security](./cryptographic_primitives/hashes.mdx#hash_to_field) -- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification.mdx) -- [Fixed base scalar multiplication](./cryptographic_primitives/scalar.mdx) +- [SHA256](./cryptographic_primitives/00_hashes.mdx#sha256) +- [Schnorr signature verification](./cryptographic_primitives/02_schnorr.mdx) +- [Blake2s](./cryptographic_primitives/00_hashes.mdx#blake2s) +- [Pedersen Hash](./cryptographic_primitives/00_hashes.mdx#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/00_hashes.mdx#pedersen_commitment) +- [HashToField128Security](./cryptographic_primitives/00_hashes.mdx#hash_to_field) +- [ECDSA signature verification](./cryptographic_primitives/03_ecdsa_sig_verification.mdx) +- [Fixed base scalar multiplication](./cryptographic_primitives/01_scalar.mdx) - [Compute merkle root](./merkle_trees.md#compute_merkle_root) - AND - XOR - RANGE -- [Keccak256](./cryptographic_primitives/hashes.mdx#keccak256) +- [Keccak256](./cryptographic_primitives/00_hashes.mdx#keccak256) - [Recursive proof verification](./recursion.md) Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/backend/_category_.json similarity index 63% rename from noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/_category_.json rename to noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/backend/_category_.json index 27a8e89228d..b82e92beb0c 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/_category_.json +++ b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/backend/_category_.json @@ -1,6 +1,6 @@ { "position": 1, - "label": "Install Barretenberg", + "label": "Install Proving Backend", "collapsible": true, "collapsed": true } diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/backend/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/backend/index.md new file mode 100644 index 00000000000..7192d954877 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/backend/index.md @@ -0,0 +1,31 @@ +--- +title: Proving Backend Installation +description: Proving backends offer command line tools for proving and verifying Noir programs. This page describes how to install `bb` as an example. +keywords: [ + Proving + Backend + Barretenberg + bb + bbup + Installation + Terminal + Command + CLI + Version +] +pagination_next: getting_started/hello_noir/index +--- + +Proving backends each provide their own tools for working with Noir programs, providing functionality like proof generation, proof verification, and verifier smart contract generation. + +For the latest information on tooling provided by each proving backend, installation instructions, Noir version compatibility... you may refer to the proving backends' own documentation. + +You can find the full list of proving backends compatible with Noir in [Awesome Noir](https://github.com/noir-lang/awesome-noir/?tab=readme-ov-file#proving-backends). + +## Example: Installing `bb` + +`bb` is the CLI tool provided by the [Barretenberg proving backend](https://github.com/AztecProtocol/barretenberg) developed by Aztec Labs. + +You can find the instructions for installation in [`bb`'s documentation](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/readme.md#installation). + +Once installed, we are ready to start working on [our first Noir program](../hello_noir/index.md). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/barretenberg/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/barretenberg/index.md deleted file mode 100644 index 0102c86770b..00000000000 --- a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/barretenberg/index.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: Barretenberg Installation -description: bb is a command line tool for interacting with Aztec's proving backend Barretenberg. This page is a quick guide on how to install `bb` -keywords: [ - Barretenberg - bb - Installation - Terminal Commands - Version Check - Nightlies - Specific Versions - Branches -] -pagination_next: getting_started/hello_noir/index ---- - -`bb` is the CLI tool for generating and verifying proofs for Noir programs using the Barretenberg proving library. It also allows generating solidity verifier contracts for which you can verify contracts which were constructed using `bb`. 
- -## Installing `bb` - -Open a terminal on your machine, and write: - -##### macOS (Apple Silicon) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.zshrc -bbup -v 0.41.0 -``` - -##### macOS (Intel) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.zshrc -bbup -v 0.41.0 -``` - -##### Linux (Bash) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.bashrc -bbup -v 0.41.0 -``` - -Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/index.md index 1ade3f09ae3..3baae217eb3 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/index.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/index.md @@ -17,22 +17,25 @@ sidebar_position: 1 --- -Now that we have installed Nargo, it is time to make our first hello world program! +Now that we have installed Nargo and a proving backend, it is time to make our first hello world program! -## Create a Project Directory +### 1. Create a new project directory Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home -directory to house our Noir programs. +directory to house our first Noir program. -For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by -running: +Create the directory and change directory into it by running: ```sh mkdir ~/projects cd ~/projects ``` -## Create Our First Nargo Project +## Nargo + +Nargo provides the ability to initiate and execute Noir projects. Read the [Nargo installation](../installation/index.md) section to learn more about Nargo and how to install it. + +### 2. Create a new Noir project Now that we are in the projects directory, create a new Nargo project by running: @@ -40,18 +43,15 @@ Now that we are in the projects directory, create a new Nargo project by running nargo new hello_world ``` -> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for -> demonstration. -> -> In production, the common practice is to name the project folder as `circuits` for better -> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, -> `test`). +`hello_world` can be any arbitrary project name, we are simply using `hello_world` for demonstration. + +In production, it is common practice to name the project folder, `circuits`, for clarity amongst other folders in the codebase (like: `contracts`, `scripts`, and `test`). A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and _Nargo.toml_ which contain the source code and environmental options of your Noir program respectively. -### Intro to Noir Syntax +#### Intro to Noir Syntax Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: @@ -81,7 +81,7 @@ The Noir syntax `assert` can be interpreted as something similar to constraints For more Noir syntax, check the [Language Concepts](../../noir/concepts/comments.md) chapter. -## Build In/Output Files +### 3. 
Build in/output files

Change directory into _hello_world_ and build in/output files for your Noir program by running:

@@ -92,7 +92,7 @@ nargo check

A _Prover.toml_ file will be generated in your project directory, to allow specifying input values to the program.

-## Execute Our Noir Program
+### 4. Execute the Noir program

Now that the project is set up, we can execute our Noir program.

@@ -111,34 +111,41 @@ nargo execute witness-name

The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`.

-## Prove Our Noir Program
+The command also automatically compiles your Noir program if it was not compiled already or has been edited since the last compilation; you may notice the compiled artifact being written to the file `./target/hello_world.json`.
+
+## Proving Backend

-:::info
+Proving backends provide the ability to generate and verify proofs of Noir program executions, complementing Noir's own tooling that compiles and executes the programs. Read the [proving backend installation](../backend/index.md) section to learn more about proving backends and how to install them.

-Nargo no longer handles communicating with backends in order to generate proofs. In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md).
+Barretenberg is used as an example here to demonstrate how proving and verifying could be implemented and used. Read the [`bb` installation](../backend/index.md#example-installing-bb) section for how to install Barretenberg's CLI tool; refer to [`bb`'s documentation](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/readme.md) for full details about the tool.

-:::
+### 5. Prove an execution of the Noir program

-Prove the valid execution of your Noir program using `bb`:
+Using Barretenberg as an example, prove the valid execution of your Noir program by running:

```sh
-bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof
+bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./target/proof
```

-A new file called `proof` will be generated in your project directory, containing the generated proof for your program.
+The generated proof will then be written to the file `./target/proof`.

-## Verify Our Noir Program
+### 6. Verify the execution proof

Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file.

-Verify your proof by running:
+Using Barretenberg as an example, compute the verification key for the Noir program by running:

```sh
bb write_vk -b ./target/hello_world.json -o ./target/vk
-bb verify -k ./target/vk -p ./proof
```

-The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead.
+And verify your proof by running:
+
+```sh
+bb verify -k ./target/vk -p ./target/proof
+```
+
+If successful, the verification will complete in silence; if unsuccessful, the command will log the corresponding error.

Congratulations, you have now created and verified a proof for your very first Noir program!
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/project_breakdown.md b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/project_breakdown.md index 29688df148f..96e653f6c08 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/project_breakdown.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/hello_noir/project_breakdown.md @@ -8,8 +8,7 @@ keywords: sidebar_position: 2 --- -This section breaks down our hello world program from the previous section. We elaborate on the project -structure and what the `prove` and `verify` commands did. +This section breaks down our hello world program from the previous section. ## Anatomy of a Nargo Project @@ -67,6 +66,7 @@ The package section defines a number of fields including: - `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) - `backend` (optional) - `license` (optional) +- `expression_width` (optional) - Sets the default backend expression width. This field will override the default backend expression width specified by the Noir compiler (currently set to width 4). #### Dependencies section diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/installation/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/installation/index.md index 4ef86aa5914..53ea9c7891c 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/installation/index.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/getting_started/installation/index.md @@ -19,11 +19,9 @@ keywords: [ pagination_next: getting_started/hello_noir/index --- -`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. +`nargo` is a tool for working with Noir programs on the CLI, providing you with the ability to start new projects, compile, execute and test Noir programs from the terminal. -With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. - -Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. +The name is inspired by Rust's package manager `cargo`; and similar to Rust's `rustup`, Noir also has an easy installation script `noirup`. ## Installing Noirup diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/tutorials/noirjs_app.md b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/tutorials/noirjs_app.md index cbb1938a5c6..8c23b639f12 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.31.0/tutorials/noirjs_app.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.31.0/tutorials/noirjs_app.md @@ -14,13 +14,13 @@ You can find the complete app code for this guide [here](https://github.com/noir :::note -Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.27.x matches `noir_js@0.27.x`, etc. +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.31.x matches `noir_js@0.31.x`, etc. -In this guide, we will be pinned to 0.27.0. +In this guide, we will be pinned to 0.31.0. ::: -Before we start, we want to make sure we have Node and Nargo installed. 
+Before we start, we want to make sure we have Node, Nargo and the Barretenberg proving system (`bb`) installed. We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). @@ -30,6 +30,9 @@ As for `Nargo`, we can follow the [Nargo guide](../getting_started/installation/ curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash ``` +Follow the instructions on [this page](https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/cpp/src/barretenberg/bb#installation) to install `bb`. +Version 0.41.0 is compatible with `nargo` version 0.31.0, which you can install with `bbup -v 0.41.0` once `bbup` is installed. + Easy enough. Onwards! ## Our project @@ -42,13 +45,17 @@ In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! Run: -`nargo new circuit` +```bash +nargo new circuit +``` And... That's about it. Your program is ready to be compiled and run. To compile, let's `cd` into the `circuit` folder to enter our project, and call: -`nargo compile` +```bash +nargo compile +``` This compiles our circuit into `json` format and add it to a new `target` folder. @@ -92,30 +99,53 @@ Before we proceed with any coding, let's get our environment tailored for Noir. In your freshly minted `vite-project` folder, create a new file named `vite.config.js` and open it in your code editor. Paste the following to set the stage: ```javascript -import { defineConfig } from "vite"; -import copy from "rollup-plugin-copy"; - -export default defineConfig({ - esbuild: { - target: "esnext", - }, - optimizeDeps: { - esbuildOptions: { - target: "esnext", - }, +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); }, - plugins: [ - copy({ - targets: [ - { src: "node_modules/**/*.wasm", dest: "node_modules/.vite/dist" }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + build: { + target: 'esnext', + rollupOptions: { + external: ['@aztec/bb.js'] + } + }, + optimizeDeps: { + esbuildOptions: { + target: 'esnext' + } + }, + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? 
wasmContentTypePlugin : [], ], - copySync: true, - hook: "buildStart", - }), - ], - server: { - port: 3000, - }, + }; + } + + return {}; }); ``` @@ -124,7 +154,7 @@ export default defineConfig({ Now that our stage is set, install the necessary NoirJS packages along with our other dependencies: ```bash -npm install && npm install @noir-lang/backend_barretenberg@0.27.0 @noir-lang/noir_js@0.27.0 +npm install && npm install @noir-lang/backend_barretenberg@0.31.0 @noir-lang/noir_js@0.31.0 npm install rollup-plugin-copy --save-dev ``` @@ -193,17 +223,6 @@ Our love for Noir needs undivided attention, so let's just open `main.js` and de Start by pasting in this boilerplate code: ```js -const setup = async () => { - await Promise.all([ - import('@noir-lang/noirc_abi').then((module) => - module.default(new URL('@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm', import.meta.url).toString()), - ), - import('@noir-lang/acvm_js').then((module) => - module.default(new URL('@noir-lang/acvm_js/web/acvm_js_bg.wasm', import.meta.url).toString()), - ), - ]); -}; - function display(container, msg) { const c = document.getElementById(container); const p = document.createElement('p'); @@ -222,8 +241,6 @@ document.getElementById('submitGuess').addEventListener('click', async () => { The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 -As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. - :::info At this point in the tutorial, your folder structure should look like this: @@ -310,9 +327,13 @@ Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add th ```js display('logs', 'Verifying proof... ⌛'); -const verificationKey = await backend.getVerificationKey(); -const verifier = new Verifier(); -const isValid = await verifier.verifyProof(proof, verificationKey); +const isValid = await backend.verifyProof(proof); + +// or to cache and use the verification key: +// const verificationKey = await backend.getVerificationKey(); +// const verifier = new Verifier(); +// const isValid = await verifier.verifyProof(proof, verificationKey); + if (isValid) display('logs', 'Verifying proof... 
✅'); ``` diff --git a/noir/noir-repo/docs/docs/getting_started/barretenberg/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/backend/_category_.json similarity index 63% rename from noir/noir-repo/docs/docs/getting_started/barretenberg/_category_.json rename to noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/backend/_category_.json index 27a8e89228d..b82e92beb0c 100644 --- a/noir/noir-repo/docs/docs/getting_started/barretenberg/_category_.json +++ b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/backend/_category_.json @@ -1,6 +1,6 @@ { "position": 1, - "label": "Install Barretenberg", + "label": "Install Proving Backend", "collapsible": true, "collapsed": true } diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/backend/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/backend/index.md new file mode 100644 index 00000000000..7192d954877 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/backend/index.md @@ -0,0 +1,31 @@ +--- +title: Proving Backend Installation +description: Proving backends offer command line tools for proving and verifying Noir programs. This page describes how to install `bb` as an example. +keywords: [ + Proving + Backend + Barretenberg + bb + bbup + Installation + Terminal + Command + CLI + Version +] +pagination_next: getting_started/hello_noir/index +--- + +Proving backends each provide their own tools for working with Noir programs, providing functionality like proof generation, proof verification, and verifier smart contract generation. + +For the latest information on tooling provided by each proving backend, installation instructions, Noir version compatibility... you may refer to the proving backends' own documentation. + +You can find the full list of proving backends compatible with Noir in [Awesome Noir](https://github.com/noir-lang/awesome-noir/?tab=readme-ov-file#proving-backends). + +## Example: Installing `bb` + +`bb` is the CLI tool provided by the [Barretenberg proving backend](https://github.com/AztecProtocol/barretenberg) developed by Aztec Labs. + +You can find the instructions for installation in [`bb`'s documentation](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/readme.md#installation). + +Once installed, we are ready to start working on [our first Noir program](../hello_noir/index.md). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md deleted file mode 100644 index 0102c86770b..00000000000 --- a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/barretenberg/index.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: Barretenberg Installation -description: bb is a command line tool for interacting with Aztec's proving backend Barretenberg. This page is a quick guide on how to install `bb` -keywords: [ - Barretenberg - bb - Installation - Terminal Commands - Version Check - Nightlies - Specific Versions - Branches -] -pagination_next: getting_started/hello_noir/index ---- - -`bb` is the CLI tool for generating and verifying proofs for Noir programs using the Barretenberg proving library. It also allows generating solidity verifier contracts for which you can verify contracts which were constructed using `bb`. 
- -## Installing `bb` - -Open a terminal on your machine, and write: - -##### macOS (Apple Silicon) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.zshrc -bbup -v 0.41.0 -``` - -##### macOS (Intel) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.zshrc -bbup -v 0.41.0 -``` - -##### Linux (Bash) - -```bash -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/cpp/installation/install | bash -source ~/.bashrc -bbup -v 0.41.0 -``` - -Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md index 1ade3f09ae3..3baae217eb3 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/index.md @@ -17,22 +17,25 @@ sidebar_position: 1 --- -Now that we have installed Nargo, it is time to make our first hello world program! +Now that we have installed Nargo and a proving backend, it is time to make our first hello world program! -## Create a Project Directory +### 1. Create a new project directory Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home -directory to house our Noir programs. +directory to house our first Noir program. -For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by -running: +Create the directory and change directory into it by running: ```sh mkdir ~/projects cd ~/projects ``` -## Create Our First Nargo Project +## Nargo + +Nargo provides the ability to initiate and execute Noir projects. Read the [Nargo installation](../installation/index.md) section to learn more about Nargo and how to install it. + +### 2. Create a new Noir project Now that we are in the projects directory, create a new Nargo project by running: @@ -40,18 +43,15 @@ Now that we are in the projects directory, create a new Nargo project by running nargo new hello_world ``` -> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for -> demonstration. -> -> In production, the common practice is to name the project folder as `circuits` for better -> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, -> `test`). +`hello_world` can be any arbitrary project name, we are simply using `hello_world` for demonstration. + +In production, it is common practice to name the project folder, `circuits`, for clarity amongst other folders in the codebase (like: `contracts`, `scripts`, and `test`). A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and _Nargo.toml_ which contain the source code and environmental options of your Noir program respectively. -### Intro to Noir Syntax +#### Intro to Noir Syntax Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: @@ -81,7 +81,7 @@ The Noir syntax `assert` can be interpreted as something similar to constraints For more Noir syntax, check the [Language Concepts](../../noir/concepts/comments.md) chapter. -## Build In/Output Files +### 3. 
Build in/output files

Change directory into _hello_world_ and build in/output files for your Noir program by running:

@@ -92,7 +92,7 @@ nargo check

A _Prover.toml_ file will be generated in your project directory, to allow specifying input values to the program.

-## Execute Our Noir Program
+### 4. Execute the Noir program

Now that the project is set up, we can execute our Noir program.

@@ -111,34 +111,41 @@ nargo execute witness-name

The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`.

-## Prove Our Noir Program
+The command also automatically compiles your Noir program if it was not compiled already or has been edited since the last compilation; you may notice the compiled artifact being written to the file `./target/hello_world.json`.
+
+## Proving Backend

-:::info
+Proving backends provide the ability to generate and verify proofs of Noir program executions, complementing Noir's own tooling that compiles and executes the programs. Read the [proving backend installation](../backend/index.md) section to learn more about proving backends and how to install them.

-Nargo no longer handles communicating with backends in order to generate proofs. In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md).
+Barretenberg is used as an example here to demonstrate how proving and verifying could be implemented and used. Read the [`bb` installation](../backend/index.md#example-installing-bb) section for how to install Barretenberg's CLI tool; refer to [`bb`'s documentation](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/readme.md) for full details about the tool.

-:::
+### 5. Prove an execution of the Noir program

-Prove the valid execution of your Noir program using `bb`:
+Using Barretenberg as an example, prove the valid execution of your Noir program by running:

```sh
-bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof
+bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./target/proof
```

-A new file called `proof` will be generated in your project directory, containing the generated proof for your program.
+The generated proof will then be written to the file `./target/proof`.

-## Verify Our Noir Program
+### 6. Verify the execution proof

Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file.

-Verify your proof by running:
+Using Barretenberg as an example, compute the verification key for the Noir program by running:

```sh
bb write_vk -b ./target/hello_world.json -o ./target/vk
-bb verify -k ./target/vk -p ./proof
```

-The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead.
+And verify your proof by running:
+
+```sh
+bb verify -k ./target/vk -p ./target/proof
+```
+
+If successful, the verification will complete in silence; if unsuccessful, the command will log the corresponding error.

Congratulations, you have now created and verified a proof for your very first Noir program!
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md index 525b8dabdd8..96e653f6c08 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/hello_noir/project_breakdown.md @@ -8,8 +8,7 @@ keywords: sidebar_position: 2 --- -This section breaks down our hello world program from the previous section. We elaborate on the project -structure and what the `prove` and `verify` commands did. +This section breaks down our hello world program from the previous section. ## Anatomy of a Nargo Project diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md index 4ef86aa5914..53ea9c7891c 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/getting_started/installation/index.md @@ -19,11 +19,9 @@ keywords: [ pagination_next: getting_started/hello_noir/index --- -`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. +`nargo` is a tool for working with Noir programs on the CLI, providing you with the ability to start new projects, compile, execute and test Noir programs from the terminal. -With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. - -Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. +The name is inspired by Rust's package manager `cargo`; and similar to Rust's `rustup`, Noir also has an easy installation script `noirup`. ## Installing Noirup diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md index cbb1938a5c6..8c23b639f12 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md +++ b/noir/noir-repo/docs/versioned_docs/version-v0.32.0/tutorials/noirjs_app.md @@ -14,13 +14,13 @@ You can find the complete app code for this guide [here](https://github.com/noir :::note -Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.27.x matches `noir_js@0.27.x`, etc. +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.31.x matches `noir_js@0.31.x`, etc. -In this guide, we will be pinned to 0.27.0. +In this guide, we will be pinned to 0.31.0. ::: -Before we start, we want to make sure we have Node and Nargo installed. +Before we start, we want to make sure we have Node, Nargo and the Barretenberg proving system (`bb`) installed. We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). 
@@ -30,6 +30,9 @@ As for `Nargo`, we can follow the [Nargo guide](../getting_started/installation/ curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash ``` +Follow the instructions on [this page](https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/cpp/src/barretenberg/bb#installation) to install `bb`. +Version 0.41.0 is compatible with `nargo` version 0.31.0, which you can install with `bbup -v 0.41.0` once `bbup` is installed. + Easy enough. Onwards! ## Our project @@ -42,13 +45,17 @@ In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! Run: -`nargo new circuit` +```bash +nargo new circuit +``` And... That's about it. Your program is ready to be compiled and run. To compile, let's `cd` into the `circuit` folder to enter our project, and call: -`nargo compile` +```bash +nargo compile +``` This compiles our circuit into `json` format and add it to a new `target` folder. @@ -92,30 +99,53 @@ Before we proceed with any coding, let's get our environment tailored for Noir. In your freshly minted `vite-project` folder, create a new file named `vite.config.js` and open it in your code editor. Paste the following to set the stage: ```javascript -import { defineConfig } from "vite"; -import copy from "rollup-plugin-copy"; - -export default defineConfig({ - esbuild: { - target: "esnext", - }, - optimizeDeps: { - esbuildOptions: { - target: "esnext", - }, +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); }, - plugins: [ - copy({ - targets: [ - { src: "node_modules/**/*.wasm", dest: "node_modules/.vite/dist" }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + build: { + target: 'esnext', + rollupOptions: { + external: ['@aztec/bb.js'] + } + }, + optimizeDeps: { + esbuildOptions: { + target: 'esnext' + } + }, + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? 
wasmContentTypePlugin : [], ], - copySync: true, - hook: "buildStart", - }), - ], - server: { - port: 3000, - }, + }; + } + + return {}; }); ``` @@ -124,7 +154,7 @@ export default defineConfig({ Now that our stage is set, install the necessary NoirJS packages along with our other dependencies: ```bash -npm install && npm install @noir-lang/backend_barretenberg@0.27.0 @noir-lang/noir_js@0.27.0 +npm install && npm install @noir-lang/backend_barretenberg@0.31.0 @noir-lang/noir_js@0.31.0 npm install rollup-plugin-copy --save-dev ``` @@ -193,17 +223,6 @@ Our love for Noir needs undivided attention, so let's just open `main.js` and de Start by pasting in this boilerplate code: ```js -const setup = async () => { - await Promise.all([ - import('@noir-lang/noirc_abi').then((module) => - module.default(new URL('@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm', import.meta.url).toString()), - ), - import('@noir-lang/acvm_js').then((module) => - module.default(new URL('@noir-lang/acvm_js/web/acvm_js_bg.wasm', import.meta.url).toString()), - ), - ]); -}; - function display(container, msg) { const c = document.getElementById(container); const p = document.createElement('p'); @@ -222,8 +241,6 @@ document.getElementById('submitGuess').addEventListener('click', async () => { The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 -As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. - :::info At this point in the tutorial, your folder structure should look like this: @@ -310,9 +327,13 @@ Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add th ```js display('logs', 'Verifying proof... ⌛'); -const verificationKey = await backend.getVerificationKey(); -const verifier = new Verifier(); -const isValid = await verifier.verifyProof(proof, verificationKey); +const isValid = await backend.verifyProof(proof); + +// or to cache and use the verification key: +// const verificationKey = await backend.getVerificationKey(); +// const verifier = new Verifier(); +// const isValid = await verifier.verifyProof(proof, verificationKey); + if (isValid) display('logs', 'Verifying proof... ✅'); ``` diff --git a/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr b/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr index 4859e84d49e..1f111a1b5b0 100644 --- a/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr +++ b/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr @@ -7,12 +7,7 @@ fn main( num: u64 ) -> pub u64 { // verify sum so far was computed correctly - std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof, public_inputs, key_hash); // Take output of previous proof and add another number to it. 
public_inputs[2] as u64 + num } diff --git a/noir/noir-repo/examples/recursion/recurse_node/src/main.nr b/noir/noir-repo/examples/recursion/recurse_node/src/main.nr index 60192493b54..05b717fc794 100644 --- a/noir/noir-repo/examples/recursion/recurse_node/src/main.nr +++ b/noir/noir-repo/examples/recursion/recurse_node/src/main.nr @@ -5,11 +5,6 @@ fn main( proof: [Field; 109] ) -> pub u64 { // verify sum was computed correctly - std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof, public_inputs, key_hash); public_inputs[3] as u64 } diff --git a/noir/noir-repo/noir_stdlib/src/cmp.nr b/noir/noir-repo/noir_stdlib/src/cmp.nr index bdd5e2bc5ec..94cd284e238 100644 --- a/noir/noir-repo/noir_stdlib/src/cmp.nr +++ b/noir/noir-repo/noir_stdlib/src/cmp.nr @@ -1,9 +1,37 @@ +use crate::meta::derive_via; + +#[derive_via(derive_eq)] // docs:start:eq-trait trait Eq { fn eq(self, other: Self) -> bool; } // docs:end:eq-trait +comptime fn derive_eq(s: StructDefinition) -> Quoted { + let typ = s.as_type(); + + let impl_generics = s.generics().map(|g| quote { $g }).join(quote {,}); + + let where_clause = s.generics().map(|name| quote { $name: Eq }).join(quote {,}); + + // `(self.a == other.a) & (self.b == other.b) & ...` + let equalities = s.fields().map( + |f: (Quoted, Type)| { + let name = f.0; + quote { (self.$name == other.$name) } + } + ); + let body = equalities.join(quote { & }); + + quote { + impl<$impl_generics> Eq for $typ where $where_clause { + fn eq(self, other: Self) -> bool { + $body + } + } + } +} + impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } diff --git a/noir/noir-repo/noir_stdlib/src/default.nr b/noir/noir-repo/noir_stdlib/src/default.nr index 0acb3966034..4fbde09b512 100644 --- a/noir/noir-repo/noir_stdlib/src/default.nr +++ b/noir/noir-repo/noir_stdlib/src/default.nr @@ -1,9 +1,37 @@ +use crate::meta::derive_via; + +#[derive_via(derive_default)] // docs:start:default-trait trait Default { fn default() -> Self; } // docs:end:default-trait +comptime fn derive_default(s: StructDefinition) -> Quoted { + let typ = s.as_type(); + + let impl_generics = s.generics().map(|g| quote { $g }).join(quote {,}); + + let where_clause = s.generics().map(|name| quote { $name: Default }).join(quote {,}); + + // `foo: Default::default(), bar: Default::default(), ...` + let fields = s.fields().map( + |f: (Quoted, Type)| { + let name = f.0; + quote { $name: Default::default() } + } + ); + let fields = fields.join(quote {,}); + + quote { + impl<$impl_generics> Default for $typ where $where_clause { + fn default() -> Self { + Self { $fields } + } + } + } +} + impl Default for Field { fn default() -> Field { 0 } } impl Default for u8 { fn default() -> u8 { 0 } } diff --git a/noir/noir-repo/noir_stdlib/src/hash/mod.nr b/noir/noir-repo/noir_stdlib/src/hash/mod.nr index 320b89353d9..8e9fe75d982 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/mod.nr @@ -46,6 +46,7 @@ fn pedersen_commitment_with_separator(input: [Field; N], separator: } } +#[no_predicates] fn pedersen_commitment_with_separator_noir(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N]; for i in 0..N { @@ -56,15 +57,19 @@ fn pedersen_commitment_with_separator_noir(input: [Field; N], separa multi_scalar_mul(generators, points) } 
+#[no_predicates] fn pedersen_hash_with_separator_noir(input: [Field; N], separator: u32) -> Field { let mut scalars: Vec = Vec::from_slice([EmbeddedCurveScalar { lo: 0, hi: 0 }; N].as_slice()); //Vec::new(); for i in 0..N { scalars.set(i, from_field_unsafe(input[i])); } - scalars.push(EmbeddedCurveScalar { lo: N as Field, hi: 0 }); + scalars.push(EmbeddedCurveScalar { lo: N as Field, hi: 0 as Field }); let domain_generators :[EmbeddedCurvePoint; N]= derive_generators("DEFAULT_DOMAIN_SEPARATOR".as_bytes(), separator); - let mut vec_generators = Vec::from_slice(domain_generators.as_slice()); + let mut vec_generators = Vec::new(); + for i in 0..N { + vec_generators.push(domain_generators[i]); + } let length_generator : [EmbeddedCurvePoint; 1] = derive_generators("pedersen_hash_length".as_bytes(), 0); vec_generators.push(length_generator[0]); multi_scalar_mul_slice(vec_generators.slice, scalars.slice)[0] @@ -86,7 +91,7 @@ fn __pedersen_commitment_with_separator(input: [Field; N], separator #[field(bn254)] fn derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] { crate::assert_constant(domain_separator_bytes); - crate::assert_constant(starting_index); + // TODO(https://github.com/noir-lang/noir/issues/5672): Add back assert_constant on starting_index __derive_generators(domain_separator_bytes, starting_index) } diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr index 08cf68d1f82..9626da0cf97 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr @@ -1,7 +1,7 @@ use crate::hash::Hasher; use crate::default::Default; -global RATE: u32 = 3; +comptime global RATE: u32 = 3; struct Poseidon2 { cache: [Field;3], diff --git a/noir/noir-repo/noir_stdlib/src/lib.nr b/noir/noir-repo/noir_stdlib/src/lib.nr index ac53941e752..2d559c43162 100644 --- a/noir/noir-repo/noir_stdlib/src/lib.nr +++ b/noir/noir-repo/noir_stdlib/src/lib.nr @@ -43,7 +43,12 @@ unconstrained pub fn println(input: T) { } #[foreign(recursive_aggregation)] -pub fn verify_proof(verification_key: [Field], proof: [Field], public_inputs: [Field], key_hash: Field) {} +pub fn verify_proof( + verification_key: [Field; N], + proof: [Field; M], + public_inputs: [Field; K], + key_hash: Field +) {} // Asserts that the given value is known at compile-time. // Useful for debugging for-loop bounds. 
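The `verify_proof` change above swaps the slice-based signature for length-generic arrays, which matches the call-site updates in the recursion and double-verify examples elsewhere in this diff. A minimal sketch of a caller against the new signature follows; the array lengths are placeholders chosen for illustration, not values prescribed by the stdlib:

```
// Sketch of a recursive verifier using the new fixed-length std::verify_proof.
// The key, proof and public-input lengths below are illustrative; they must
// match whatever the proving backend actually produces.
fn main(
    verification_key: [Field; 114],
    proof: [Field; 93],
    public_inputs: pub [Field; 1],
    key_hash: Field
) {
    // No .as_slice() conversions are needed any more.
    std::verify_proof(verification_key, proof, public_inputs, key_hash);
}
```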
diff --git a/noir/noir-repo/noir_stdlib/src/meta/mod.nr b/noir/noir-repo/noir_stdlib/src/meta/mod.nr index 395f09a453e..7ed5e3ff44f 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/mod.nr @@ -1,6 +1,11 @@ +use crate::collections::umap::UHashMap; +use crate::hash::BuildHasherDefault; +use crate::hash::poseidon2::Poseidon2Hasher; + +mod struct_def; mod trait_constraint; mod trait_def; -mod type_def; +mod typ; mod quoted; /// Calling unquote as a macro (via `unquote!(arg)`) will unquote @@ -9,3 +14,30 @@ mod quoted; pub comptime fn unquote(code: Quoted) -> Quoted { code } + +/// Returns the type of any value +#[builtin(type_of)] +pub comptime fn type_of(x: T) -> Type {} + +type DeriveFunction = fn(StructDefinition) -> Quoted; + +comptime mut global HANDLERS: UHashMap> = UHashMap::default(); + +#[varargs] +pub comptime fn derive(s: StructDefinition, traits: [TraitDefinition]) -> Quoted { + let mut result = quote {}; + + for trait_to_derive in traits { + let handler = HANDLERS.get(trait_to_derive); + assert(handler.is_some(), f"No derive function registered for `{trait_to_derive}`"); + + let trait_impl = handler.unwrap()(s); + result = quote { $result $trait_impl }; + } + + result +} + +unconstrained pub comptime fn derive_via(t: TraitDefinition, f: DeriveFunction) { + HANDLERS.insert(t, f); +} diff --git a/noir/noir-repo/noir_stdlib/src/meta/quoted.nr b/noir/noir-repo/noir_stdlib/src/meta/quoted.nr index 6273d64b10c..8e96e8828f8 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/quoted.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/quoted.nr @@ -1,4 +1,7 @@ impl Quoted { #[builtin(quoted_as_trait_constraint)] fn as_trait_constraint(self) -> TraitConstraint {} + + #[builtin(quoted_as_type)] + fn as_type(self) -> Type {} } diff --git a/noir/noir-repo/noir_stdlib/src/meta/type_def.nr b/noir/noir-repo/noir_stdlib/src/meta/struct_def.nr similarity index 58% rename from noir/noir-repo/noir_stdlib/src/meta/type_def.nr rename to noir/noir-repo/noir_stdlib/src/meta/struct_def.nr index c01aab4b141..8d3f9ceb8a5 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/type_def.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/struct_def.nr @@ -2,15 +2,14 @@ impl StructDefinition { /// Return a syntactic version of this struct definition as a type. /// For example, `as_type(quote { type Foo { ... } })` would return `Foo` #[builtin(struct_def_as_type)] - fn as_type(self) -> Quoted {} + fn as_type(self) -> Type {} - /// Return each generic on this struct. The names of these generics are unchanged - /// so users may need to keep name collisions in mind if this is used directly in a macro. + /// Return each generic on this struct. #[builtin(struct_def_generics)] - fn generics(self) -> [Quoted] {} + fn generics(self) -> [Type] {} /// Returns (name, type) pairs of each field in this struct. Each type is as-is /// with any generic arguments unchanged. 
#[builtin(struct_def_fields)] - fn fields(self) -> [(Quoted, Quoted)] {} + fn fields(self) -> [(Quoted, Type)] {} } diff --git a/noir/noir-repo/noir_stdlib/src/meta/trait_def.nr b/noir/noir-repo/noir_stdlib/src/meta/trait_def.nr index 5de7631e34d..ca381cb8e16 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/trait_def.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/trait_def.nr @@ -1,4 +1,25 @@ +use crate::hash::{Hash, Hasher}; +use crate::cmp::Eq; + impl TraitDefinition { #[builtin(trait_def_as_trait_constraint)] fn as_trait_constraint(_self: Self) -> TraitConstraint {} } + +impl Eq for TraitDefinition { + fn eq(self, other: Self) -> bool { + trait_def_eq(self, other) + } +} + +impl Hash for TraitDefinition { + fn hash(self, state: &mut H) where H: Hasher { + state.write(trait_def_hash(self)); + } +} + +#[builtin(trait_def_eq)] +fn trait_def_eq(_first: TraitDefinition, _second: TraitDefinition) -> bool {} + +#[builtin(trait_def_hash)] +fn trait_def_hash(_def: TraitDefinition) -> Field {} diff --git a/noir/noir-repo/noir_stdlib/src/meta/typ.nr b/noir/noir-repo/noir_stdlib/src/meta/typ.nr new file mode 100644 index 00000000000..2a043b373bc --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/meta/typ.nr @@ -0,0 +1,34 @@ +use crate::cmp::Eq; +use crate::option::Option; + +impl Type { + #[builtin(type_as_array)] + fn as_array(self) -> Option<(Type, Type)> {} + + #[builtin(type_as_constant)] + fn as_constant(self) -> Option {} + + #[builtin(type_as_integer)] + fn as_integer(self) -> Option<(bool, u8)> {} + + #[builtin(type_as_slice)] + fn as_slice(self) -> Option {} + + #[builtin(type_as_tuple)] + fn as_tuple(self) -> Option<[Type]> {} + + #[builtin(type_is_bool)] + fn is_bool(self) -> bool {} + + #[builtin(type_is_field)] + fn is_field(self) -> bool {} +} + +impl Eq for Type { + fn eq(self, other: Self) -> bool { + type_eq(self, other) + } +} + +#[builtin(type_eq)] +fn type_eq(_first: Type, _second: Type) -> bool {} diff --git a/noir/noir-repo/noir_stdlib/src/prelude.nr b/noir/noir-repo/noir_stdlib/src/prelude.nr index 3244329aa4b..0d423e3556d 100644 --- a/noir/noir-repo/noir_stdlib/src/prelude.nr +++ b/noir/noir-repo/noir_stdlib/src/prelude.nr @@ -6,3 +6,4 @@ use crate::uint128::U128; use crate::cmp::{Eq, Ord}; use crate::default::Default; use crate::convert::{From, Into}; +use crate::meta::{derive, derive_via}; diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr b/noir/noir-repo/noir_stdlib/src/uint128.nr index e99818bafa0..7b75cf4cae4 100644 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ b/noir/noir-repo/noir_stdlib/src/uint128.nr @@ -100,14 +100,14 @@ impl U128 { } fn decode_ascii(ascii: u8) -> Field { - if ascii < 58 { + (if ascii < 58 { ascii - 48 } else { let ascii = ascii + 32 * (U128::uconstrained_check_is_upper_ascii(ascii) as u8); assert(ascii >= 97); // enforce >= 'a' assert(ascii <= 102); // enforce <= 'f' ascii - 87 - } as Field + }) as Field } // TODO: Replace with a faster version. 
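To tie the metaprogramming pieces above together, here is a small sketch combining the new `StructDefinition::fields` signature (which now yields `(Quoted, Type)` pairs) with the `Type` query methods introduced in `typ.nr`. The attribute `assert_all_fields_are_field` is a hypothetical helper written for illustration only; it is not part of this diff:

```
// Hypothetical comptime attribute: reject any struct whose fields are not Field.
// Uses the (Quoted, Type) pairs now returned by StructDefinition::fields and
// the new Type::is_field query.
comptime fn assert_all_fields_are_field(s: StructDefinition) {
    for f in s.fields() {
        let (_name, typ) = f;
        assert(typ.is_field());
    }
}

#[assert_all_fields_are_field]
struct Point {
    x: Field,
    y: Field,
}

fn main() {
    let _ = Point { x: 1, y: 2 };
}
```

The `comptime_type` and `derive` test programs later in this diff exercise the same surface through the stdlib's own `derive`/`derive_via` machinery.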
diff --git a/noir/noir-repo/scripts/install_bb.sh b/noir/noir-repo/scripts/install_bb.sh index 95dcfdda880..65a449be543 100755 --- a/noir/noir-repo/scripts/install_bb.sh +++ b/noir/noir-repo/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.46.1" +VERSION="0.47.1" BBUP_PATH=~/.bb/bbup diff --git a/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml b/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml deleted file mode 100644 index 8bdefbbbd21..00000000000 --- a/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "non_comptime_local_fn_call" -type = "bin" -authors = [""] -compiler_version = ">=0.23.0" - -[dependencies] diff --git a/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr b/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr deleted file mode 100644 index d75bb1a922a..00000000000 --- a/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr +++ /dev/null @@ -1,9 +0,0 @@ -fn main() { - comptime { - let _a = id(3); - } -} - -fn id(x: Field) -> Field { - x -} diff --git a/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/Nargo.toml b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/Nargo.toml new file mode 100644 index 00000000000..ac7933fa250 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "type_annotation_needed_on_struct_constructor" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/src/main.nr b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/src/main.nr new file mode 100644 index 00000000000..5207210dfbf --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_constructor/src/main.nr @@ -0,0 +1,6 @@ +struct Foo { +} + +fn main() { + let foo = Foo {}; +} diff --git a/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/Nargo.toml b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/Nargo.toml new file mode 100644 index 00000000000..cb53d2924f4 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "type_annotation_needed_on_struct_new" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/src/main.nr b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/src/main.nr new file mode 100644 index 00000000000..f740dfa6d37 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/type_annotation_needed_on_struct_new/src/main.nr @@ -0,0 +1,12 @@ +struct Foo { +} + +impl Foo { + fn new() -> Foo { + Foo {} + } +} + +fn main() { + let foo = Foo::new(); +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/Nargo.toml new file mode 100644 index 00000000000..2352ae0c562 --- 
/dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "arithmetic_generics" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/src/main.nr new file mode 100644 index 00000000000..d4f71d38413 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/arithmetic_generics/src/main.nr @@ -0,0 +1,103 @@ +fn main() { + let (first, rest) = split_first([1, 2, 3, 4]); + assert_eq(first, 1); + assert_eq(rest, [2, 3, 4]); + + // Type inference works without the type constraints from assert_eq as well + let _ = split_first([1, 2, 3]); + + let _ = push_multiple([1, 2, 3]); +} + +fn split_first(array: [T; N]) -> (T, [T; N - 1]) { + std::static_assert(N != 0, "split_first called on empty array"); + let mut new_array: [T; N - 1] = std::unsafe::zeroed(); + + for i in 0..N - 1 { + new_array[i] = array[i + 1]; + } + + (array[0], new_array) +} + +fn push(array: [Field; N], element: Field) -> [Field; N + 1] { + let mut result: [_; N + 1] = std::unsafe::zeroed(); + result[array.len()] = element; + + for i in 0..array.len() { + result[i] = array[i]; + } + + result +} + +fn push_multiple(array: [Field; N]) -> [Field; N + 2] { + // : [Field; N + 1] + let array2 = push(array, 4); + + // : [Field; (N + 1) + 1] + let array3 = push(array2, 5); + + // [Field; (N + 1) + 1] = [Field; N + 2] + array3 +} + +// This signature fails because we can't match `_ + 1` to `3` at the call site +// fn push_multiple(array: [Field; 1 + N]) -> [Field; N + 3] { + +// ********************************************* +// The rest of this file is setup for demo_proof +// ********************************************* + +struct W { } + +struct Equiv { + // TODO(https://github.com/noir-lang/noir/issues/5644): + // Bug with struct_obj.field_thats_a_fn(x) + + to_: fn[TU](T) -> U, + fro_: fn[UT](U) -> T, + // .. 
other coherence conditions +} + +impl Equiv { + fn to(self, x: T) -> U { + (self.to_)(x) + } + + fn fro(self, x: U) -> T { + (self.fro_)(x) + } +} + +fn equiv_trans( + x: Equiv, + y: Equiv +) -> Equiv, Equiv), V, (Equiv, Equiv)> { + Equiv { to_: |z| { y.to(x.to(z)) }, fro_: |z| { x.fro(y.fro(z)) } } +} + +fn mul_one_r() -> Equiv, (), W, ()> { + Equiv { to_: |_x| { W {} }, fro_: |_x| { W {} } } +} + +fn add_equiv_r(_: Equiv, EN, W, EM>) -> Equiv, (), W, ()> { + Equiv { to_: |_x| { W {} }, fro_: |_x| { W {} } } +} + +fn mul_comm() -> Equiv, (), W, ()> { + Equiv { to_: |_x| { W {} }, fro_: |_x| { W {} } } +} + +fn mul_add() -> Equiv, (), W, ()> { + Equiv { to_: |_x| { W {} }, fro_: |_x| { W {} } } +} + +// (N + 1) * N == N * N + N +fn demo_proof() -> Equiv, (Equiv, (), W, ()>, Equiv, (Equiv, (), W, ()>, Equiv, (), W<(N * (N + 1))>, ()>), W, (Equiv, (), W<(N * (N + 1))>, ()>, Equiv, (), W, ()>)>), W, (Equiv, (Equiv, (), W, ()>, Equiv, (), W<(N * (N + 1))>, ()>), W, (Equiv, (), W<(N * (N + 1))>, ()>, Equiv, (), W, ()>)>, Equiv, (), W, ()>)> { + let p1: Equiv, (), W, ()> = mul_comm(); + let p2: Equiv, (), W, ()> = mul_add::(); + let p3_sub: Equiv, (), W, ()> = mul_one_r(); + let p3: Equiv, (), W, ()> = add_equiv_r::(p3_sub); + equiv_trans(equiv_trans(p1, p2), p3) +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_args/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_args/src/main.nr index 44b9c20460f..6178df5e749 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/attribute_args/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_args/src/main.nr @@ -1,9 +1,9 @@ -#[attr_with_args(a b, c d)] -#[varargs(one, two)] -#[varargs(one, two, three, four)] +#[attr_with_args(1, 2)] +#[varargs(1, 2)] +#[varargs(1, 2, 3, 4)] struct Foo {} -comptime fn attr_with_args(s: StructDefinition, a: Quoted, b: Quoted) { +comptime fn attr_with_args(s: StructDefinition, a: Field, b: Field) { // Ensure all variables are in scope. // We can't print them since that breaks the test runner. let _ = s; @@ -11,7 +11,8 @@ comptime fn attr_with_args(s: StructDefinition, a: Quoted, b: Quoted) { let _ = b; } -comptime fn varargs(s: StructDefinition, t: [Quoted]) { +#[varargs] +comptime fn varargs(s: StructDefinition, t: [Field]) { let _ = s; for _ in t {} assert(t.len() < 5); diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/Nargo.toml new file mode 100644 index 00000000000..84162d3c093 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_fmt_strings" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr new file mode 100644 index 00000000000..19572fd15a1 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr @@ -0,0 +1,15 @@ +fn main() { + // format strings are lowered as normal strings + let (s1, s2): (str<39>, str<4>) = comptime { + let x = 4; + let y = 5; + + // Can't print these at compile-time here since printing to stdout while + // compiling breaks the test runner. 
+ let s1 = f"x is {x}, fake interpolation: \{y}, y is {y}"; + let s2 = std::unsafe::zeroed::>(); + (s1, s2) + }; + assert_eq(s1, "x is 4, fake interpolation: {y}, y is 5"); + assert_eq(s2, "\0\0\0\0"); +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr index 5c99f8c587e..2f2ca89cfb5 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_trait_constraint/src/main.nr @@ -24,16 +24,16 @@ fn main() { } } -comptime struct TestHasher { +struct TestHasher { result: Field, } -comptime impl Hasher for TestHasher { - comptime fn finish(self) -> Field { +impl Hasher for TestHasher { + fn finish(self) -> Field { self.result } - comptime fn write(&mut self, input: Field) { + fn write(&mut self, input: Field) { self.result += input; } } diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_traits/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_traits/src/main.nr index 8b1f81e6594..7d1e116dd0c 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_traits/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_traits/src/main.nr @@ -20,8 +20,8 @@ struct MyType { value: i32, } -comptime impl Neg for MyType { - comptime fn neg(self) -> Self { +impl Neg for MyType { + fn neg(self) -> Self { self } } diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_type/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/comptime_type/Nargo.toml new file mode 100644 index 00000000000..c5b9ca89240 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_type/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "comptime_type" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_type/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_type/src/main.nr new file mode 100644 index 00000000000..b2b724db6fd --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_type/src/main.nr @@ -0,0 +1,70 @@ +use std::meta::type_of; + +fn main() { + comptime + { + // Check type_of works correctly (relies on Eq for Type) + let a_field = 0; + let another_field = 1; + let an_i32: i32 = 0; + let field_type_1 = type_of(a_field); + let field_type_2 = type_of(another_field); + let i32_type = type_of(an_i32); + assert(field_type_1 == field_type_2); + assert(field_type_1 != i32_type); + + // Check Type::is_field + assert(field_type_1.is_field()); + assert(!i32_type.is_field()); + + // Check Type::as_integer + assert(field_type_1.as_integer().is_none()); + + let (signed, bits) = i32_type.as_integer().unwrap(); + assert(signed); + assert_eq(bits, 32); + + let a_u8: u8 = 0; + let u8_type = type_of(a_u8); + let (signed, bits) = u8_type.as_integer().unwrap(); + assert(!signed); + assert_eq(bits, 8); + + // Check Type::as_tuple + assert(u8_type.as_tuple().is_none()); + + let tuple = (an_i32, a_u8); + let tuple_type = type_of(tuple); + let tuple_types = tuple_type.as_tuple().unwrap(); + assert_eq(tuple_types.len(), 2); + assert_eq(tuple_types[0], i32_type); + assert_eq(tuple_types[1], u8_type); + + // Check Type::as_slice + assert(u8_type.as_slice().is_none()); + + let slice = &[1]; + let slice_type = 
type_of(slice); + let slice_type_element_type = slice_type.as_slice().unwrap(); + assert_eq(slice_type_element_type, field_type_1); + + // Check Type::as_array + assert(u8_type.as_array().is_none()); + + let array = [1, 2, 3]; + let array_type = type_of(array); + let (array_type_element_type , array_length) = array_type.as_array().unwrap(); + assert_eq(array_type_element_type, field_type_1); + + // Check Type::as_constant + assert(u8_type.as_constant().is_none()); + assert_eq(array_length.as_constant().unwrap(), 3); + + // Check Type::is_bool + assert(!u8_type.is_bool()); + + let yes = true; + let bool_type = type_of(yes); + assert(bool_type.is_bool()); + } +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr index 5463a61d969..69cb641e7c7 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr @@ -1,5 +1,5 @@ comptime fn derive_default(typ: StructDefinition) -> Quoted { - let generics: [Quoted] = typ.generics(); + let generics = typ.generics(); assert_eq( generics.len(), 0, "derive_default: Deriving Default on generic types is currently unimplemented" ); @@ -27,7 +27,7 @@ struct Foo { #[derive_default] struct Bar {} -comptime fn make_field_exprs(fields: [(Quoted, Quoted)]) -> [Quoted] { +comptime fn make_field_exprs(fields: [(Quoted, Type)]) -> [Quoted] { let mut result = &[]; for my_field in fields { let name = my_field.0; diff --git a/noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/Nargo.toml new file mode 100644 index 00000000000..7d669ead363 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "quoted_as_type" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/src/main.nr new file mode 100644 index 00000000000..e06294592ca --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/quoted_as_type/src/main.nr @@ -0,0 +1,21 @@ +fn main() { + macro!().do_nothing(); +} + +comptime fn macro() -> Quoted { + let typ = quote { Foo }.as_type(); + quote { let foo: $typ = Foo {}; foo } +} + +struct Foo {} + +// Ensure we call the Foo impl +impl Foo { + fn do_nothing(_self: Self) { + assert(false); + } +} + +impl Foo { + fn do_nothing(_self: Self) {} +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/regression_5671/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/regression_5671/Nargo.toml new file mode 100644 index 00000000000..4ddf3413e5e --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/regression_5671/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_5671" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_empty/regression_5671/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/regression_5671/src/main.nr new file mode 100644 index 00000000000..2bac98ef7c4 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/regression_5671/src/main.nr @@ -0,0 +1,20 @@ +#[foo] +struct MyOtherStruct { + field1: A, + 
field2: B, +} + +comptime fn foo(_s: StructDefinition) -> Quoted { + quote { + impl Eq for MyOtherStruct where A: Eq, B: Eq { + fn eq(self, other: Self) -> bool { + (self.field1 == other.field1) & (self.field2 == other.field2) + } + } + } +} + +fn main() { + let x = MyOtherStruct { field1: 1, field2: 2 }; + assert_eq(x, x); +} diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_call_in_global/Nargo.toml similarity index 64% rename from noir/noir-repo/test_programs/execution_success/verify_honk_proof/Nargo.toml rename to noir/noir-repo/test_programs/compile_success_empty/trait_call_in_global/Nargo.toml index 8fce1bf44b6..005fec5bf36 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Nargo.toml +++ b/noir/noir-repo/test_programs/compile_success_empty/trait_call_in_global/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "verify_honk_proof" +name = "trait_call_in_global" type = "bin" authors = [""] diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_call_in_global/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/trait_call_in_global/src/main.nr new file mode 100644 index 00000000000..775cb5f3b7d --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/trait_call_in_global/src/main.nr @@ -0,0 +1,5 @@ +global s: BoundedVec = From::from([0]); + +fn main() { + let _ = s; +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/Nargo.toml new file mode 100644 index 00000000000..650baead9e2 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "zeroed_slice" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/src/main.nr new file mode 100644 index 00000000000..44ccb2bd595 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/zeroed_slice/src/main.nr @@ -0,0 +1,3 @@ +fn main() { + let _: [u8] = std::unsafe::zeroed(); +} diff --git a/noir/noir-repo/test_programs/execution_success/databus/src/main.nr b/noir/noir-repo/test_programs/execution_success/databus/src/main.nr index 7e5c23d508d..1e4aa141eea 100644 --- a/noir/noir-repo/test_programs/execution_success/databus/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/databus/src/main.nr @@ -1,4 +1,4 @@ -fn main(mut x: u32, y: call_data u32, z: call_data [u32; 4]) -> return_data u32 { +fn main(mut x: u32, y: call_data(0) u32, z: call_data(0) [u32; 4]) -> return_data u32 { let a = z[x]; a + foo(y) } diff --git a/noir/noir-repo/test_programs/execution_success/derive/Nargo.toml b/noir/noir-repo/test_programs/execution_success/derive/Nargo.toml new file mode 100644 index 00000000000..f3846594305 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/derive/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "derive" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/derive/src/main.nr b/noir/noir-repo/test_programs/execution_success/derive/src/main.nr new file mode 100644 index 00000000000..f344defe41e --- /dev/null +++ 
b/noir/noir-repo/test_programs/execution_success/derive/src/main.nr @@ -0,0 +1,44 @@ +#[derive_via(derive_do_nothing)] +trait DoNothing { + fn do_nothing(self); +} + +#[derive(DoNothing)] +struct MyStruct { my_field: u32 } + +comptime fn derive_do_nothing(s: StructDefinition) -> Quoted { + let typ = s.as_type(); + let generics = s.generics().map(|g| quote { $g }).join(quote {,}); + quote { + impl<$generics> DoNothing for $typ { + fn do_nothing(_self: Self) { + // Traits can't tell us what to do + println("something"); + } + } + } +} + +// Test stdlib derive fns & multiple traits +#[derive(Eq, Default)] +struct MyOtherStruct { + field1: A, + field2: B, + field3: MyOtherOtherStruct, +} + +#[derive(Eq, Default)] +struct MyOtherOtherStruct { + x: T, +} + +fn main() { + let s = MyStruct { my_field: 1 }; + s.do_nothing(); + + let o: MyOtherStruct = MyOtherStruct::default(); + assert_eq(o, o); + + let o: MyOtherStruct]> = MyOtherStruct::default(); + assert_eq(o, o); +} diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_nested_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/double_verify_nested_proof/src/main.nr index 5f0eb1a5b53..75a5fa9ebda 100644 --- a/noir/noir-repo/test_programs/execution_success/double_verify_nested_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/double_verify_nested_proof/src/main.nr @@ -19,17 +19,7 @@ fn main( key_hash: Field, proof_b: [Field; 109] ) { - std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof, public_inputs, key_hash); - std::verify_proof( - verification_key.as_slice(), - proof_b.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof_b, public_inputs, key_hash); } diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr index d3b909c3fa4..8d73bb09aa5 100644 --- a/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr @@ -12,17 +12,7 @@ fn main( key_hash: Field, proof_b: [Field; 93] ) { - std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof, public_inputs, key_hash); - std::verify_proof( - verification_key.as_slice(), - proof_b.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof_b, public_inputs, key_hash); } diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr b/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr index 2555bbc4758..5137a538e42 100644 --- a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr @@ -13,17 +13,7 @@ fn main( key_hash: Field, proof_b: [Field; 93] ) { - std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof, public_inputs, key_hash); - std::verify_proof( - verification_key.as_slice(), - proof_b.as_slice(), - public_inputs.as_slice(), - key_hash - ); + std::verify_proof(verification_key, proof_b, public_inputs, key_hash); } diff --git 
a/noir/noir-repo/test_programs/execution_success/regression_5615/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_5615/Nargo.toml new file mode 100644 index 00000000000..738d99391a2 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_5615/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_5615" +type = "bin" +authors = [""] +compiler_version = ">=0.32.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_5615/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_5615/src/main.nr new file mode 100644 index 00000000000..afb641e510d --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_5615/src/main.nr @@ -0,0 +1,12 @@ +use std::collections::umap::UHashMap; +use std::hash::BuildHasherDefault; +use std::hash::poseidon2::Poseidon2Hasher; + +unconstrained fn main() { + comptime + { + let mut map: UHashMap> = UHashMap::default(); + + map.insert(1, 2); + } +} diff --git a/noir/noir-repo/test_programs/execution_success/slice_regex/Nargo.toml b/noir/noir-repo/test_programs/execution_success/slice_regex/Nargo.toml new file mode 100644 index 00000000000..ac95636c74a --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/slice_regex/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "slice_regex" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/slice_regex/src/main.nr b/noir/noir-repo/test_programs/execution_success/slice_regex/src/main.nr new file mode 100644 index 00000000000..43bd4433c69 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/slice_regex/src/main.nr @@ -0,0 +1,811 @@ +struct Match { + succeeded: bool, + match_ends: u32, + leftover: [u8], +} + +impl Match { + fn empty(leftover: [u8]) -> Self { + Match { succeeded: true, match_ends: 0, leftover } + } +} + +impl Eq for Match { + fn eq(self, other: Self) -> bool { + (self.succeeded == other.succeeded) & + (self.match_ends == other.match_ends) + // (self.leftover == other.leftover) + } +} + +// TODO: load match into str and assert that it's the correct length +// impl From for str + +trait Regex { + fn match(self, input: [u8]) -> Match; +} + +// Empty +impl Regex for () { + fn match(_self: Self, input: [u8]) -> Match { + Match::empty(input) + } +} + +// Exact +impl Regex for str { + fn match(self, input: [u8]) -> Match { + let mut leftover = input; + let mut matches_input = true; + let self_as_bytes = self.as_bytes(); + for c in self_as_bytes { + if leftover.len() != 0 { + let (first_elem, popped_slice) = leftover.pop_front(); + leftover = popped_slice; + matches_input &= first_elem == c; + } else { + matches_input = false; + } + } + if matches_input { + Match { + succeeded: true, + match_ends: self_as_bytes.len(), + leftover, + } + } else { + Match { + succeeded: false, + match_ends: 0, + leftover: input, + } + } + } +} + +// And +impl Regex for (T, U) where T: Regex, U: Regex { + fn match(self, input: [u8]) -> Match { + let lhs_result = self.0.match(input); + if lhs_result.succeeded { + let rhs_result = self.1.match(lhs_result.leftover); + if rhs_result.succeeded { + Match { + succeeded: true, + match_ends: lhs_result.match_ends + rhs_result.match_ends, + leftover: rhs_result.leftover, + } + } else { + Match { + succeeded: false, + match_ends: 0, + leftover: input, + } + } + } else { + Match { + succeeded: false, + match_ends: 0, + leftover: input, + } + } + } +} + +// 
N T's: (T, (T, (T, T))) +struct Repeated { + inner: T, +} + +impl Regex for Repeated where T: Regex { + fn match(self, input: [u8]) -> Match { + let mut result = Match::empty(input); + for _ in 0..N { + if result.succeeded { + let next_result = self.inner.match(result.leftover); + result = Match { + succeeded: next_result.succeeded, + match_ends: result.match_ends + next_result.match_ends, + leftover: next_result.leftover, + }; + } + } + result + } +} + +struct Or { + lhs: T, + rhs: U, +} + +impl Regex for Or where T: Regex, U: Regex { + fn match(self, input: [u8]) -> Match { + let lhs_result = self.lhs.match(input); + if lhs_result.succeeded { + lhs_result + } else { + self.rhs.match(input) + } + } +} + +struct Question { + inner: T, +} + +impl Regex for Question where T: Regex { + fn match(self, input: [u8]) -> Match { + Or { + lhs: self.inner, + rhs: (), + }.match(input) + } +} + +// 0 <= num_matches <= N +struct Star { + inner: T, +} + +impl Regex for Star where T: Regex { + fn match(self, input: [u8]) -> Match { + let regex: Repeated<_, N> = Repeated { + inner: Question { inner: self.inner }, + }; + regex.match(input) + } +} + +// 0 < num_matches <= N +struct Plus { + inner: T, +} + +impl Regex for Plus where T: Regex { + fn match(self, input: [u8]) -> Match { + std::static_assert(N_PRED + 1 == N, "N - 1 != N_PRED"); + let star: Star = Star { inner: self.inner }; + ( + self.inner, + star + ).match(input) + } +} + +fn main() { + // gr(a|e)y + let graey_regex = ("gr", (Or { lhs: "a", rhs: "e" }, "y")); + + // NOTE: leftover ignored in Eq: Match + let result = graey_regex.match("gray".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 4, leftover: &[] }); + + // NOTE: leftover ignored in Eq: Match + let result = graey_regex.match("grey".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 4, leftover: &[] }); + + // colou?r + let colour_regex = ("colo", (Question { inner: "u" }, "r")); + + let result = colour_regex.match("color".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 5, leftover: &[] }); + + let result = colour_regex.match("colour".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 6, leftover: &[] }); + + // parse the empty string three times + // EMPTY{3} + let three_empties_regex: Repeated<(), 3> = Repeated { inner: () }; + + let result = three_empties_regex.match("111".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 0, leftover: &[] }); + + // 1{0} + let zero_ones_regex: Repeated, 0> = Repeated { inner: "1" }; + + let result = zero_ones_regex.match("111".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 0, leftover: &[] }); + + // 1{1} + let one_ones_regex: Repeated, 1> = Repeated { inner: "1" }; + + let result = one_ones_regex.match("111".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 1, leftover: &[] }); + + // 1{2} + let two_ones_regex: Repeated, 2> = Repeated { inner: "1" }; + + let result = two_ones_regex.match("111".as_bytes().as_slice()); + println(result); + assert_eq(result, Match { succeeded: true, match_ends: 2, leftover: &[] }); + + // 1{3} + let three_ones_regex: Repeated, 3> = Repeated { inner: "1" }; + + let result = three_ones_regex.match("1111".as_bytes().as_slice()); + println(result); + assert_eq(result, Match 
{ succeeded: true, match_ends: 3, leftover: &[] }); + // TODO(https://github.com/noir-lang/noir/issues/5462): re-enable these cases and complete the test using array_regex below + // + // // 1* + // let ones_regex: Star, 5> = Star { inner: "1" }; + // + // let result = ones_regex.match("11000".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { succeeded: true, match_ends: 2, leftover: &[] }); + // + // let result = ones_regex.match("11".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { succeeded: true, match_ends: 2, leftover: &[] }); + // + // let result = ones_regex.match("111111".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { succeeded: true, match_ends: 5, leftover: &[] }); + // + // + // // 1+ + // let nonempty_ones_regex: Plus, 5, 4> = Plus { inner: "1" }; + // + // let result = nonempty_ones_regex.match("111111".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { succeeded: true, match_ends: 5, leftover: &[] }); + // + // // 2^n-1 in binary: 1+0 + // let pred_pow_two_regex = (nonempty_ones_regex, "0"); + // + // let result = pred_pow_two_regex.match("1110".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { succeeded: true, match_ends: 3, leftover: &[] }); + // + // // (0|1)* + // let binary_regex: Star, str<1>>, 5> = Star { inner: Or { lhs: "0", rhs: "1" } }; + // + // let result = binary_regex.match("110100".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { succeeded: true, match_ends: 5, leftover: &[] }); + // + // // even numbers in binary: 1(0|1)*0 + // let even_binary_regex = ("1", (binary_regex, "0")); + // + // let result = even_binary_regex.match("1111110".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { succeeded: true, match_ends: 6, leftover: &[] }); + // 2-letter capitalized words: [A-Z][a-z] + // numbers: \d+ + // [0-9]+ + // words: \w+ + // [a-Z]+ + // adapted URL parser: (https?:\/\/)?([\da-z.\-]+)\.([a-z.]+)([\/\w \.\-]*)*\/? 
+ // // panics (at compile time) when input string is too short + // let foo_regex = ( + // "colo", + // ( + // Question { + // inner: "u", + // }, + // "r" + // ) + // ); + // + // let result = foo_regex.match("colo".as_bytes().as_slice()); + // println(result); + // assert_eq(result, Match { + // succeeded: true, + // match_ends: 4, + // leftover: &[], + // }); +} + +// array_regex: use to complete test once https://github.com/noir-lang/noir/issues/5462 is resolved +// +// // offset <= len <= N +// struct Bvec { +// inner: [T; N], +// +// // elements at indices < offset are zero +// offset: u32, +// +// // elements at indices >= len are zero +// len: u32, +// } +// +// impl Eq for Bvec where T: Eq { +// fn eq(self, other: Self) -> bool { +// (self.inner == other.inner) & +// (self.offset == other.offset) & +// (self.len == other.len) +// } +// } +// +// impl Bvec { +// fn empty() -> Self { +// Self { inner: [std::unsafe::zeroed(); N], offset: 0, len: 0 } +// } +// +// fn new(array: [T; N]) -> Self { +// let mut result = Bvec::empty(); +// for x in array { +// result = result.push(x); +// } +// result +// } +// +// // pushing when len == N is a no-op +// fn push(self, x: T) -> Self { +// let mut inner = self.inner; +// let mut len = self.len; +// if self.len < N { +// inner[self.len] = x; +// len += 1; +// } +// +// Self { inner, offset: self.offset, len } +// } +// +// fn pop_front(self) -> (T, Self) { +// assert(self.offset <= self.inner.len()); +// assert(self.len != 0); +// +// let first_elem = self.inner[self.offset]; +// let popped_slice = Self { inner: self.inner, offset: self.offset + 1, len: self.len - 1 }; +// +// (first_elem, popped_slice) +// } +// } +// +// struct Match { +// succeeded: bool, +// match_ends: u32, +// leftover: Bvec, +// } +// +// impl Match { +// fn empty(leftover: Bvec) -> Self { +// Match { succeeded: true, match_ends: 0, leftover } +// } +// +// fn failed(leftover: Bvec) -> Self { +// Match { succeeded: false, match_ends: 0, leftover } +// } +// } +// +// impl Eq for Match { +// fn eq(self, other: Self) -> bool { +// (self.succeeded == other.succeeded) & +// (self.match_ends == other.match_ends) & +// (self.leftover == other.leftover) +// } +// } +// +// // TODO: load match into str and assert that it's the correct length +// // impl From for str +// +// trait Regex { +// // Perform a match without backtracking +// fn match(self, input: Bvec) -> Match; +// } +// +// // Empty +// impl Regex for () { +// fn match(_self: Self, input: Bvec) -> Match { +// Match::empty(input) +// } +// } +// +// // Exact +// impl Regex for str { +// fn match(self, input: Bvec) -> Match { +// let mut leftover = input; +// let mut matches_input = true; +// let self_as_bytes = self.as_bytes(); +// for c in self_as_bytes { +// if leftover.len != 0 { +// let (first_elem, popped_slice) = leftover.pop_front(); +// leftover = popped_slice; +// matches_input &= first_elem == c; +// } else { +// matches_input = false; +// } +// } +// if matches_input { +// Match { +// succeeded: true, +// match_ends: self_as_bytes.len(), +// leftover, +// } +// } else { +// Match { +// succeeded: false, +// match_ends: 0, +// leftover: input, +// } +// } +// } +// } +// +// // And +// impl Regex for (T, U) where T: Regex, U: Regex { +// fn match(self, input: Bvec) -> Match { +// let lhs_result = self.0.match(input); +// if lhs_result.succeeded { +// let rhs_result = self.1.match(lhs_result.leftover); +// if rhs_result.succeeded { +// Match { +// succeeded: true, +// match_ends: lhs_result.match_ends + 
rhs_result.match_ends, +// leftover: rhs_result.leftover, +// } +// } else { +// Match { +// succeeded: false, +// match_ends: 0, +// leftover: input, +// } +// } +// } else { +// Match { +// succeeded: false, +// match_ends: 0, +// leftover: input, +// } +// } +// } +// } +// +// // N T's: (T, (T, (T, T))) +// struct Repeated { +// inner: T, +// } +// +// impl Regex for Repeated where T: Regex { +// fn match(self, input: Bvec) -> Match { +// let mut result = Match::empty(input); +// for _ in 0..M { +// if result.succeeded { +// let next_result = self.inner.match(result.leftover); +// result = Match { +// succeeded: next_result.succeeded, +// match_ends: result.match_ends + next_result.match_ends, +// leftover: next_result.leftover, +// }; +// } +// } +// result +// } +// } +// +// struct Or { +// lhs: T, +// rhs: U, +// } +// +// impl Regex for Or where T: Regex, U: Regex { +// fn match(self, input: Bvec) -> Match { +// let lhs_result = self.lhs.match(input); +// if lhs_result.succeeded { +// lhs_result +// } else { +// self.rhs.match(input) +// } +// } +// } +// +// struct Question { +// inner: T, +// } +// +// impl Regex for Question where T: Regex { +// fn match(self, input: Bvec) -> Match { +// Or { +// lhs: self.inner, +// rhs: (), +// }.match(input) +// } +// } +// +// // 0 <= num_matches <= N +// struct Star { +// inner: T, +// } +// +// impl Regex for Star where T: Regex { +// fn match(self, input: Bvec) -> Match { +// let regex: Repeated<_, M> = Repeated { +// inner: Question { inner: self.inner }, +// }; +// regex.match(input) +// } +// } +// +// // 0 < num_matches <= N +// struct Plus { +// inner: T, +// } +// +// impl Regex for Plus where T: Regex { +// fn match(self, input: Bvec) -> Match { +// std::static_assert(M_PRED + 1 == M, "M - 1 != M_PRED"); +// let star: Star = Star { inner: self.inner }; +// ( +// self.inner, +// star +// ).match(input) +// } +// } +// +// // Repeated is to (,) as AnyOf is to Or +// struct AnyOf { +// inner: [T; N], +// } +// +// impl Regex for AnyOf where T: Regex { +// fn match(self, input: Bvec) -> Match { +// let mut result = Match::failed(input); +// for i in 0..M { +// if !result.succeeded { +// result = self.inner[i].match(result.leftover); +// } +// } +// result +// } +// } +// +// fn reverse_array(input: [T; N]) -> [T; N] { +// let mut output = [std::unsafe::zeroed(); N]; +// for i in 0..N { +// output[i] = input[N - (i + 1)]; +// } +// output +// } +// +// fn main() { +// assert_eq(reverse_array([1, 2, 3, 4]), [4, 3, 2, 1]); +// +// let mut xs: Bvec = Bvec::empty(); +// +// xs = xs.push(0); +// assert_eq(xs, Bvec { inner: [0, 0, 0], offset: 0, len: 1 }); +// +// xs = xs.push(1); +// assert_eq(xs, Bvec { inner: [0, 1, 0], offset: 0, len: 2 }); +// +// xs = xs.push(2); +// assert_eq(xs, Bvec { inner: [0, 1, 2], offset: 0, len: 3 }); +// +// xs = xs.push(3); +// assert_eq(xs, Bvec { inner: [0, 1, 2], offset: 0, len: 3 }); +// +// let ys = Bvec::new([0, 1, 2]); +// assert_eq(xs, ys); +// +// // test that pop_front gives all contents, in order, +// // followed by std::unsafe::zeroed() +// println(xs); +// let (x, new_xs) = xs.pop_front(); +// assert_eq(x, 0); +// +// xs = new_xs; +// println(xs); +// let (x, new_xs) = xs.pop_front(); +// assert_eq(x, 1); +// +// xs = new_xs; +// println(xs); +// let (x, new_xs) = xs.pop_front(); +// assert_eq(x, 2); +// +// xs = new_xs; +// println(xs); +// if xs.len != 0 { +// let (x, _new_xs) = xs.pop_front(); +// assert_eq(x, std::unsafe::zeroed()); +// } +// +// assert_eq(new_xs, Bvec { inner: [0, 1, 2], 
offset: 3, len: 0 });
+//
+// // gr(a|e)y
+// let graey_regex = ("gr", (Or { lhs: "a", rhs: "e" }, "y"));
+//
+// let result = graey_regex.match(Bvec::new("gray".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 4);
+// assert_eq(result.leftover.len, 0);
+//
+// let result = graey_regex.match(Bvec::new("grey".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 4);
+// assert_eq(result.leftover.len, 0);
+//
+// // colou?r
+// let colour_regex = ("colo", (Question { inner: "u" }, "r"));
+//
+// let result = colour_regex.match(Bvec::new("color".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 5);
+// assert_eq(result.leftover.len, 0);
+//
+// let result = colour_regex.match(Bvec::new("colour".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 6);
+// assert_eq(result.leftover.len, 0);
+//
+// // parse the empty string three times
+// // EMPTY{3}
+// let three_empties_regex: Repeated<(), 3> = Repeated { inner: () };
+//
+// let result = three_empties_regex.match(Bvec::new("111".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 0);
+// assert_eq(result.leftover.len, 3);
+//
+// // 1{0}
+// let zero_ones_regex: Repeated<str<1>, 0> = Repeated { inner: "1" };
+//
+// let result = zero_ones_regex.match(Bvec::new("111".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 0);
+// assert_eq(result.leftover.len, 3);
+//
+// // 1{1}
+// let one_ones_regex: Repeated<str<1>, 1> = Repeated { inner: "1" };
+//
+// let result = one_ones_regex.match(Bvec::new("111".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 1);
+// assert_eq(result.leftover.len, 2);
+//
+// // 1{2}
+// let two_ones_regex: Repeated<str<1>, 2> = Repeated { inner: "1" };
+//
+// let result = two_ones_regex.match(Bvec::new("111".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 2);
+// assert_eq(result.leftover.len, 1);
+//
+// // 1{3}
+// let three_ones_regex: Repeated<str<1>, 3> = Repeated { inner: "1" };
+//
+// let result = three_ones_regex.match(Bvec::new("1111".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 3);
+// assert_eq(result.leftover.len, 1);
+//
+// // 1*
+// let ones_regex: Star<str<1>, 5> = Star { inner: "1" };
+//
+// let result = ones_regex.match(Bvec::new("11000".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 2);
+// assert_eq(result.leftover.len, 3);
+//
+// let result = ones_regex.match(Bvec::new("11".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 2);
+// assert_eq(result.leftover.len, 0);
+//
+// let result = ones_regex.match(Bvec::new("111111".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 5);
+// assert_eq(result.leftover.len, 1);
+//
+// // 1+
+// let nonempty_ones_regex: Plus<str<1>, 5, 4> = Plus { inner: "1" };
+//
+// let result = nonempty_ones_regex.match(Bvec::new("111111".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 5);
+// assert_eq(result.leftover.len, 1);
+//
+// // 2^n-1 in binary: 1+0
+// let pred_pow_two_regex = (nonempty_ones_regex, "0");
+//
+// let result = pred_pow_two_regex.match(Bvec::new("1110".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 4);
+// assert_eq(result.leftover.len, 0);
+//
+// // (0|1)*
+// let binary_regex: Star<Or<str<1>, str<1>>, 5> = Star { inner: Or { lhs: "0", rhs: "1" } };
+//
+// let result = binary_regex.match(Bvec::new("110100".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 5);
+// assert_eq(result.leftover.len, 1);
+//
+// // even numbers in binary: 1(0|1)*0
+// let even_binary_regex = ("1", (binary_regex, "0"));
+//
+// let result = even_binary_regex.match(Bvec::new("1111110".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 7);
+// assert_eq(result.leftover.len, 0);
+//
+// // digit: \d+
+// // [0-9]
+// let digit_regex = AnyOf {
+//     inner: [
+//         "0",
+//         "1",
+//         "2",
+//         "3",
+//         "4",
+//         "5",
+//         "6",
+//         "7",
+//         "8",
+//         "9"
+//     ]
+// };
+//
+// let result = digit_regex.match(Bvec::new("157196345823795".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 1);
+// assert_eq(result.leftover.len, 14);
+//
+// let result = digit_regex.match(Bvec::new("hi".as_bytes()));
+// println(result);
+// assert(!result.succeeded);
+// assert_eq(result.match_ends, 0);
+// assert_eq(result.leftover.len, 2);
+//
+// // digits: \d+
+// // [0-9]+
+// let digits_regex: Plus<AnyOf<str<1>, 10>, 32, 31> = Plus { inner: digit_regex };
+//
+// let result = digits_regex.match(Bvec::new("123456789012345".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 15);
+// assert_eq(result.leftover.len, 0);
+//
+// let result = digits_regex.match(Bvec::new("123456789012345 then words".as_bytes()));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 15);
+// assert_eq(result.leftover.len, 11);
+//
+// // multiples of 10
+// // apply to a reversed input string (because there isn't backtracking)
+// // 0\d+
+// let backwards_mult_of_10_regex = ("0", digits_regex);
+//
+// let result = backwards_mult_of_10_regex.match(Bvec::new(reverse_array("1230".as_bytes())));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 4);
+// assert_eq(result.leftover.len, 0);
+//
+// let ten_pow_16: str<17> = "10000000000000000";
+// let result = backwards_mult_of_10_regex.match(Bvec::new(reverse_array(ten_pow_16.as_bytes())));
+// println(result);
+// assert(result.succeeded);
+// assert_eq(result.match_ends, 17);
+// assert_eq(result.leftover.len, 0);
+// // adapted URL parser: (https?:\/\/)?([\da-c.\-]+)\.([a-c.]+)([\/\w \.\-]*)*\/?
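
The commented-out combinators above all follow the same non-backtracking, prefix-matching design: a tuple is concatenation, Or is alternation, and Question/Star/Plus are derived from Repeated. As a rough illustration of that design (this is a standalone Rust analogue, not the Noir test code, and the trait name Regex, the match_prefix method and these structs are illustrative only), a minimal sketch:

// Illustrative Rust analogue of the non-backtracking combinators above.
trait Regex {
    // Returns how many leading bytes of `input` this pattern consumes, or None on failure.
    fn match_prefix(&self, input: &[u8]) -> Option<usize>;
}

// Exact match: a literal consumes exactly its own bytes.
impl<'a> Regex for &'a str {
    fn match_prefix(&self, input: &[u8]) -> Option<usize> {
        let bytes = self.as_bytes();
        input.starts_with(bytes).then(|| bytes.len())
    }
}

// Concatenation: run the left pattern, then the right pattern on the leftover.
impl<A: Regex, B: Regex> Regex for (A, B) {
    fn match_prefix(&self, input: &[u8]) -> Option<usize> {
        let first = self.0.match_prefix(input)?;
        let second = self.1.match_prefix(&input[first..])?;
        Some(first + second)
    }
}

// Alternation without backtracking: the first branch that succeeds wins.
struct Or<A, B> { lhs: A, rhs: B }

impl<A: Regex, B: Regex> Regex for Or<A, B> {
    fn match_prefix(&self, input: &[u8]) -> Option<usize> {
        self.lhs.match_prefix(input).or_else(|| self.rhs.match_prefix(input))
    }
}

// Zero-or-one: try the inner pattern, otherwise match the empty string.
struct Question<A> { inner: A }

impl<A: Regex> Regex for Question<A> {
    fn match_prefix(&self, input: &[u8]) -> Option<usize> {
        Some(self.inner.match_prefix(input).unwrap_or(0))
    }
}

fn main() {
    // colou?r, as in the commented-out test: both spellings match, greedily, no backtracking.
    let colour = ("colo", (Question { inner: "u" }, "r"));
    assert_eq!(colour.match_prefix(b"color"), Some(5));
    assert_eq!(colour.match_prefix(b"colour"), Some(6));
    // gr(a|e)y
    let graey = ("gr", (Or { lhs: "a", rhs: "e" }, "y"));
    assert_eq!(graey.match_prefix(b"grey"), Some(4));
    println!("combinator sketch ok");
}
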
+// } + diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml deleted file mode 100644 index fc5e6002dbf..00000000000 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml +++ /dev/null @@ -1,4 +0,0 @@ -key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" -proof = ["0x0000000000000000000000000000000000000000000000000000000000000010","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001","0x00000000000000000000000000000079ea57b3d7247e1b84fc1ab449de746345","0x000000000000000000000000000000000023fb17d477c91e0fb057233a66ef2a","0x000000000000000000000000000000146353d3faf24455819947aa0a25868174","0x00000000000000000000000000000000000093b1c637419c9f016bb0261cdfc6","0x000000000000000000000000000000325b128a84544d31fa1c577232c742b574","0x00000000000000000000000000000000002b3db93a2fca4c31308471d4f55fa2","0x00000000000000000000000000000054d9d87932eee6280c37d802ec8d47ca02","0x000000000000000000000000000000000000397167bb1e36d061487e93e4d97e","0x000000000000000000000000000000143b0960a1b9f19a44ad1cf2b7059832d6","0x0000000000000000000000000000000000158446576b2d43f78b48799ff7e760","0x000000000000000000000000000000cf640bad8ccc1890d738ab917d6caa957e","0x00000000000000000000000000000000001d6fd185d8771b864545438c6a1d68","0x000000000000000000000000000000a33cd928d0d4c7f244824b63b15f4c5423","0x00000000000000000000000000000000000433ccd872d2a302104048474e0bea","0x000000000000000000000000000000eaf7d13e5e9706e1b8a9343bd493a060af","0x00000000000000000000000000000000001a062842ba351b311ae52693f5114e","0x000000000000000000000000000000a33cd928d0d4c7f244824b63b15f4c5423","0x00000000000000000000000000000000000433ccd872d2a302104048474e0bea","0x000000000000000000000000000000eaf7d13e5e9706e1b8a9343bd493a060af","0x00000000000000000000000000000000001a062842ba351b311ae52693f5114e","0x000000000000000000000000000000160d90f214f524875c01cb9cf0f2d272b9","0x000000000000000000000000000000000015d5f906c4fe06017b0f9824434d09","0x0000000000000000000000000000007fc2db3cfe49b7666aeafd8cf6973c9fed","0x00000000000000000000000000000000000c7fc1e545a8ee19a7bc6ad6f2ea47","0x000000000000000000000000000000fc3c9df244afbba117cd897a4c929edb84","0x0000000000000000000000000000000000216f0c3a2e5e8683d9717ad40eadde","0x000000000000000000000000000000c381b45048aa5163e0129e4031e29058cb","0x00000000000000000000000000000000002f11022de88492201c28f87582684d","0x000000000000000000000000000000c98462e08c423124d92a41110c378db160","0x00000000000000000000000000000000000106dafb059575ec9b926aa90edfef","0x0000000000000000000000000000007d0cc0465628f6b0f3918aa9d7cf33ff38","0x00000000000000000000000000000000002cff01344fc7c6f81399b7ae660ad4","0x07eff01a06f356d255515e5f27cb51e8873277beb3f986c215181b475df4dd8e","0x28745e58da3e495762fee75759b60674a1017089c5bfe9cf2ec9da4c920b2273","0x1d5b7b751e509ac70caa253595be4523d1963cf7bd6751d2c146e2fc10d00196","0x26fe27f73b55be7d49b4c1c11f085f47f6a241ba5ea0d48b47964e7adf5e8e5a","0x239206c519de2576a554a70f387cdf5d525a599541be2ecd9260e52d572ae07c","0x04e35b29a57c31c89c72a6387bf89613b64c2827e0c2402b8dfb2c1cfea0c878","0x1e8398c5dd85d15154110c2480f2249030aecd7595242ae86bbdf7b2730ca070","0x2ba9986a038e85a4dd96badffb6a44950c37360fd6e8ec6c4b9647377bcb45f5","0x27ca7a06ceea23d329c52dac8c0715440238d37362ab0fb1e26544b18bb79a3b","0x23b768d51fa7922f8292309455adc5730b8964818c328a42dff60a57add32f50","0x24e8634d53
81475abe5821450299d9d8d725a472610fe265e44c8360c4708c95","0x0cdbb73fe5c035427113e66a15b8c41e963ae215e491d855a3ce8c3ab200fb3b","0x0e8acd2ed6af85e4f71b96c51d2a57bceea5c50fb405b7888359f0635b415da7","0x2914cc0244acf5ac6d674d3c96d543ee2f3e95d0248ee66daf0cf2932107e300","0x00ff0384250d2c2e59cd1cf58cebd1d3b1ebab7989eb2eaa6b6bbce69f9e8ba0","0x253f7a5007d47d3d858fc0e172c971cb54f97cea5c63ca60efe61589913b2499","0x2d34704fc711dabe0f716dbebc5dfd0eaa5667006847d333dadc86e15bf672c0","0x0bdd67ff40c61242e46a234c0d438663a9ccae833d1e0b22833ffe41e2828bb4","0x04c7ba2edccfb340eba0c94a7a5d5d53b010939621053c7c0fd27f2ba4b08273","0x0c3f68e6de8042a10098596e80ea79882b37d22c6a6adaa64f5c668739932fa5","0x14bcb10845b45cb8fdcac13e41ad755f6d966756ee2f3d4ed8a5791d4b345ea8","0x0dd68c1e3d122d4d4b28a8ac7e6a592146afe70e3852906c27ccc7e345f745e2","0x06816aff04192007cb2b3ed2cee4b22e044ced0199b136942348ced61990c1a7","0x3013f13664687bc3cbe26314f17cf309486ef71ffb55ce2589075554fc31ee69","0x1941a602d47af0e52f06a272998b6a59313f316508c0778714a36d7bb4f5669b","0x268750f15f2ac995d1d59859b7d636ae814e751b84318218ac1ce75a14b00e18","0x2aaff14fd98aa13ffdf34e3f689e16b2e8cb7695db9a014dd270b021968e3bb2","0x090087ad0d688396823bbd90a8770c1101e5907efd1c4fbafff8a1e9f2f84d89","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000
000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000
000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000
000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x152deae3a77021b0201a74d98b30d842baea62c0d2531d69d5e866e59f48e052","0x084acb08dc53880864266b6dea02ec7a85ffab2ab590ba9a4adb32ad2c8ffe53","0x1b8ab1a2e47a839fdbf19d2cbea2abe79c57722270123cf96289a11e233cd175","0x03493f800f9abbe4e405f0f637f41f22dcc10e44e836a09115ed5821cd5856e6","0x24c358e686e47c512bbec4a1b9ac562c251e004ea142df44ea3b18cf214baa47","0x18296076ac89be1c4c24a04553be7bd07bba5a41d1c33de2bec14cfd1262ab9f","0x0e30341606dc2577a451251241394b3871e9db0e1758d250d36348bcbb8b6fdb","0x15f846978484540ac3c35eee38ccd980f01e8bda6050a645c4abca6f22b24619","0x2735dd2b603cde2937bf842002e8704ef1e3883d2d0a6e999dd7015496c10302","0x23c47d9891d04bdb88ca239119e423afdc6d2bd45fb92f5f19b8b0a9583fc863","0x1ce47f9088eecc7268d4558aa02a4902282bccaacbe882917cc57969af2236d0","0x2b5a6f937fcc921cced568de248e19fd3801e688505ee44af6499e14885c9879","0x2ae2f654890e7018bae8308b5a97230cdcd3b63b24334a05dc4fdc4107cff73d","0x06a87313997c2a5318a8ce0f75e26b9c4a2a83bd9c3578f10d1c1f3bfded8f29","0x0afe95fddb76f390d58e15b7e647e9ed083a66aa7829a18963125d865b64ef7f","0x1ff7ecaf04f4e8a9d57f79c85dd963099f6005f542df7c20505af69061473114","0x26ca489f39024294da78a601feda0a17c40d46e2c7d0787b47dc0afaf027a8c8","0x2da37034033c950b2f85c32be2b0f1102dae5ec01e13681ffc9a9a3033469a8d","0x22c35dc92f5bf1cb569ad756b45208ffa8a85d825ebacf8e7354e9162651d1fa","0x0e443f72c90fec92786098f7ec90cea01f6202db6998b34dbb1e7b0293f4bebd","0x049684508bb0af0f
27bcaaf96aa53eac25a425e159eb33e031db157d63c22fb9","0x20d990716bfec57f52f603d50d0d81c4c851bfc231894eb573fa54f2ac70c9dd","0x1fd19e900621d01488be88d4a6d95c2583c19c6d1d49e8cd139bce76051b71bc","0x1679a31a104b20b301737b9214f12a0707727bd4510d5a53e5bec1321816cdfa","0x27b3d8000581372f35039477c28a268065b3717dbd9337c06a82162781e0d131","0x23b79b53bdb698ef8c7c01afaf3350deb78b5e841e09b13b6ef86fc68f97bcab","0x1d4abc42698589c40b05e187c12af268fffe64010756a8d08ea817105305a770","0x0f744ca06905efa1f604f387284979be483c00ee9298134e7337bd8bb4a88933","0x0be6790122704c6ed4c37fef0b524b413e63b88c2dadbe67a5ba039cf11cc628","0x19fa34479d41d734a17619048627633807d706b2b1035a326efada8f3e8eb183","0x1b208f5cc663a9560e8685c351cb17b8e5862eb16f1407cf654e8ffae331aa9b","0x1b140725b61fe2e1057d72525aecf1d319ecb509a392f68e4058d13cea209993","0x1b140725b61fe2e1057d72525aecf1d319ecb509a392f68e4058d13cea209993","0x0d1703eac9b276094d72a50322dd82033960a6f1f6176aa2b029e39a1375bb51","0x09ba2a48cfdcc27f6b6db2ca277c5016d4f5a177e65eec6f68e30a67d4b06c1b","0x0e243bf8b2d6b8e46ed75902fe60781b2b41cf45287f367df850ce50de7f86af","0x1be244289270e4c0dc8517edfe335954fa7b56c3bf6fe06bc2d268f7db7a68ee","0x116ef1bfcfbca0612c92872aa3d07d32cb0b9716b1ba735846888a56e03c6207","0x0de8a7471ceb058680d2e0afa73e3dd843b527db1c16ebfaf8612447ffbee858","0x16911fee4120f72d81b0dfb0eeeb7380611119ee990daec5669b711cb35e2756","0x1c278b26a16e1ee7e21a71b67a31cb0b9907dae80776aa1dc7094ea5b4e2c34e","0x0f5c67db668b1f1880c51f805ec3d40aa27d34b4c8833f755d4466c285264399","0x000000000000000000000000000000dc2546d68fbe5a4913dde8ed73f673bc5f","0x00000000000000000000000000000000001310657525d78319e5b15c92398dcf","0x0000000000000000000000000000000fde9a035776897ed560b4d9ae338b5f85","0x00000000000000000000000000000000000f84fecfb3ea28426f114d9de93cb3","0x000000000000000000000000000000d3ea685110f3ff69bf91cc32cc5170b62e","0x0000000000000000000000000000000000179205f5ebaf3eaf5d50be462f830d","0x00000000000000000000000000000024a7284c15d725d62b8f5c1090b08b58b7","0x00000000000000000000000000000000002b6fdb2139f7b9443cbd82e6423486","0x00000000000000000000000000000006489f49eed3370ee31c80590eed2d0c3a","0x000000000000000000000000000000000010c11c3a122e00a12e0cf7a58d81ae","0x000000000000000000000000000000eb2d1eef7e7c7c0c054859600d264176e9","0x000000000000000000000000000000000028ac3239a0917c7c3761e11fbf9541","0x0000000000000000000000000000006ecbe6a2ccf0c9e1b743a84e1540796b81","0x0000000000000000000000000000000000098a99a81cbc111660301a03f77d96","0x000000000000000000000000000000c4f256019891f39b00b1b00428b3a154a5","0x00000000000000000000000000000000001bc2f83790ff1d3086273e4560135c","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000
000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000
000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000000000868795ebcbf38bffa96f455a314c7b9310","0x00000000000000000000000000000000002e43e0a550d7cce874e869ed0ef545","0x0000000000000000000000000000001e5a780edfd01526758b69bfaf25803f67","0x00000000000000000000000000000000000f0991f4b5dc348354f019ecc66502","0x000000000000000000000000000000cb917b7819afd60fc86ea477594ffca008","0x000000000000000000000000000000000002beaa7c144fc6620870e72ee8064c","0x000000000000000000000000000000b7f4dfed23506dadd1726a896e226d7a34","0x00000000000000000000000000000000001bb28f2fcfb40843aa5f5e38d689e1"] -public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] -verification_key = 
["0x0000000000000000000000000000000000000000000000000000000000000010","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000008c068dccb0e55d4b56c7c32ebfafeb5b02","0x0000000000000000000000000000000000266c985199590a284abf77ba01c36e","0x00000000000000000000000000000044fb25551548bb4059099673aed8646b35","0x000000000000000000000000000000000023ab8c745db114ee56b97a8aa27934","0x000000000000000000000000000000a56563b42599f0e4ad5455a8fd988b9ff3","0x00000000000000000000000000000000000a94e3640063f3f4758fcfe457d298","0x0000000000000000000000000000008a51861ca043ceae044d6ab4f136331514","0x00000000000000000000000000000000001812e50744ac8ed3cc4a9594a701cc","0x000000000000000000000000000000b911c8cf179747b410c1cb9fd8a8bde095","0x00000000000000000000000000000000001826edb9faf624498fe82f5a61008d","0x000000000000000000000000000000ed158ea534a9c72ec9e614906fd7adff9a","0x000000000000000000000000000000000017cb9637e464dc2647b9b8688c5fa0","0x0000000000000000000000000000004b5064dd55e5ec8cd9bdd01c0e22eb7122","0x00000000000000000000000000000000002c7cff0caa8ba3fec7523dcbc934a8","0x000000000000000000000000000000f268df76bf0d78739ded43daba9c339499","0x00000000000000000000000000000000002e11974b75c78b276ae16219b99dc9","0x000000000000000000000000000000cfc293980c0ecf813f4f1436ff140740c3","0x000000000000000000000000000000000016ff2972a7eedf8ff27f494904fa47","0x00000000000000000000000000000085a92cc2b6efec726ea10710b20776ee70","0x0000000000000000000000000000000000278709e98b64a3553dc3e6e514e7ff","0x0000000000000000000000000000004391d81714b7d7ad40642b9308d02258b4","0x0000000000000000000000000000000000207710f769c857fbe624a2333097b2","0x0000000000000000000000000000002f767ee4790206ca5c193b742aa672d6d8","0x00000000000000000000000000000000001044cdbbd63806d10426ca4cb77cbc","0x000000000000000000000000000000314be7aecd2a710b8966befe7c0b08f574","0x00000000000000000000000000000000000558190b4fa7d726895b6d7d9c0bef","0x000000000000000000000000000000d64f3a11faf61b8776b0e778ab7a16c09c","0x00000000000000000000000000000000000d1c3d5e8fe0193b17834424ce605d","0x000000000000000000000000000000d8019ded441b9e454eb4045069cefee487","0x00000000000000000000000000000000002c066d46d386975a57df073e19403b","0x0000000000000000000000000000006bf779063abc501d4102fbfc99d4227c16","0x00000000000000000000000000000000001bbf8b9e8c4b2184984b994c744d21","0x0000000000000000000000000000003896ea793e6b3f6a14218d476534109610","0x00000000000000000000000000000000000e84090add56f2500ab518c655cae6","0x00000000000000000000000000000065df446fdddba972f3c4414ad3c901f4f9","0x00000000000000000000000000000000002b78a584bd6ae88cf4ec7c65c90e0b","0x00000000000000000000000000000094e611b5d59a27773f744710b476fbd30f","0x00000000000000000000000000000000001bd6129f9646aa21af0d77e7b1cc97","0x000000000000000000000000000000139a9d1593d56e65e710b2f344756b721e","0x00000000000000000000000000000000002f8d492d76a22b6834f0b88e2d4096","0x00000000000000000000000000000026c814cd7c5e1ba2094969bb1d74f1c66b","0x000000000000000000000000000000000013129f0714c3307644809495e01504","0x0000000000000000000000000000007d4549a4df958fe4825e7cb590563154ab","0x00000000000000000000000000000000000e7d5873232b1bdd0ce181513b47d1","0x000000000000000000000000000000a54541a8f32c0d9f8645edf17aac8fa230","0x00000000000000000000000000000000001e0677756494ded8010e8ef02518b2","0x0000000000000000000000000000008b101700e2d4f9116b01bfaaf3c458a423","0x0000000000000000000000000000000000021e43a3c385eba62bcc47aad7b9ea","0x00000000000000000000000000000099
559d1c1ed6758494d18b9890bb5e3f97","0x00000000000000000000000000000000002e68b3c679543d2933bf9f7f77d422","0x000000000000000000000000000000c842dceb89f5cf4c130810f4802014a67f","0x00000000000000000000000000000000000d647daa6d2a8ac14f2da194b3a27e","0x000000000000000000000000000000af641be24f11d735581ad2e14787470194","0x00000000000000000000000000000000001e90f381ece8401026212fdbb26199","0x000000000000000000000000000000f601a4b716e755b0cf516d07e403265e27","0x00000000000000000000000000000000002d49d628876caa6993afe9fc30a764","0x0000000000000000000000000000008e9de4c6ce2e85105ec90ab63303b61502","0x00000000000000000000000000000000001b063563a7858b064132573e0aca86","0x00000000000000000000000000000021c200c8468139aa32fcf13fd1d8570828","0x0000000000000000000000000000000000023a4e744c62548c3b32986b3bc73a","0x0000000000000000000000000000000af941f79a4d93c6e9aad19c6049e1fa53","0x000000000000000000000000000000000003db2201f4b1b9a4d3646331e1f8e1","0x00000000000000000000000000000005d91fe16bd2b8dd3ce8b7d70ce6222b4f","0x0000000000000000000000000000000000102db0f3fd668e06f49d133d1bf994","0x0000000000000000000000000000009459915944c39a12b978a433efb6517d0f","0x00000000000000000000000000000000000b1c9fa9f4ce17e53f3acd13be4078","0x0000000000000000000000000000007c8d45be92476f8867dca4078fb7b6b2f8","0x00000000000000000000000000000000001f21afb9b7ccd5c404f0115253d2a6","0x0000000000000000000000000000004d78a34b40208c31be4fb8b39d23f1d1de","0x00000000000000000000000000000000000f3090488b19df76c4358537728d9a","0x00000000000000000000000000000060b0272756debcae50a25a3ee7d7095ea9","0x00000000000000000000000000000000002e84bca0d93b098853cca06147ec94","0x000000000000000000000000000000a0875603e0a017ce12ff79764af43e7421","0x0000000000000000000000000000000000245798a7b19502ba14b46eb68dc771","0x00000000000000000000000000000089b25e854077925674d0645ed1e784c929","0x000000000000000000000000000000000008b8347d14433adba1d9e9406eb1db","0x000000000000000000000000000000d0d3258758dfa9bae9e415f6d48d990e16","0x0000000000000000000000000000000000224948ddbcddb1e360efa2ac511aac","0x000000000000000000000000000000f6a101330e9f928dc80a3d3b9afefb373a","0x00000000000000000000000000000000001011627c159ab9f3ff0a0416a01df6","0x0000000000000000000000000000002ec420ad50087360c152c131400547bcc6","0x000000000000000000000000000000000018dab63316305864682bfe7b586e91","0x0000000000000000000000000000004bd9f352c132c7ae6bed5ea997693e6300","0x00000000000000000000000000000000001edb4d30542aa0ac4fe8eb31fc2ce0","0x0000000000000000000000000000008bcf42c24591e90cf41fc687829fe0b0aa","0x000000000000000000000000000000000027a49cd522a4fbbdfc8846331514de","0x000000000000000000000000000000bdfbf1d964fcfb887c3631ef202797fc2f","0x00000000000000000000000000000000001432caafa62e791082fd900fcb34a1","0x0000000000000000000000000000006f99a40f79f14ed78a291d53d0425ddc9d","0x000000000000000000000000000000000007ea92c2de0345ded1d25b237f0845","0x000000000000000000000000000000bc1328fa2c343da93cb98486d414f0a40a","0x0000000000000000000000000000000000255aeaa6894472e3cb6b0a790cf290","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000008775499e69e8bd2c39af33bd5fa0b4079a","0x0000000000000000000000000000000000024236bda126650fb5228cf424a087","0x000000000000000000000000000000b0eb1a867b06854066589b967455259b32","0x0000000000000000000000000000000000233cda9292be02cfa2da9d0fc7b0ea"] 
\ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr deleted file mode 100644 index a18403eba71..00000000000 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr +++ /dev/null @@ -1,21 +0,0 @@ - -// This circuit aggregates a single Honk proof from `assert_statement_recursive`. -global SIZE_OF_PROOF_IF_LOGN_IS_28 : u32 = 393; -fn main( - verification_key: [Field; 103], - // This is the proof without public inputs attached. - // - // This means: the size of this does not change with the number of public inputs. - proof: [Field; SIZE_OF_PROOF_IF_LOGN_IS_28], - public_inputs: pub [Field; 1], - // This is currently not public. It is fine given that the vk is a part of the circuit definition. - // I believe we want to eventually make it public too though. - key_hash: Field -) { - std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash - ); -} diff --git a/noir/noir-repo/tooling/lsp/src/lib.rs b/noir/noir-repo/tooling/lsp/src/lib.rs index c7b70339e1d..88aab65c6fa 100644 --- a/noir/noir-repo/tooling/lsp/src/lib.rs +++ b/noir/noir-repo/tooling/lsp/src/lib.rs @@ -236,38 +236,14 @@ fn byte_span_to_range<'a, F: files::Files<'a> + ?Sized>( } } -pub(crate) fn resolve_workspace_for_source_path( - file_path: &Path, - root_path: &Option, -) -> Result { - // If there's a LSP root path, starting from file_path go up the directory tree - // searching for Nargo.toml files. The last one we find is the one we'll use - // (we'll assume Noir workspaces aren't nested) - if let Some(root_path) = root_path { - let mut current_path = file_path; - let mut current_toml_path = None; - while current_path.starts_with(root_path) { - if let Some(toml_path) = find_file_manifest(current_path) { - current_toml_path = Some(toml_path); - - if let Some(next_path) = current_path.parent() { - current_path = next_path; - } else { - break; - } - } else { - break; - } - } - - if let Some(toml_path) = current_toml_path { - return resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) - .map_err(|err| LspError::WorkspaceResolutionError(err.to_string())); - } +pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result { + if let Some(toml_path) = find_file_manifest(file_path) { + return resolve_workspace_from_toml( + &toml_path, + PackageSelection::All, + Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + ) + .map_err(|err| LspError::WorkspaceResolutionError(err.to_string())); } let Some(parent_folder) = file_path @@ -313,7 +289,7 @@ pub(crate) fn prepare_package<'file_manager, 'parsed_files>( package: &Package, ) -> (Context<'file_manager, 'parsed_files>, CrateId) { let (mut context, crate_id) = nargo::prepare_package(file_manager, parsed_files, package); - context.track_references(); + context.activate_lsp_mode(); (context, crate_id) } @@ -334,7 +310,7 @@ fn prepare_source(source: String, state: &mut LspState) -> (Context<'static, 'st let parsed_files = parse_diff(&file_manager, state); let mut context = Context::new(file_manager, parsed_files); - context.track_references(); + context.activate_lsp_mode(); let root_crate_id = prepare_crate(&mut context, file_name); @@ -428,7 +404,7 @@ fn prepare_package_from_source_string() { let mut state = LspState::new(&client, acvm::blackbox_solver::StubbedBlackBoxSolver); let (mut context, 
crate_id) = crate::prepare_source(source.to_string(), &mut state); - let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false, None); + let _check_result = noirc_driver::check_crate(&mut context, crate_id, &Default::default()); let main_func_id = context.get_main_function(&crate_id); assert!(main_func_id.is_some()); } diff --git a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs index 24409e85db8..56aef90cfde 100644 --- a/noir/noir-repo/tooling/lsp/src/notifications/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/notifications/mod.rs @@ -37,15 +37,9 @@ pub(super) fn on_did_open_text_document( state.input_files.insert(params.text_document.uri.to_string(), params.text_document.text); let document_uri = params.text_document.uri; - let only_process_document_uri_package = false; let output_diagnostics = true; - match process_workspace_for_noir_document( - state, - document_uri, - only_process_document_uri_package, - output_diagnostics, - ) { + match process_workspace_for_noir_document(state, document_uri, output_diagnostics) { Ok(_) => { state.open_documents_count += 1; ControlFlow::Continue(()) @@ -62,15 +56,9 @@ pub(super) fn on_did_change_text_document( state.input_files.insert(params.text_document.uri.to_string(), text.clone()); let document_uri = params.text_document.uri; - let only_process_document_uri_package = true; - let output_diagnotics = false; + let output_diagnostics = false; - match process_workspace_for_noir_document( - state, - document_uri, - only_process_document_uri_package, - output_diagnotics, - ) { + match process_workspace_for_noir_document(state, document_uri, output_diagnostics) { Ok(_) => ControlFlow::Continue(()), Err(err) => ControlFlow::Break(Err(err)), } @@ -90,15 +78,9 @@ pub(super) fn on_did_close_text_document( } let document_uri = params.text_document.uri; - let only_process_document_uri_package = true; - let output_diagnotics = false; + let output_diagnostics = false; - match process_workspace_for_noir_document( - state, - document_uri, - only_process_document_uri_package, - output_diagnotics, - ) { + match process_workspace_for_noir_document(state, document_uri, output_diagnostics) { Ok(_) => ControlFlow::Continue(()), Err(err) => ControlFlow::Break(Err(err)), } @@ -109,15 +91,9 @@ pub(super) fn on_did_save_text_document( params: DidSaveTextDocumentParams, ) -> ControlFlow> { let document_uri = params.text_document.uri; - let only_process_document_uri_package = false; - let output_diagnotics = true; + let output_diagnostics = true; - match process_workspace_for_noir_document( - state, - document_uri, - only_process_document_uri_package, - output_diagnotics, - ) { + match process_workspace_for_noir_document(state, document_uri, output_diagnostics) { Ok(_) => ControlFlow::Continue(()), Err(err) => ControlFlow::Break(Err(err)), } @@ -129,17 +105,15 @@ pub(super) fn on_did_save_text_document( pub(crate) fn process_workspace_for_noir_document( state: &mut LspState, document_uri: lsp_types::Url, - only_process_document_uri_package: bool, output_diagnostics: bool, ) -> Result<(), async_lsp::Error> { let file_path = document_uri.to_file_path().map_err(|_| { ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let workspace = - resolve_workspace_for_source_path(&file_path, &state.root_path).map_err(|lsp_error| { - ResponseError::new(ErrorCode::REQUEST_FAILED, lsp_error.to_string()) - })?; + let workspace = 
resolve_workspace_for_source_path(&file_path).map_err(|lsp_error| { + ResponseError::new(ErrorCode::REQUEST_FAILED, lsp_error.to_string()) + })?; let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); insert_all_files_for_workspace_into_file_manager( @@ -155,14 +129,10 @@ pub(crate) fn process_workspace_for_noir_document( .flat_map(|package| -> Vec { let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); - if only_process_document_uri_package && !file_path.starts_with(&package.root_dir) { - return vec![]; - } - let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - let file_diagnostics = match check_crate(&mut context, crate_id, false, false, None) { + let file_diagnostics = match check_crate(&mut context, crate_id, &Default::default()) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; diff --git a/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs b/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs index 51336a324da..9799cf875a9 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/code_lens_request.rs @@ -63,8 +63,7 @@ fn on_code_lens_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not read file from disk") })?; - let workspace = - resolve_workspace_for_source_path(file_path.as_path(), &state.root_path).unwrap(); + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); let package = crate::workspace_package_for_file(&workspace, &file_path).ok_or_else(|| { ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file") @@ -73,7 +72,7 @@ fn on_code_lens_request_inner( let (mut context, crate_id) = prepare_source(source_string, state); // We ignore the warnings and errors produced by compilation for producing code lenses // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false, None); + let _ = check_crate(&mut context, crate_id, &Default::default()); let collected_lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); diff --git a/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs b/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs index 67e2505d8fd..20fdfb6ece7 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/document_symbol.rs @@ -427,7 +427,7 @@ impl<'a> DocumentSymbolCollector<'a> { return; }; - let name = name_path.last_segment(); + let name = name_path.last_ident(); let Some(name_location) = self.to_lsp_location(name.span()) else { return; diff --git a/noir/noir-repo/tooling/lsp/src/requests/hover.rs b/noir/noir-repo/tooling/lsp/src/requests/hover.rs index 161fd20f555..73ea504b496 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/hover.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/hover.rs @@ -1,17 +1,18 @@ use std::future::{self, Future}; use async_lsp::ResponseError; +use fm::FileMap; use lsp_types::{Hover, HoverContents, HoverParams, MarkupContent, MarkupKind}; use noirc_frontend::{ ast::Visibility, graph::CrateId, hir::def_map::ModuleId, - hir_def::stmt::HirPattern, + hir_def::{stmt::HirPattern, traits::Trait}, macros_api::{NodeInterner, StructId}, node_interner::{ DefinitionId, DefinitionKind, FuncId, GlobalId, ReferenceId, TraitId, TypeAliasId, }, - Generics, Type, + Generics, Shared, StructType, 
Type, TypeAlias, TypeBinding, TypeVariable, }; use crate::LspState; @@ -23,39 +24,45 @@ pub(crate) fn on_hover_request( params: HoverParams, ) -> impl Future, ResponseError>> { let result = process_request(state, params.text_document_position_params, |args| { - args.interner.reference_at_location(args.location).map(|reference| { + args.interner.reference_at_location(args.location).and_then(|reference| { let location = args.interner.reference_location(reference); let lsp_location = to_lsp_location(args.files, location.file, location.span); - Hover { + format_reference(reference, &args).map(|formatted| Hover { range: lsp_location.map(|location| location.range), contents: HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, - value: format_reference(reference, &args), + value: formatted, }), - } + }) }) }); future::ready(result) } -fn format_reference(reference: ReferenceId, args: &ProcessRequestCallbackArgs) -> String { +fn format_reference(reference: ReferenceId, args: &ProcessRequestCallbackArgs) -> Option { match reference { ReferenceId::Module(id) => format_module(id, args), - ReferenceId::Struct(id) => format_struct(id, args), - ReferenceId::StructMember(id, field_index) => format_struct_member(id, field_index, args), - ReferenceId::Trait(id) => format_trait(id, args), - ReferenceId::Global(id) => format_global(id, args), - ReferenceId::Function(id) => format_function(id, args), - ReferenceId::Alias(id) => format_alias(id, args), - ReferenceId::Local(id) => format_local(id, args), + ReferenceId::Struct(id) => Some(format_struct(id, args)), + ReferenceId::StructMember(id, field_index) => { + Some(format_struct_member(id, field_index, args)) + } + ReferenceId::Trait(id) => Some(format_trait(id, args)), + ReferenceId::Global(id) => Some(format_global(id, args)), + ReferenceId::Function(id) => Some(format_function(id, args)), + ReferenceId::Alias(id) => Some(format_alias(id, args)), + ReferenceId::Local(id) => Some(format_local(id, args)), ReferenceId::Reference(location, _) => { format_reference(args.interner.find_referenced(location).unwrap(), args) } } } -fn format_module(id: ModuleId, args: &ProcessRequestCallbackArgs) -> String { - let module_attributes = args.interner.module_attributes(&id); +fn format_module(id: ModuleId, args: &ProcessRequestCallbackArgs) -> Option { + // Note: it's not clear why `try_module_attributes` might return None here, but it happens. + // This is a workaround to avoid panicking in that case (which brings the LSP server down). + // Cases where this happens are related to generated code, so once that stops happening + // this won't be an issue anymore. 
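
The hunk above replaces a panicking module-attribute lookup with an Option-returning one, so a missing attribute degrades to "no hover" instead of bringing the server down. A minimal sketch of that shape, using stand-in types rather than the actual noirc_frontend API:

// Illustrative only: an Option-returning lookup lets the caller skip the hover.
struct Interner {}
struct ModuleAttributes { name: String }

impl Interner {
    // Hypothetical accessor standing in for `try_module_attributes`.
    fn try_module_attributes(&self, _id: u32) -> Option<&ModuleAttributes> {
        None // attributes may be missing, e.g. for generated code
    }
}

// Propagating None means "nothing to show" instead of an unwrap panic.
fn format_module(interner: &Interner, id: u32) -> Option<String> {
    let attributes = interner.try_module_attributes(id)?;
    Some(format!("    mod {}", attributes.name))
}

fn main() {
    let interner = Interner {};
    // No attributes -> no hover text, and the (hypothetical) server keeps running.
    assert!(format_module(&interner, 0).is_none());
}
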
+ let module_attributes = args.interner.try_module_attributes(&id)?; let mut string = String::new(); if format_parent_module_from_module_id( @@ -68,7 +75,7 @@ fn format_module(id: ModuleId, args: &ProcessRequestCallbackArgs) -> String { string.push_str(" "); string.push_str("mod "); string.push_str(&module_attributes.name); - string + Some(string) } fn format_struct(id: StructId, args: &ProcessRequestCallbackArgs) -> String { @@ -114,6 +121,7 @@ fn format_struct_member( string.push_str(&field_name.0.contents); string.push_str(": "); string.push_str(&format!("{}", field_type)); + string.push_str(&go_to_type_links(field_type, args.interner, args.files)); string } @@ -145,6 +153,7 @@ fn format_global(id: GlobalId, args: &ProcessRequestCallbackArgs) -> String { string.push_str(&global_info.ident.0.contents); string.push_str(": "); string.push_str(&format!("{}", typ)); + string.push_str(&go_to_type_links(&typ, args.interner, args.files)); string } @@ -200,6 +209,8 @@ fn format_function(id: FuncId, args: &ProcessRequestCallbackArgs) -> String { } } + string.push_str(&go_to_type_links(return_type, args.interner, args.files)); + string } @@ -244,6 +255,9 @@ fn format_local(id: DefinitionId, args: &ProcessRequestCallbackArgs) -> String { string.push_str(": "); string.push_str(&format!("{}", typ)); } + + string.push_str(&go_to_type_links(&typ, args.interner, args.files)); + string } @@ -355,6 +369,148 @@ fn format_parent_module_from_module_id( true } +fn go_to_type_links(typ: &Type, interner: &NodeInterner, files: &FileMap) -> String { + let mut gatherer = TypeLinksGatherer { interner, files, links: Vec::new() }; + gatherer.gather_type_links(typ); + + let links = gatherer.links; + if links.is_empty() { + "".to_string() + } else { + let mut string = String::new(); + string.push_str("\n\n"); + string.push_str("Go to "); + for (index, link) in links.iter().enumerate() { + if index > 0 { + string.push_str(" | "); + } + string.push_str(link); + } + string + } +} + +struct TypeLinksGatherer<'a> { + interner: &'a NodeInterner, + files: &'a FileMap, + links: Vec, +} + +impl<'a> TypeLinksGatherer<'a> { + fn gather_type_links(&mut self, typ: &Type) { + match typ { + Type::Array(typ, _) => self.gather_type_links(typ), + Type::Slice(typ) => self.gather_type_links(typ), + Type::Tuple(types) => { + for typ in types { + self.gather_type_links(typ); + } + } + Type::Struct(struct_type, generics) => { + self.gather_struct_type_links(struct_type); + for generic in generics { + self.gather_type_links(generic); + } + } + Type::Alias(type_alias, generics) => { + self.gather_type_alias_links(type_alias); + for generic in generics { + self.gather_type_links(generic); + } + } + Type::TypeVariable(var, _) => { + self.gather_type_variable_links(var); + } + Type::TraitAsType(trait_id, _, generics) => { + let some_trait = self.interner.get_trait(*trait_id); + self.gather_trait_links(some_trait); + for generic in generics { + self.gather_type_links(generic); + } + } + Type::NamedGeneric(var, _, _) => { + self.gather_type_variable_links(var); + } + Type::Function(args, return_type, env) => { + for arg in args { + self.gather_type_links(arg); + } + self.gather_type_links(return_type); + self.gather_type_links(env); + } + Type::MutableReference(typ) => self.gather_type_links(typ), + Type::InfixExpr(lhs, _, rhs) => { + self.gather_type_links(lhs); + self.gather_type_links(rhs); + } + Type::FieldElement + | Type::Integer(..) 
+ | Type::Bool + | Type::String(_) + | Type::FmtString(_, _) + | Type::Unit + | Type::Forall(_, _) + | Type::Constant(_) + | Type::Quoted(_) + | Type::Error => (), + } + } + + fn gather_struct_type_links(&mut self, struct_type: &Shared) { + let struct_type = struct_type.borrow(); + if let Some(lsp_location) = + to_lsp_location(self.files, struct_type.location.file, struct_type.name.span()) + { + self.push_link(format_link(struct_type.name.to_string(), lsp_location)); + } + } + + fn gather_type_alias_links(&mut self, type_alias: &Shared) { + let type_alias = type_alias.borrow(); + if let Some(lsp_location) = + to_lsp_location(self.files, type_alias.location.file, type_alias.name.span()) + { + self.push_link(format_link(type_alias.name.to_string(), lsp_location)); + } + } + + fn gather_trait_links(&mut self, some_trait: &Trait) { + if let Some(lsp_location) = + to_lsp_location(self.files, some_trait.location.file, some_trait.name.span()) + { + self.push_link(format_link(some_trait.name.to_string(), lsp_location)); + } + } + + fn gather_type_variable_links(&mut self, var: &TypeVariable) { + let var = &*var.borrow(); + match var { + TypeBinding::Bound(typ) => { + self.gather_type_links(typ); + } + TypeBinding::Unbound(..) => (), + } + } + + fn push_link(&mut self, link: String) { + if !self.links.contains(&link) { + self.links.push(link); + } + } +} + +fn format_link(name: String, location: lsp_types::Location) -> String { + format!( + "[{}]({}#L{},{}-{},{})", + name, + location.uri, + location.range.start.line + 1, + location.range.start.character + 1, + location.range.end.line + 1, + location.range.end.character + 1 + ) +} + #[cfg(test)] mod hover_tests { use crate::test_utils; @@ -529,6 +685,24 @@ mod hover_tests { .await; } + #[test] + async fn hover_on_local_var_whose_type_you_can_navigate_to() { + let workspace_on_src_lib_path = std::env::current_dir() + .unwrap() + .join("test_programs/workspace/one/src/lib.nr") + .canonicalize() + .expect("Could not resolve root path"); + let workspace_on_src_lib_path = workspace_on_src_lib_path.to_string_lossy(); + + assert_hover( + "workspace", + "two/src/lib.nr", + Position { line: 51, character: 8 }, + &format!(" let x: BoundedVec\n\nGo to [SubOneStruct](file://{}#L4,12-4,24)", workspace_on_src_lib_path), + ) + .await; + } + #[test] async fn hover_on_parameter() { assert_hover( diff --git a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs index 2afa5fa44fd..2ed441c623e 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/inlay_hint.rs @@ -4,8 +4,8 @@ use std::future::{self, Future}; use async_lsp::ResponseError; use fm::{FileId, FileMap, PathString}; use lsp_types::{ - InlayHint, InlayHintKind, InlayHintLabel, InlayHintLabelPart, InlayHintParams, Position, - TextDocumentPositionParams, + InlayHint, InlayHintKind, InlayHintLabel, InlayHintLabelPart, InlayHintParams, Position, Range, + TextDocumentPositionParams, TextEdit, }; use noirc_errors::{Location, Span}; use noirc_frontend::{ @@ -173,7 +173,7 @@ impl<'a> InlayHintCollector<'a> { self.collect_in_expression(&assign_statement.expression); } StatementKind::For(for_loop_statement) => { - self.collect_in_ident(&for_loop_statement.identifier); + self.collect_in_ident(&for_loop_statement.identifier, false); self.collect_in_expression(&for_loop_statement.block); } StatementKind::Comptime(statement) => self.collect_in_statement(statement), @@ -251,7 +251,15 @@ impl<'a> 
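// A minimal standalone sketch of the "Go to" link format produced by `format_link`:
// LSP positions are 0-based, so 1 is added to each line/character to build the
// editor-facing anchors. `SimplePosition`/`SimpleLocation` are hypothetical stand-ins
// for `lsp_types::Position`/`lsp_types::Location`.
struct SimplePosition { line: u32, character: u32 }
struct SimpleLocation { uri: String, start: SimplePosition, end: SimplePosition }

fn sketch_format_link(name: &str, location: &SimpleLocation) -> String {
    format!(
        "[{}]({}#L{},{}-{},{})",
        name,
        location.uri,
        location.start.line + 1,
        location.start.character + 1,
        location.end.line + 1,
        location.end.character + 1
    )
}

fn main() {
    // Same shape as the string asserted in `hover_on_local_var_whose_type_you_can_navigate_to`.
    let location = SimpleLocation {
        uri: "file:///workspace/one/src/lib.nr".to_string(),
        start: SimplePosition { line: 3, character: 11 },
        end: SimplePosition { line: 3, character: 23 },
    };
    assert_eq!(
        sketch_format_link("SubOneStruct", &location),
        "[SubOneStruct](file:///workspace/one/src/lib.nr#L4,12-4,24)"
    );
}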
InlayHintCollector<'a> { self.collect_in_expression(expression); } } - ExpressionKind::Lambda(lambda) => self.collect_in_expression(&lambda.body), + ExpressionKind::Lambda(lambda) => { + for (pattern, typ) in &lambda.parameters { + if matches!(typ.typ, UnresolvedTypeData::Unspecified) { + self.collect_in_pattern(pattern); + } + } + + self.collect_in_expression(&lambda.body); + } ExpressionKind::Parenthesized(parenthesized) => { self.collect_in_expression(parenthesized); } @@ -276,7 +284,7 @@ impl<'a> InlayHintCollector<'a> { match pattern { Pattern::Identifier(ident) => { - self.collect_in_ident(ident); + self.collect_in_ident(ident, true); } Pattern::Mutable(pattern, _span, _is_synthesized) => { self.collect_in_pattern(pattern); @@ -294,7 +302,7 @@ impl<'a> InlayHintCollector<'a> { } } - fn collect_in_ident(&mut self, ident: &Ident) { + fn collect_in_ident(&mut self, ident: &Ident, editable: bool) { if !self.options.type_hints.enabled { return; } @@ -308,17 +316,17 @@ impl<'a> InlayHintCollector<'a> { let global_info = self.interner.get_global(global_id); let definition_id = global_info.definition_id; let typ = self.interner.definition_type(definition_id); - self.push_type_hint(lsp_location, &typ); + self.push_type_hint(lsp_location, &typ, editable); } ReferenceId::Local(definition_id) => { let typ = self.interner.definition_type(definition_id); - self.push_type_hint(lsp_location, &typ); + self.push_type_hint(lsp_location, &typ, editable); } ReferenceId::StructMember(struct_id, field_index) => { let struct_type = self.interner.get_struct(struct_id); let struct_type = struct_type.borrow(); let (_field_name, field_type) = struct_type.field_at(field_index); - self.push_type_hint(lsp_location, field_type); + self.push_type_hint(lsp_location, field_type, false); } ReferenceId::Module(_) | ReferenceId::Struct(_) @@ -331,7 +339,7 @@ impl<'a> InlayHintCollector<'a> { } } - fn push_type_hint(&mut self, location: lsp_types::Location, typ: &Type) { + fn push_type_hint(&mut self, location: lsp_types::Location, typ: &Type, editable: bool) { let position = location.range.end; let mut parts = Vec::new(); @@ -342,7 +350,14 @@ impl<'a> InlayHintCollector<'a> { position, label: InlayHintLabel::LabelParts(parts), kind: Some(InlayHintKind::TYPE), - text_edits: None, + text_edits: if editable { + Some(vec![TextEdit { + range: Range { start: location.range.end, end: location.range.end }, + new_text: format!(": {}", typ), + }]) + } else { + None + }, tooltip: None, padding_left: None, padding_right: None, @@ -584,6 +599,7 @@ fn push_type_parts(typ: &Type, parts: &mut Vec, files: &File | Type::NamedGeneric(..) | Type::Forall(..) | Type::Constant(..) + | Type::InfixExpr(..) | Type::Quoted(..) 
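// A minimal standalone sketch of what the `text_edits` added in `push_type_hint` do when
// the editor applies them: the hint text (": Field", ": i32", ...) is inserted at the end
// of the identifier, turning the inlay hint into real source text. The source line below
// is assumed for illustration only.
fn apply_insert(line: &str, character: usize, new_text: &str) -> String {
    let mut result = String::with_capacity(line.len() + new_text.len());
    result.push_str(&line[..character]);
    result.push_str(new_text);
    result.push_str(&line[character..]);
    result
}

fn main() {
    // `foo` ends at character 11, matching a hint at `Position { line: 1, character: 11 }`
    // with `new_text: ": Field"` as asserted in the tests below.
    let source = "    let foo = 1;";
    assert_eq!(apply_insert(source, 11, ": Field"), "    let foo: Field = 1;");
}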
| Type::Error => { parts.push(string_part(typ.to_string())); @@ -609,7 +625,7 @@ fn push_type_variable_parts( fn get_expression_name(expression: &Expression) -> Option { match &expression.kind { - ExpressionKind::Variable(path, _) => Some(path.last_segment().to_string()), + ExpressionKind::Variable(path) => Some(path.last_name().to_string()), ExpressionKind::Prefix(prefix) => get_expression_name(&prefix.rhs), ExpressionKind::MemberAccess(member_access) => Some(member_access.rhs.to_string()), ExpressionKind::Call(call) => get_expression_name(&call.func), @@ -756,8 +772,10 @@ mod inlay_hints_tests { let inlay_hints = get_inlay_hints(0, 3, type_hints()).await; assert_eq!(inlay_hints.len(), 1); + let position = Position { line: 1, character: 11 }; + let inlay_hint = &inlay_hints[0]; - assert_eq!(inlay_hint.position, Position { line: 1, character: 11 }); + assert_eq!(inlay_hint.position, position); if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { assert_eq!(labels.len(), 2); @@ -770,6 +788,14 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!( + inlay_hint.text_edits, + Some(vec![TextEdit { + range: Range { start: position, end: position }, + new_text: ": Field".to_string(), + }]) + ); } #[test] @@ -777,8 +803,10 @@ mod inlay_hints_tests { let inlay_hints = get_inlay_hints(12, 15, type_hints()).await; assert_eq!(inlay_hints.len(), 1); + let position = Position { line: 13, character: 11 }; + let inlay_hint = &inlay_hints[0]; - assert_eq!(inlay_hint.position, Position { line: 13, character: 11 }); + assert_eq!(inlay_hint.position, position); if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { assert_eq!(labels.len(), 2); @@ -798,6 +826,34 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!( + inlay_hint.text_edits, + Some(vec![TextEdit { + range: Range { start: position, end: position }, + new_text: ": Foo".to_string(), + }]) + ); + } + + #[test] + async fn test_type_inlay_hints_in_struct_member_pattern() { + let inlay_hints = get_inlay_hints(94, 96, type_hints()).await; + assert_eq!(inlay_hints.len(), 1); + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, Position { line: 95, character: 24 }); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "i32"); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + + assert_eq!(inlay_hint.text_edits, None); } #[test] @@ -816,6 +872,8 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!(inlay_hint.text_edits, None); } #[test] @@ -823,8 +881,10 @@ mod inlay_hints_tests { let inlay_hints = get_inlay_hints(19, 21, type_hints()).await; assert_eq!(inlay_hints.len(), 1); + let position = Position { line: 20, character: 10 }; + let inlay_hint = &inlay_hints[0]; - assert_eq!(inlay_hint.position, Position { line: 20, character: 10 }); + assert_eq!(inlay_hint.position, position); if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { assert_eq!(labels.len(), 2); @@ -834,6 +894,42 @@ mod inlay_hints_tests { } else { panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); } + + assert_eq!( + inlay_hint.text_edits, + Some(vec![TextEdit { + range: Range { start: position, end: 
position }, + new_text: ": Field".to_string(), + }]) + ); + } + + #[test] + async fn test_type_inlay_hints_in_lambda() { + let inlay_hints = get_inlay_hints(102, 105, type_hints()).await; + assert_eq!(inlay_hints.len(), 1); + + let position = Position { line: 104, character: 35 }; + + let inlay_hint = &inlay_hints[0]; + assert_eq!(inlay_hint.position, position); + + if let InlayHintLabel::LabelParts(labels) = &inlay_hint.label { + assert_eq!(labels.len(), 2); + assert_eq!(labels[0].value, ": "); + assert_eq!(labels[0].location, None); + assert_eq!(labels[1].value, "i32"); + } else { + panic!("Expected InlayHintLabel::LabelParts, got {:?}", inlay_hint.label); + } + + assert_eq!( + inlay_hint.text_edits, + Some(vec![TextEdit { + range: Range { start: position, end: position }, + new_text: ": i32".to_string(), + }]) + ); } #[test] @@ -855,6 +951,7 @@ mod inlay_hints_tests { let inlay_hint = &inlay_hints[0]; assert_eq!(inlay_hint.position, Position { line: 25, character: 12 }); + assert_eq!(inlay_hint.text_edits, None); if let InlayHintLabel::String(label) = &inlay_hint.label { assert_eq!(label, "one: "); } else { @@ -863,6 +960,7 @@ mod inlay_hints_tests { let inlay_hint = &inlay_hints[1]; assert_eq!(inlay_hint.position, Position { line: 25, character: 15 }); + assert_eq!(inlay_hint.text_edits, None); if let InlayHintLabel::String(label) = &inlay_hint.label { assert_eq!(label, "two: "); } else { @@ -877,6 +975,7 @@ mod inlay_hints_tests { let inlay_hint = &inlay_hints[0]; assert_eq!(inlay_hint.position, Position { line: 38, character: 18 }); + assert_eq!(inlay_hint.text_edits, None); if let InlayHintLabel::String(label) = &inlay_hint.label { assert_eq!(label, "one: "); } else { diff --git a/noir/noir-repo/tooling/lsp/src/requests/mod.rs b/noir/noir-repo/tooling/lsp/src/requests/mod.rs index 4d261c1b50a..09794574709 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/mod.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/mod.rs @@ -1,3 +1,4 @@ +use std::path::PathBuf; use std::{collections::HashMap, future::Future}; use crate::insert_all_files_for_workspace_into_file_manager; @@ -324,7 +325,12 @@ where let file_name = files.name(file_id).ok()?; let path = file_name.to_string(); - let uri = Url::from_file_path(path).ok()?; + + // `path` might be a relative path so we canonicalize it to get an absolute path + let path_buf = PathBuf::from(path); + let path_buf = path_buf.canonicalize().unwrap_or(path_buf); + + let uri = Url::from_file_path(path_buf.to_str()?).ok()?; Some(Location { uri, range }) } @@ -358,8 +364,7 @@ where ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let workspace = - resolve_workspace_for_source_path(file_path.as_path(), &state.root_path).unwrap(); + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); let package = crate::workspace_package_for_file(&workspace, &file_path).ok_or_else(|| { ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find package for file") })?; @@ -382,7 +387,7 @@ where interner = def_interner; } else { // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, None); + let _ = noirc_driver::check_crate(&mut context, crate_id, &Default::default()); interner = &context.def_interner; } diff --git a/noir/noir-repo/tooling/lsp/src/requests/references.rs b/noir/noir-repo/tooling/lsp/src/requests/references.rs index 375e0b69aed..c720156659d 100644 --- 
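// A minimal standalone sketch of the canonicalization step above: `Url::from_file_path`
// only accepts absolute paths, so a relative path coming out of the file map is resolved
// first; if canonicalization fails (e.g. the file does not exist) the original path is
// kept as a best effort. Uses only std; the real code then feeds the result into `Url`.
use std::path::PathBuf;

fn to_absolute(path: &str) -> PathBuf {
    let path_buf = PathBuf::from(path);
    path_buf.canonicalize().unwrap_or(path_buf)
}

fn main() {
    // For an existing relative path this prints something like "/home/user/project/src/main.nr";
    // for a missing one it simply echoes the input back.
    println!("{}", to_absolute("src/main.nr").display());
    println!("{}", to_absolute("does/not/exist.nr").display());
}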
a/noir/noir-repo/tooling/lsp/src/requests/references.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/references.rs @@ -94,6 +94,10 @@ mod references_tests { check_references_succeeds("rename_function", "another_function", 0, false).await; } + // Ignored because making this work slows down everything, so for now things will not work + // as ideally, but they'll be fast. + // See https://github.com/noir-lang/noir/issues/5460 + #[ignore] #[test] async fn test_on_references_request_works_accross_workspace_packages() { let (mut state, noir_text_document) = test_utils::init_lsp_server("workspace").await; @@ -108,13 +112,11 @@ mod references_tests { let two_lib = Url::from_file_path(workspace_dir.join("two/src/lib.nr")).unwrap(); // We call this to open the document, so that the entire workspace is analyzed - let only_process_document_uri_package = false; let output_diagnostics = true; notifications::process_workspace_for_noir_document( &mut state, one_lib.clone(), - only_process_document_uri_package, output_diagnostics, ) .unwrap(); diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index bf4d9763faf..fc4054633e2 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -61,7 +61,7 @@ fn on_test_run_request_inner( Some(package) => { let (mut context, crate_id) = crate::prepare_package(&workspace_file_manager, &parsed_files, package); - if check_crate(&mut context, crate_id, false, false, None).is_err() { + if check_crate(&mut context, crate_id, &Default::default()).is_err() { let result = NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), diff --git a/noir/noir-repo/tooling/lsp/src/requests/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/tests.rs index 20b96029696..7203aca7f09 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/tests.rs @@ -65,7 +65,7 @@ fn on_tests_request_inner( crate::prepare_package(&workspace_file_manager, &parsed_files, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false, None); + let _ = check_crate(&mut context, crate_id, &Default::default()); // We don't add test headings for a package if it contains no `#[test]` functions get_package_tests_in_crate(&context, &crate_id, &package.name) diff --git a/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr b/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr index 2b53f8de339..b2bbed2b1e5 100644 --- a/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr +++ b/noir/noir-repo/tooling/lsp/test_programs/inlay_hints/src/main.nr @@ -92,3 +92,15 @@ fn call_yet_another_function() { yet_another_function(some_name) // Should not show parameter names ("name" is a suffix of "some_name") } +fn struct_member_hint() { + let SomeStruct { one } = SomeStruct { one: 1 }; +} + +fn some_map(x: T, f: fn(T) -> U) -> U { + f(x) +} + +fn hint_on_lambda_parameter() { + let value: i32 = 1; + let _: i32 = some_map(value, |x| x + 1); +} diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs index 74e07efb5c1..3f8cd055569 100644 --- a/noir/noir-repo/tooling/nargo_cli/build.rs +++ b/noir/noir-repo/tooling/nargo_cli/build.rs @@ -218,7 +218,7 @@ fn generate_compile_success_empty_tests(test_file: 
&mut File, test_data_dir: &Pa &test_dir, &format!( r#" - nargo.arg("info").arg("--json").arg("--force"); + nargo.arg("info").arg("--arithmetic-generics").arg("--json").arg("--force"); {assert_zero_opcodes}"#, ), diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index d40bae1ecfd..5239070b4d2 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -81,14 +81,7 @@ fn check_package( allow_overwrite: bool, ) -> Result { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - check_crate_and_report_errors( - &mut context, - crate_id, - compile_options.deny_warnings, - compile_options.disable_macros, - compile_options.silence_warnings, - compile_options.debug_comptime_in_file.as_deref(), - )?; + check_crate_and_report_errors(&mut context, crate_id, compile_options)?; if package.is_library() || package.is_contract() { // Libraries do not have ABIs while contracts have many, so we cannot generate a `Prover.toml` file. @@ -157,14 +150,10 @@ fn create_input_toml_template( pub(crate) fn check_crate_and_report_errors( context: &mut Context, crate_id: CrateId, - deny_warnings: bool, - disable_macros: bool, - silence_warnings: bool, - debug_comptime_in_file: Option<&str>, + options: &CompileOptions, ) -> Result<(), CompileError> { - let result = - check_crate(context, crate_id, deny_warnings, disable_macros, debug_comptime_in_file); - report_errors(result, &context.file_manager, deny_warnings, silence_warnings) + let result = check_crate(context, crate_id, options); + report_errors(result, &context.file_manager, options.deny_warnings, options.silence_warnings) } #[cfg(test)] diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index a2877ebdeac..3e3560c91bf 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -9,8 +9,8 @@ use nargo::package::Package; use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::file_manager_with_stdlib; use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; +use noirc_driver::{file_manager_with_stdlib, DEFAULT_EXPRESSION_WIDTH}; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract}; use noirc_frontend::graph::CrateName; @@ -250,12 +250,6 @@ fn save_contract( } } -/// Default expression width used for Noir compilation. -/// The ACVM native type `ExpressionWidth` has its own default which should always be unbounded, -/// while we can sometimes expect the compilation target width to change. -/// Thus, we set it separately here rather than trying to alter the default derivation of the type. -const DEFAULT_EXPRESSION_WIDTH: ExpressionWidth = ExpressionWidth::Bounded { width: 4 }; - /// If a target width was not specified in the CLI we can safely override the default. 
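// A minimal standalone sketch of the width-selection logic implied by the comments above,
// assuming an explicit CLI width wins, then the package default, then the compiler's
// DEFAULT_EXPRESSION_WIDTH of `Bounded { width: 4 }`. The real `get_target_width` may
// order or name things differently; `Width` is a hypothetical stand-in for `ExpressionWidth`.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Width {
    Unbounded,
    Bounded(usize),
}

const DEFAULT_WIDTH: Width = Width::Bounded(4);

fn sketch_get_target_width(package_default: Option<Width>, cli_override: Option<Width>) -> Width {
    cli_override.or(package_default).unwrap_or(DEFAULT_WIDTH)
}

fn main() {
    assert_eq!(sketch_get_target_width(None, None), Width::Bounded(4));
    assert_eq!(sketch_get_target_width(Some(Width::Unbounded), None), Width::Unbounded);
    assert_eq!(sketch_get_target_width(None, Some(Width::Bounded(8))), Width::Bounded(8));
}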
pub(crate) fn get_target_width( package_default_width: Option, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index 1b7ba97d68d..19add7f30dc 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -83,14 +83,7 @@ fn compile_exported_functions( compile_options: &CompileOptions, ) -> Result<(), CliError> { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - check_crate_and_report_errors( - &mut context, - crate_id, - compile_options.deny_warnings, - compile_options.disable_macros, - compile_options.silence_warnings, - compile_options.debug_comptime_in_file.as_deref(), - )?; + check_crate_and_report_errors(&mut context, crate_id, compile_options)?; let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index c8848e2e304..1cf5b32c381 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -171,14 +171,8 @@ fn run_test + Default>( // We then need to construct a separate copy for each test. let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - check_crate( - &mut context, - crate_id, - compile_options.deny_warnings, - compile_options.disable_macros, - compile_options.debug_comptime_in_file.as_deref(), - ) - .expect("Any errors should have occurred when collecting test functions"); + check_crate(&mut context, crate_id, compile_options) + .expect("Any errors should have occurred when collecting test functions"); let test_functions = context .get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Exact(fn_name)); @@ -237,14 +231,7 @@ fn get_tests_in_package( compile_options: &CompileOptions, ) -> Result, CliError> { let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - check_crate_and_report_errors( - &mut context, - crate_id, - compile_options.deny_warnings, - compile_options.disable_macros, - compile_options.silence_warnings, - compile_options.debug_comptime_in_file.as_deref(), - )?; + check_crate_and_report_errors(&mut context, crate_id, compile_options)?; Ok(context .get_all_test_functions_in_crate_matching(&crate_id, fn_name) diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index c4cc792438e..0444f79d371 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -33,7 +33,7 @@ fn run_stdlib_tests() { let (mut context, dummy_crate_id) = prepare_package(&file_manager, &parsed_files, &dummy_package); - let result = check_crate(&mut context, dummy_crate_id, false, false, None); + let result = check_crate(&mut context, dummy_crate_id, &Default::default()); report_errors(result, &context.file_manager, true, false) .expect("Error encountered while compiling standard library"); diff --git a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/expr.rs b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/expr.rs index 015644c15cb..5673baf2893 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/expr.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/expr.rs @@ -1,5 +1,6 @@ use noirc_frontend::ast::{ - ArrayLiteral, BlockExpression, Expression, ExpressionKind, Literal, UnaryOp, 
UnresolvedType, + ArrayLiteral, BlockExpression, Expression, ExpressionKind, Literal, Path, PathKind, UnaryOp, + UnresolvedType, }; use noirc_frontend::{macros_api::Span, token::Token}; @@ -161,12 +162,7 @@ pub(crate) fn rewrite( visitor.format_if(*if_expr) } - ExpressionKind::Variable(path, generics) => { - let path_string = visitor.slice(path.span); - - let turbofish = rewrite_turbofish(visitor, shape, generics); - format!("{path_string}{turbofish}") - } + ExpressionKind::Variable(path) => rewrite_path(visitor, shape, path), ExpressionKind::Lambda(_) => visitor.slice(span).to_string(), ExpressionKind::Quote(_) => visitor.slice(span).to_string(), ExpressionKind::Comptime(block, block_span) => { @@ -192,6 +188,25 @@ fn rewrite_block(visitor: &FmtVisitor, block: BlockExpression, span: Span) -> St visitor.finish() } +fn rewrite_path(visitor: &FmtVisitor, shape: Shape, path: Path) -> String { + let mut string = String::new(); + + if path.kind != PathKind::Plain { + string.push_str(&path.kind.to_string()); + string.push_str("::"); + } + + for (index, segment) in path.segments.iter().enumerate() { + if index > 0 { + string.push_str("::"); + } + string.push_str(&segment.ident.to_string()); + string.push_str(&rewrite_turbofish(visitor, shape, segment.generics.clone())); + } + + string +} + fn rewrite_turbofish( visitor: &FmtVisitor, shape: Shape, diff --git a/noir/noir-repo/tooling/nargo_fmt/src/utils.rs b/noir/noir-repo/tooling/nargo_fmt/src/utils.rs index 020f411ae2f..83634b718e2 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/utils.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/utils.rs @@ -146,9 +146,10 @@ impl HasItem for Param { fn format(self, visitor: &FmtVisitor, shape: Shape) -> String { let pattern = visitor.slice(self.pattern.span()); let visibility = match self.visibility { - Visibility::Public => "pub", - Visibility::Private => "", - Visibility::DataBus => "call_data", + Visibility::Public => "pub".to_string(), + Visibility::Private => "".to_string(), + Visibility::CallData(x) => format!("call_data({x})"), + Visibility::ReturnData => "return_data".to_string(), }; if self.pattern.is_synthesized() || self.typ.is_synthesized() { @@ -187,6 +188,9 @@ impl HasItem for UnresolvedGeneric { result.push_str(&typ); result } + UnresolvedGeneric::Resolved(..) 
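// A minimal standalone sketch of the per-segment formatting done by `rewrite_path` above:
// the path kind (if any) is printed first, segments are joined with "::", and each segment
// keeps its own turbofish. `SketchSegment` is a hypothetical stand-in for the parser's
// path segments, and the "::<...>" rendering is assumed for illustration.
struct SketchSegment {
    ident: &'static str,
    turbofish: Option<&'static str>,
}

fn sketch_rewrite_path(kind: Option<&str>, segments: &[SketchSegment]) -> String {
    let mut string = String::new();
    if let Some(kind) = kind {
        string.push_str(kind);
        string.push_str("::");
    }
    for (index, segment) in segments.iter().enumerate() {
        if index > 0 {
            string.push_str("::");
        }
        string.push_str(segment.ident);
        if let Some(turbofish) = segment.turbofish {
            string.push_str("::<");
            string.push_str(turbofish);
            string.push('>');
        }
    }
    string
}

fn main() {
    let segments = [
        SketchSegment { ident: "foo", turbofish: None },
        SketchSegment { ident: "bar", turbofish: Some("Field") },
    ];
    assert_eq!(sketch_rewrite_path(Some("dep"), &segments), "dep::foo::bar::<Field>");
}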
=> { + unreachable!("Found macro result UnresolvedGeneric::Resolved in formatter") + } } } } diff --git a/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs b/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs index 5aaaf20ff47..0c9f61a7d40 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs @@ -120,8 +120,11 @@ impl super::FmtVisitor<'_> { let visibility = match func.def.return_visibility { Visibility::Public => "pub", - Visibility::DataBus => "return_data", + Visibility::ReturnData => "return_data", Visibility::Private => "", + Visibility::CallData(_) => { + unreachable!("call_data cannot be used for return value") + } }; result.push_str(&append_space_if_nonempty(visibility.into())); diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/expected/expr.nr b/noir/noir-repo/tooling/nargo_fmt/tests/expected/expr.nr index 03a26835ee3..babaf5b356e 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/expected/expr.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/expected/expr.nr @@ -129,9 +129,9 @@ fn return_if_expr() { } fn if_if() { - if cond { + (if cond { some(); } else { none(); - }.bar().baz(); + }).bar().baz(); } diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/input/expr.nr b/noir/noir-repo/tooling/nargo_fmt/tests/input/expr.nr index b4edcbbed5f..9ecefad7dfd 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/input/expr.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/input/expr.nr @@ -147,7 +147,7 @@ fn return_if_expr() { } fn if_if() { -if cond { some(); } else { none(); } +(if cond { some(); } else { none(); }) .bar() .baz(); } \ No newline at end of file diff --git a/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json b/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json index 7087052602c..aeca5fe543f 100644 --- a/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json +++ b/noir/noir-repo/tooling/noir_js_backend_barretenberg/package.json @@ -41,7 +41,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "portal:../../../../barretenberg/ts", + "@aztec/bb.js": "0.47.1", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts index 8ede6a07b50..4fd256a7a81 100644 --- a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts +++ b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts @@ -2,8 +2,8 @@ import { decompressSync as gunzip } from 'fflate'; import { acirToUint8Array } from './serialize.js'; import { Backend, CompiledCircuit, ProofData, VerifierBackend } from '@noir-lang/types'; import { BackendOptions } from './types.js'; -import { deflattenPublicInputs } from './public_inputs.js'; -import { reconstructProofWithPublicInputs } from './verifier.js'; +import { deflattenFields } from './public_inputs.js'; +import { reconstructProofWithPublicInputs, reconstructProofWithPublicInputsHonk } from './verifier.js'; import { type Barretenberg } from '@aztec/bb.js'; // This is the number of bytes in a UltraPlonk proof @@ -50,6 +50,7 @@ export class BarretenbergBackend implements Backend, VerifierBackend { this.acirUncompressedBytecode, honkRecursion, ); + const crs = await Crs.new(subgroupSize + 1); await api.commonInitSlabAllocator(subgroupSize); await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); @@ -73,7 +74,7 @@ export class BarretenbergBackend implements Backend, VerifierBackend { const publicInputsConcatenated = proofWithPublicInputs.slice(0, splitIndex); const proof = proofWithPublicInputs.slice(splitIndex); - const publicInputs = deflattenPublicInputs(publicInputsConcatenated); + const publicInputs = deflattenFields(publicInputsConcatenated); return { proof, publicInputs }; } @@ -143,3 +144,140 @@ export class BarretenbergBackend implements Backend, VerifierBackend { await this.api.destroy(); } } + +// Buffers are prepended with their size. The size takes 4 bytes. +const serializedBufferSize = 4; +const fieldByteSize = 32; +const publicInputOffset = 3; +const publicInputsOffsetBytes = publicInputOffset * fieldByteSize; + +export class UltraHonkBackend implements Backend, VerifierBackend { + // These type assertions are used so that we don't + // have to initialize `api` in the constructor. + // These are initialized asynchronously in the `init` function, + // constructors cannot be asynchronous which is why we do this. + + protected api!: Barretenberg; + protected acirUncompressedBytecode: Uint8Array; + + constructor( + acirCircuit: CompiledCircuit, + protected options: BackendOptions = { threads: 1 }, + ) { + const acirBytecodeBase64 = acirCircuit.bytecode; + this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64); + } + + /** @ignore */ + async instantiate(): Promise { + if (!this.api) { + if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { + this.options.threads = navigator.hardwareConcurrency; + } else { + try { + const os = await import('os'); + this.options.threads = os.cpus().length; + } catch (e) { + console.log('Could not detect environment. 
Falling back to one thread.', e); + } + } + const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); + const api = await Barretenberg.new(this.options); + + const honkRecursion = true; + const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes( + this.acirUncompressedBytecode, + honkRecursion, + ); + const crs = await Crs.new(subgroupSize + 1); + await api.commonInitSlabAllocator(subgroupSize); + await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); + + // We don't init a proving key here in the Honk API + // await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); + this.api = api; + } + } + + async generateProof(decompressedWitness: Uint8Array): Promise { + await this.instantiate(); + const proofWithPublicInputs = await this.api.acirProveUltraHonk( + this.acirUncompressedBytecode, + gunzip(decompressedWitness), + ); + const proofAsStrings = deflattenFields(proofWithPublicInputs.slice(4)); + + const numPublicInputs = Number(proofAsStrings[1]); + + // Account for the serialized buffer size at start + const publicInputsOffset = publicInputsOffsetBytes + serializedBufferSize; + // Get the part before and after the public inputs + const proofStart = proofWithPublicInputs.slice(0, publicInputsOffset); + const publicInputsSplitIndex = numPublicInputs * fieldByteSize; + const proofEnd = proofWithPublicInputs.slice(publicInputsOffset + publicInputsSplitIndex); + // Construct the proof without the public inputs + const proof = new Uint8Array([...proofStart, ...proofEnd]); + + // Fetch the number of public inputs out of the proof string + const publicInputsConcatenated = proofWithPublicInputs.slice( + publicInputsOffset, + publicInputsOffset + publicInputsSplitIndex, + ); + const publicInputs = deflattenFields(publicInputsConcatenated); + + return { proof, publicInputs }; + } + + async verifyProof(proofData: ProofData): Promise { + const { RawBuffer } = await import('@aztec/bb.js'); + + const proof = reconstructProofWithPublicInputsHonk(proofData); + + await this.instantiate(); + const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); + + return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(vkBuf)); + } + + async getVerificationKey(): Promise { + await this.instantiate(); + return await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); + } + + // TODO(https://github.com/noir-lang/noir/issues/5661): Update this to handle Honk recursive aggregation in the browser once it is ready in the backend itself + async generateRecursiveProofArtifacts( + _proofData: ProofData, + _numOfPublicInputs: number, + ): Promise<{ proofAsFields: string[]; vkAsFields: string[]; vkHash: string }> { + await this.instantiate(); + // TODO(https://github.com/noir-lang/noir/issues/5661): This needs to be updated to handle recursive aggregation. + // There is still a proofAsFields method but we could consider getting rid of it as the proof itself + // is a list of field elements. + // UltraHonk also does not have public inputs directly prepended to the proof and they are still instead + // inserted at an offset. + // const proof = reconstructProofWithPublicInputs(proofData); + // const proofAsFields = (await this.api.acirProofAsFieldsUltraHonk(proof)).slice(numOfPublicInputs); + + // TODO: perhaps we should put this in the init function. Need to benchmark + // TODO how long it takes. 
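// A minimal standalone sketch of the byte layout implied by the constants and slicing above:
// a Honk proof buffer starts with a 4-byte size prefix, then 3 header fields of 32 bytes
// each, so the public inputs begin at byte 100 and occupy `num_public_inputs * 32` bytes;
// the proof without public inputs is everything before and after that window.
const SERIALIZED_BUFFER_SIZE: usize = 4;
const FIELD_BYTE_SIZE: usize = 32;
const PUBLIC_INPUT_OFFSET: usize = 3;
const PUBLIC_INPUTS_OFFSET_BYTES: usize = PUBLIC_INPUT_OFFSET * FIELD_BYTE_SIZE;

// Splits a proof-with-public-inputs buffer into (proof, public_inputs).
fn split_honk_proof(proof_with_public_inputs: &[u8], num_public_inputs: usize) -> (Vec<u8>, Vec<u8>) {
    let offset = PUBLIC_INPUTS_OFFSET_BYTES + SERIALIZED_BUFFER_SIZE; // 96 + 4 = 100
    let public_inputs_len = num_public_inputs * FIELD_BYTE_SIZE;

    let public_inputs = proof_with_public_inputs[offset..offset + public_inputs_len].to_vec();
    let mut proof = proof_with_public_inputs[..offset].to_vec();
    proof.extend_from_slice(&proof_with_public_inputs[offset + public_inputs_len..]);
    (proof, public_inputs)
}

fn main() {
    // Two public inputs => a 64-byte window carved out starting at byte 100.
    let buffer: Vec<u8> = (0..300u16).map(|byte| (byte % 256) as u8).collect();
    let (proof, public_inputs) = split_honk_proof(&buffer, 2);
    assert_eq!(public_inputs.len(), 64);
    assert_eq!(proof.len(), buffer.len() - 64);
    assert_eq!(public_inputs[0], 100); // the first public-input byte sits at offset 100
}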
+ const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); + const vk = await this.api.acirVkAsFieldsUltraHonk(vkBuf); + + return { + // TODO(https://github.com/noir-lang/noir/issues/5661) + proofAsFields: [], + vkAsFields: vk.map((vk) => vk.toString()), + // We use an empty string for the vk hash here as it is unneeded as part of the recursive artifacts + // The user can be expected to hash the vk inside their circuit to check whether the vk is the circuit + // they expect + vkHash: '', + }; + } + + async destroy(): Promise { + if (!this.api) { + return; + } + await this.api.destroy(); + } +} diff --git a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/index.ts b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/index.ts index cefef07520f..6786c1eec48 100644 --- a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/index.ts +++ b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/index.ts @@ -1,5 +1,5 @@ -export { BarretenbergBackend } from './backend.js'; -export { BarretenbergVerifier } from './verifier.js'; +export { BarretenbergBackend, UltraHonkBackend } from './backend.js'; +export { BarretenbergVerifier, UltraHonkVerifier } from './verifier.js'; // typedoc exports export { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; diff --git a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/public_inputs.ts b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/public_inputs.ts index ed771ab0d34..10b4ee6ab32 100644 --- a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/public_inputs.ts +++ b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/public_inputs.ts @@ -1,16 +1,16 @@ import { WitnessMap } from '@noir-lang/types'; -export function flattenPublicInputsAsArray(publicInputs: string[]): Uint8Array { - const flattenedPublicInputs = publicInputs.map(hexToUint8Array); +export function flattenFieldsAsArray(fields: string[]): Uint8Array { + const flattenedPublicInputs = fields.map(hexToUint8Array); return flattenUint8Arrays(flattenedPublicInputs); } -export function deflattenPublicInputs(flattenedPublicInputs: Uint8Array): string[] { +export function deflattenFields(flattenedFields: Uint8Array): string[] { const publicInputSize = 32; const chunkedFlattenedPublicInputs: Uint8Array[] = []; - for (let i = 0; i < flattenedPublicInputs.length; i += publicInputSize) { - const publicInput = flattenedPublicInputs.slice(i, i + publicInputSize); + for (let i = 0; i < flattenedFields.length; i += publicInputSize) { + const publicInput = flattenedFields.slice(i, i + publicInputSize); chunkedFlattenedPublicInputs.push(publicInput); } diff --git a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/verifier.ts b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/verifier.ts index fe9fa9cfffd..58612672b35 100644 --- a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/verifier.ts +++ b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/verifier.ts @@ -1,6 +1,6 @@ import { ProofData } from '@noir-lang/types'; import { BackendOptions } from './types.js'; -import { flattenPublicInputsAsArray } from './public_inputs.js'; +import { flattenFieldsAsArray } from './public_inputs.js'; import { type Barretenberg } from '@aztec/bb.js'; export class BarretenbergVerifier { @@ -69,10 +69,86 @@ export class BarretenbergVerifier { export function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array { // Flatten publicInputs - const publicInputsConcatenated = 
flattenPublicInputsAsArray(proofData.publicInputs); + const publicInputsConcatenated = flattenFieldsAsArray(proofData.publicInputs); // Concatenate publicInputs and proof const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]); return proofWithPublicInputs; } + +export class UltraHonkVerifier { + // These type assertions are used so that we don't + // have to initialize `api` in the constructor. + // These are initialized asynchronously in the `init` function, + // constructors cannot be asynchronous which is why we do this. + + private api!: Barretenberg; + + constructor(private options: BackendOptions = { threads: 1 }) {} + + /** @ignore */ + async instantiate(): Promise { + if (!this.api) { + if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) { + this.options.threads = navigator.hardwareConcurrency; + } else { + try { + const os = await import('os'); + this.options.threads = os.cpus().length; + } catch (e) { + console.log('Could not detect environment. Falling back to one thread.', e); + } + } + const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); + + // This is the number of CRS points necessary to verify a Barretenberg proof. + const NUM_CRS_POINTS_FOR_VERIFICATION: number = 0; + const [api, crs] = await Promise.all([Barretenberg.new(this.options), Crs.new(NUM_CRS_POINTS_FOR_VERIFICATION)]); + + await api.commonInitSlabAllocator(NUM_CRS_POINTS_FOR_VERIFICATION); + await api.srsInitSrs( + new RawBuffer([] /* crs.getG1Data() */), + NUM_CRS_POINTS_FOR_VERIFICATION, + new RawBuffer(crs.getG2Data()), + ); + + this.api = api; + } + } + + /** @description Verifies a proof */ + async verifyProof(proofData: ProofData, verificationKey: Uint8Array): Promise { + const { RawBuffer } = await import('@aztec/bb.js'); + + await this.instantiate(); + + const proof = reconstructProofWithPublicInputsHonk(proofData); + return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(verificationKey)); + } + + async destroy(): Promise { + if (!this.api) { + return; + } + await this.api.destroy(); + } +} + +const serializedBufferSize = 4; +const fieldByteSize = 32; +const publicInputOffset = 3; +const publicInputsOffsetBytes = publicInputOffset * fieldByteSize; + +export function reconstructProofWithPublicInputsHonk(proofData: ProofData): Uint8Array { + // Flatten publicInputs + const publicInputsConcatenated = flattenFieldsAsArray(proofData.publicInputs); + + const proofStart = proofData.proof.slice(0, publicInputsOffsetBytes + serializedBufferSize); + const proofEnd = proofData.proof.slice(publicInputsOffsetBytes + serializedBufferSize); + + // Concatenate publicInputs and proof + const proofWithPublicInputs = Uint8Array.from([...proofStart, ...publicInputsConcatenated, ...proofEnd]); + + return proofWithPublicInputs; +} diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/build.sh b/noir/noir-repo/tooling/noirc_abi_wasm/build.sh index c07d2d8a4c1..16fb26e55db 100755 --- a/noir/noir-repo/tooling/noirc_abi_wasm/build.sh +++ b/noir/noir-repo/tooling/noirc_abi_wasm/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/noir/noir-repo/yarn.lock b/noir/noir-repo/yarn.lock index f77e9f7e72e..40d6ccc55e6 100644 --- a/noir/noir-repo/yarn.lock +++ b/noir/noir-repo/yarn.lock @@ -221,18 +221,19 @@ __metadata: languageName: 
node linkType: hard -"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": - version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" +"@aztec/bb.js@npm:0.47.1": + version: 0.47.1 + resolution: "@aztec/bb.js@npm:0.47.1" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: ./dest/node/main.js + bb.js: dest/node/main.js + checksum: fa06d2ab58b2a23bacc578df7654f5c7eb90553229fc9730aaaf7479bc96b39f10f24a4f3a7eae8f73df3cdd8a3ffb07627cad61dff9896cabdb275ce5b6f09b languageName: node - linkType: soft + linkType: hard "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4160,7 +4161,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": "portal:../../../../barretenberg/ts" + "@aztec/bb.js": 0.47.1 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3